Skip VRAM check if the slave is not a CUDA device

pull/15/head
unknown 2023-06-11 04:11:12 -05:00
parent 6a28587796
commit 07b33c9bfd
No known key found for this signature in database
GPG Key ID: CA376082283AF69A
1 changed file with 10 additions and 6 deletions

View File

@ -277,12 +277,16 @@ class Worker:
self.full_url("memory"),
verify=self.verify_remotes
)
memory_response = memory_response.json()['cuda']['system'] # all in bytes
free_vram = int(memory_response['free']) / (1024 * 1024 * 1024)
total_vram = int(memory_response['total']) / (1024 * 1024 * 1024)
logger.debug(f"Worker '{self.uuid}' {free_vram:.2f}/{total_vram:.2f} GB VRAM free\n")
self.free_vram = bytes(memory_response['free'])
memory_response = memory_response.json()
try:
memory_response = memory_response['cuda']['system'] # all in bytes
free_vram = int(memory_response['free']) / (1024 * 1024 * 1024)
total_vram = int(memory_response['total']) / (1024 * 1024 * 1024)
logger.debug(f"Worker '{self.uuid}' {free_vram:.2f}/{total_vram:.2f} GB VRAM free\n")
self.free_vram = bytes(memory_response['free'])
except KeyError:
error = memory_response['cuda']['error']
logger.debug(f"CUDA doesn't seem to be available for worker '{self.uuid}'\nError: {error}")
if sync_options is True:
options_response = requests.post(