Wait for subjob completion and download render files to host (#17)

* Fix Blender image sequence -> video conversion and change video to use ProRes

* Wait for child jobs to complete

* Download and extract render files from subjobs

* Fix issue where zip was not removed

* Update client to use new method names in server proxy

* Fix minor download issue
This commit is contained in:
2023-06-15 17:44:34 -05:00
committed by GitHub
parent 0a0a228731
commit e6eb344d19
6 changed files with 154 additions and 103 deletions

View File

@@ -193,8 +193,7 @@ def make_job_ready(job_id):
found_job = RenderQueue.job_with_id(job_id)
if found_job.status in [RenderStatus.NOT_READY, RenderStatus.NOT_STARTED]:
if found_job.children:
for child_name in found_job.children.split(','):
child_id, hostname = child_name.split('@')
for hostname, child_id in found_job.children.items():
RenderServerProxy(hostname).request_data(f'/api/job/<child_id>/make_ready')
found_job.status = RenderStatus.NOT_STARTED
RenderQueue.save_state()
@@ -323,7 +322,7 @@ def add_job_handler():
logger.info(f"Attempting to download URL: {project_url}")
try:
downloaded_file_url, info = urlretrieve(project_url)
referred_name = info.get_filename()
referred_name = info.get_filename() or os.path.basename(project_url)
except Exception as e:
err_msg = f"Error downloading file: {e}"
logger.error(err_msg)
@@ -507,7 +506,8 @@ def create_subjobs(worker, job_data, project_path):
# start subjobs
logger.debug(f"Starting {len(server_frame_ranges) - 1} attempted subjobs")
worker.children = ",".join([f"{results['id']}@{hostname}" for hostname, results in submission_results.items()])
for hostname, results in submission_results.items():
worker.children[hostname] = results['id']
worker.name = f"{worker.name}[{worker.start_frame}-{worker.end_frame}]"
except Exception as e:

View File

@@ -83,7 +83,10 @@ class RenderServerProxy:
def stop_background_update(self):
    """Turn off the background job-cache refresh.

    After this call, cache refreshes happen synchronously on demand
    (see get_all_jobs, which refreshes when the background flag is off).
    """
    self.__update_in_background = False
def get_jobs(self, timeout=5, ignore_token=False):
def get_job_info(self, job_id, timeout=5):
    """Fetch the server's record for a single job.

    Args:
        job_id: Identifier of the job to query.
        timeout: Seconds to wait for the request (default 5).

    Returns:
        Whatever request_data() returns for the 'job/<job_id>' endpoint.
    """
    endpoint = f'job/{job_id}'
    return self.request_data(endpoint, timeout=timeout)
def get_all_jobs(self, timeout=5, ignore_token=False):
    """Return a copy of the cached job list, refreshing it first if needed.

    The cache is refreshed synchronously when background updating is off,
    or when the caller forces it with ignore_token.

    Args:
        timeout: Seconds to wait for the refresh request (default 5).
        ignore_token: When True, force a refresh regardless of the
            background-update flag.

    Returns:
        A shallow copy of the job cache, or None when the cache is empty.
    """
    needs_refresh = ignore_token or not self.__update_in_background
    if needs_refresh:
        self.__update_job_cache(timeout, ignore_token)
    if not self.__jobs_cache:
        return None
    return self.__jobs_cache.copy()
@@ -130,3 +133,17 @@ class RenderServerProxy:
response = requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=monitor, headers=headers)
return response
def get_job_files(self, job_id, save_path):
    """Download the archive of a job's output files from the server.

    Args:
        job_id: Identifier of the job whose files to fetch.
        save_path: Local path the downloaded archive is written to.

    Returns:
        The path the file was saved to (as returned by download_file).
    """
    endpoint = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
    return self.download_file(endpoint, filename=save_path)
@staticmethod
def download_file(url, filename):
    """Stream a URL to a local file in fixed-size chunks.

    Args:
        url: Address to download from.
        filename: Local path to write the response body to.

    Returns:
        The filename that was written.

    Raises:
        requests.HTTPError: When the server responds with an error status.
    """
    with requests.get(url, stream=True) as response:
        # Fail before creating the local file, so no empty file is
        # left behind on an HTTP error.
        response.raise_for_status()
        with open(filename, 'wb') as out_file:
            for block in response.iter_content(chunk_size=8192):
                out_file.write(block)
    return filename