Use local_paths when submitting jobs to localhost

Brett Williams
2023-06-15 20:45:46 -05:00
parent 79ff451af8
commit 0080cdb371
3 changed files with 20 additions and 14 deletions
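
In short: when a job is posted to the machine the proxy itself is running on, the file upload is skipped and each job is sent as plain JSON with a "local_path" pointing at the project file already on disk; remote hosts still receive the multipart upload. The sketch below is illustrative only, the renderer name and paths are made up:

# Illustrative only: shape of the body the proxy now posts when self.hostname
# matches socket.gethostname(). All field values below are made up.
localhost_body = [
    {
        "renderer": "blender",                      # assumed renderer name
        "start_frame": 1,
        "end_frame": 100,
        "local_path": "/mnt/projects/scene.blend",  # file already on this machine
    },
]
# Remote hosts still get a multipart request: a 'file' part with the project
# bytes and a 'json' part with the job list (see the MultipartEncoder hunk below).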

View File

@@ -286,7 +286,7 @@ def add_job_handler():
     # initial handling of raw data
     try:
         if request.is_json:
-            jobs_list = [request.json]
+            jobs_list = [request.json] if not isinstance(request.json, list) else request.json
         elif request.form.get('json', None):
             jobs_list = json.loads(request.form['json'])
         else:
@@ -313,7 +313,7 @@ def add_job_handler():
     logger.debug(f"Incoming new job request: {jobs_list}")
     uploaded_project = request.files.get('file', None)
     project_url = jobs_list[0].get('url', None)
-    input_path = jobs_list[0].get('input_path', None)
+    local_path = jobs_list[0].get('local_path', None)
     renderer = jobs_list[0].get('renderer')
     downloaded_file_url = None
@@ -329,8 +329,8 @@ def add_job_handler():
             err_msg = f"Error downloading file: {e}"
             logger.error(err_msg)
             return err_msg, 406
-    elif input_path and os.path.exists(input_path):
-        referred_name = os.path.basename(input_path)
+    elif local_path and os.path.exists(local_path):
+        referred_name = os.path.basename(local_path)
     else:
         return "Cannot find any valid project paths", 400
@@ -351,9 +351,9 @@ def add_job_handler():
         loaded_project_local_path = os.path.join(upload_dir, referred_name)
         shutil.move(downloaded_file_url, loaded_project_local_path)
         logger.info(f"Download complete for {loaded_project_local_path.split(server.config['UPLOAD_FOLDER'])[-1]}")
-    elif input_path:
+    elif local_path:
         loaded_project_local_path = os.path.join(upload_dir, referred_name)
-        shutil.copy(input_path, loaded_project_local_path)
+        shutil.copy(local_path, loaded_project_local_path)
         logger.info(f"Import complete for {loaded_project_local_path.split(server.config['UPLOAD_FOLDER'])[-1]}")
     # process uploaded zip files
@@ -492,7 +492,7 @@ def create_subjobs(worker, job_data, project_path):
         logger.debug(f"Posting subjob with frames {subjob['start_frame']}-"
                      f"{subjob['end_frame']} to {server_hostname}")
         post_results = RenderServerProxy(server_hostname).post_job_to_server(
-            input_path=project_path, job_list=[subjob])
+            file_path=project_path, job_list=[subjob])
         if post_results.ok:
            submission_results[server_hostname] = post_results.json()[0]
         else:
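
The first hunk above also changes what /api/add_job accepts: the JSON body may now be either a single job object or a list of jobs. A minimal sketch of that normalization, using a hypothetical normalize() helper and made-up payloads:

# Hypothetical helper mirroring the isinstance() check added to add_job_handler.
def normalize(body):
    return [body] if not isinstance(body, list) else body

payload_single = {"renderer": "blender", "local_path": "/mnt/projects/scene.blend"}
payload_list = [payload_single]

# Both forms produce the same jobs_list inside the handler.
assert normalize(payload_single) == normalize(payload_list) == payload_list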

View File

@@ -2,6 +2,7 @@ import logging
 import os
 import json
 import requests
+import socket
 import time
 import threading
 from lib.workers.base_worker import RenderStatus
@@ -115,10 +116,17 @@ class RenderServerProxy:
                                          timeout=1) or {}
         return request_data.get('is_available', False)
-    def post_job_to_server(self, input_path, job_list, callback=None):
+    def post_job_to_server(self, file_path, job_list, callback=None):
+        # bypass uploading file if posting to localhost
+        if self.hostname == socket.gethostname():
+            jobs_with_path = [{**item, "local_path": file_path} for item in job_list]
+            return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=json.dumps(jobs_with_path),
+                                 headers={'Content-Type': 'application/json'})
         # Prepare the form data
         encoder = MultipartEncoder({
-            'file': (os.path.basename(input_path), open(input_path, 'rb'), 'application/octet-stream'),
+            'file': (os.path.basename(file_path), open(file_path, 'rb'), 'application/octet-stream'),
             'json': (None, json.dumps(job_list), 'application/json'),
         })
@@ -130,9 +138,7 @@ class RenderServerProxy:
         # Send the request
         headers = {'Content-Type': monitor.content_type}
-        response = requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=monitor, headers=headers)
-        return response
+        return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=monitor, headers=headers)
     def get_job_files(self, job_id, save_path):
         url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
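
For reference, a minimal usage sketch of the updated client call. The import path, renderer name, and file path are assumptions; the bare-hostname constructor mirrors how create_subjobs calls it above:

import socket

from render_server_proxy import RenderServerProxy  # module path assumed

job = {"renderer": "blender", "start_frame": 1, "end_frame": 100}  # made-up job
proxy = RenderServerProxy(socket.gethostname())  # targeting this machine

# Because the target hostname matches the local one, post_job_to_server skips the
# multipart upload and posts the job as JSON with "local_path" set to file_path.
response = proxy.post_job_to_server(file_path="/mnt/projects/scene.blend",
                                    job_list=[job])
print(response.status_code)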