Job Submission CLI (#122)

* Initial commit of job submission cli tool, with minor fixes in API code

* Refactored and further decoupled server / client code

* Clean up ServerProxy to not use hardcoded loopback addresses
This commit is contained in:
2025-12-27 18:36:34 -06:00
committed by GitHub
parent 6bfa5629d5
commit 574c6f0755
10 changed files with 350 additions and 231 deletions

View File

@@ -38,7 +38,7 @@ def handle_uploaded_project_files(request, jobs_list, upload_directory):
uploaded_project = request.files.get('file', None)
project_url = jobs_list[0].get('url', None)
local_path = jobs_list[0].get('local_path', None)
renderer = jobs_list[0].get('renderer')
renderer = jobs_list[0]['renderer']
downloaded_file_url = None
if uploaded_project and uploaded_project.filename:

View File

@@ -34,7 +34,7 @@ ssl._create_default_https_context = ssl._create_unverified_context # disable SS
API_VERSION = "1"
def start_server(hostname=None):
def start_api_server(hostname=None):
# get hostname
if not hostname:
@@ -463,6 +463,10 @@ def get_renderer_help(renderer):
# --------------------------------------------
# Miscellaneous:
# --------------------------------------------
@server.get('/api/heartbeat')
def heartbeat():
    """Liveness probe: reply with the server's current local time (ISO 8601) and HTTP 200."""
    timestamp = datetime.now().isoformat()
    return timestamp, 200
@server.post('/api/job/<job_id>/send_subjob_update_notification')
def subjob_update_notification(job_id):
subjob_details = request.json

View File

@@ -21,7 +21,6 @@ categories = [RenderStatus.RUNNING, RenderStatus.WAITING_FOR_SUBJOBS, RenderStat
logger = logging.getLogger()
OFFLINE_MAX = 4
LOOPBACK = '127.0.0.1'
class RenderServerProxy:
@@ -55,14 +54,18 @@ class RenderServerProxy:
def __repr__(self):
return f"<RenderServerProxy - {self.hostname}>"
def connect(self):
    """Attempt a connection by fetching the server's status; returns the status payload."""
    status_payload = self.status()
    return status_payload
def check_connection(self):
    """Return True if the server answers the heartbeat endpoint, False on any failure.

    Best-effort probe: every exception (timeout, refused connection, DNS error)
    is treated as "not connected" rather than propagated.
    """
    try:
        response = self.request("heartbeat")
    except Exception:
        return False
    return response.ok
def is_online(self):
if self.__update_in_background:
return self.__offline_flags < OFFLINE_MAX
else:
return self.get_status() is not None
return self.check_connection()
def status(self):
if not self.is_online():
@@ -102,8 +105,7 @@ class RenderServerProxy:
def request(self, payload, timeout=5):
from src.api.api_server import API_VERSION
hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.get(f'http://{hostname}:{self.port}/api/{payload}', timeout=timeout,
return requests.get(f'http://{self.hostname}:{self.port}/api/{payload}', timeout=timeout,
headers={"X-API-Version": str(API_VERSION)})
# --------------------------------------------
@@ -203,7 +205,7 @@ class RenderServerProxy:
if self.is_localhost:
jobs_with_path = [{'local_path': file_path, **item} for item in job_list]
job_data = json.dumps(jobs_with_path)
url = urljoin(f'http://{LOOPBACK}:{self.port}', '/api/add_job')
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job')
headers = {'Content-Type': 'application/json'}
return requests.post(url, data=job_data, headers=headers)
@@ -245,32 +247,32 @@ class RenderServerProxy:
Returns:
Response: The response from the server.
"""
hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.post(f'http://{hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification',
return requests.post(f'http://{self.hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification',
json=subjob.json())
# --------------------------------------------
# Renderers:
# Engines:
# --------------------------------------------
def is_engine_available(self, engine_name):
    """Ask the server whether the named render engine is available on it."""
    endpoint = '{}/is_available'.format(engine_name)
    return self.request_data(endpoint)
def get_all_engines(self):
    """Fetch the list of all engines known to the server.

    NOTE(review): the original comment flags this endpoint as broken
    ("this doesnt work") — confirm server-side support before relying on it.
    """
    return self.request_data('all_engines')
def get_renderer_info(self, response_type='standard', timeout=5):
def get_engine_info(self, response_type='standard', timeout=5):
"""
Fetches renderer information from the server.
Fetches engine information from the server.
Args:
response_type (str, optional): Returns standard or full version of renderer info
response_type (str, optional): Returns standard or full version of engine info
timeout (int, optional): The number of seconds to wait for a response from the server. Defaults to 5.
Returns:
dict: A dictionary containing the renderer information.
dict: A dictionary containing the engine information.
"""
all_data = self.request_data(f"renderer_info?response_type={response_type}", timeout=timeout)
all_data = self.request_data(f"engine_info?response_type={response_type}", timeout=timeout)
return all_data
def delete_engine(self, engine, version, system_cpu=None):
@@ -286,21 +288,18 @@ class RenderServerProxy:
Response: The response from the server.
"""
form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu}
hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.post(f'http://{hostname}:{self.port}/api/delete_engine', json=form_data)
return requests.post(f'http://{self.hostname}:{self.port}/api/delete_engine', json=form_data)
# --------------------------------------------
# Download Files:
# --------------------------------------------
def download_all_job_files(self, job_id, save_path):
hostname = LOOPBACK if self.is_localhost else self.hostname
url = f"http://{hostname}:{self.port}/api/job/{job_id}/download_all"
url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
return self.__download_file_from_url(url, output_filepath=save_path)
def download_job_file(self, job_id, job_filename, save_path):
hostname = LOOPBACK if self.is_localhost else self.hostname
url = f"http://{hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
return self.__download_file_from_url(url, output_filepath=save_path)
@staticmethod