Mirror of https://github.com/blw1138/Zordon.git (synced 2026-02-05 05:36:09 +00:00)
Windows path fixes (#129)
* Change uses of os.path to use pathlib
* Add return types and type hints
* Add more docstrings
* Add missing import to api_server
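The same migration pattern repeats in every file of the diff below: expand the user directory and join path components with `Path` instead of `os.path.join`/`system_safe_path`. A minimal before/after sketch, using a made-up upload folder rather than the project's real `Config` values:

```python
import os
from pathlib import Path

# "~/zordon_uploads" is a hypothetical stand-in for Config.upload_folder.
upload_folder = "~/zordon_uploads"

# Old form, removed throughout this commit (can mix separators on Windows):
engines_path_old = os.path.join(os.path.expanduser(upload_folder), "engines")

# New form, introduced throughout this commit (separators normalized per platform):
engines_path_new = Path(upload_folder).expanduser() / "engines"

# Path objects are os.PathLike, so existing calls such as os.makedirs() still accept them:
# os.makedirs(engines_path_new, exist_ok=True)
print(engines_path_old)
print(engines_path_new)
```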
server.py (17 changed lines)
@@ -3,6 +3,7 @@ import multiprocessing
import os
import socket
import threading
from pathlib import Path

import psutil

@@ -14,7 +15,7 @@ from src.distributed_job_manager import DistributedJobManager
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue
from src.utilities.config import Config
from src.utilities.misc_helper import (get_gpu_info, system_safe_path, current_system_cpu, current_system_os,
from src.utilities.misc_helper import (get_gpu_info, current_system_cpu, current_system_os,
current_system_os_version, current_system_cpu_brand, check_for_updates)
from src.utilities.zeroconf_server import ZeroconfServer
from src.version import APP_NAME, APP_VERSION
@@ -33,15 +34,13 @@ class ZordonServer:

# Load Config YAML
Config.setup_config_dir()
Config.load_config(system_safe_path(os.path.join(Config.config_dir(), 'config.yaml')))
config_path = Path(Config.config_dir()) / "config.yaml"
Config.load_config(config_path)

# configure default paths
EngineManager.engines_path = system_safe_path(
os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
'engines')))
EngineManager.engines_path = Path(Config.upload_folder).expanduser() / "engines"
os.makedirs(EngineManager.engines_path, exist_ok=True)
PreviewManager.storage_path = system_safe_path(
os.path.join(os.path.expanduser(Config.upload_folder), 'previews'))
PreviewManager.storage_path = Path(Config.upload_folder).expanduser() / "previews"

self.api_server = None
self.server_hostname = None
@@ -72,11 +71,11 @@ class ZordonServer:

# main start
logger.info(f"Starting {APP_NAME} Render Server ({APP_VERSION})")
logger.debug(f"Upload directory: {os.path.expanduser(Config.upload_folder)}")
logger.debug(f"Upload directory: {Path(Config.upload_folder).expanduser()}")
logger.debug(f"Thumbs directory: {PreviewManager.storage_path}")
logger.debug(f"Engines directory: {EngineManager.engines_path}")
# Set up the RenderQueue object
RenderQueue.load_state(database_directory=system_safe_path(os.path.expanduser(Config.upload_folder)))
RenderQueue.load_state(database_directory=Path(Config.upload_folder).expanduser())
ServerProxyManager.subscribe_to_listener()
DistributedJobManager.subscribe_to_listener()

@@ -2,14 +2,15 @@
import concurrent.futures
import json
import logging
import os
import pathlib
import shutil
import socket
import ssl
import tempfile
import time
import traceback
from datetime import datetime
from pathlib import Path
from typing import Dict, Any, Optional

import cpuinfo
import psutil
@@ -23,7 +24,7 @@ from src.distributed_job_manager import DistributedJobManager
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue, JobNotFoundError
from src.utilities.config import Config
from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \
from src.utilities.misc_helper import current_system_os, current_system_cpu, \
current_system_os_version, num_to_alphanumeric, get_gpu_info
from src.utilities.status_utils import string_to_status
from src.version import APP_VERSION
@@ -34,7 +35,7 @@ ssl._create_default_https_context = ssl._create_unverified_context # disable SS

API_VERSION = "0.1"

def start_api_server(hostname=None):
def start_api_server(hostname: Optional[str] = None) -> None:

# get hostname
if not hostname:
@@ -44,7 +45,7 @@ def start_api_server(hostname=None):
# load flask settings
server.config['HOSTNAME'] = hostname
server.config['PORT'] = int(Config.port_number)
server.config['UPLOAD_FOLDER'] = system_safe_path(os.path.expanduser(Config.upload_folder))
server.config['UPLOAD_FOLDER'] = str(Path(Config.upload_folder).expanduser())
server.config['MAX_CONTENT_PATH'] = Config.max_content_path
server.config['enable_split_jobs'] = Config.enable_split_jobs

@@ -65,7 +66,7 @@ def start_api_server(hostname=None):
# --------------------------------------------

@server.get('/api/jobs')
def jobs_json():
def jobs_json() -> Dict[str, Any]:
"""Retrieves all jobs from the render queue in JSON format.

This endpoint fetches all jobs currently in the render queue, converts them to JSON format,
@@ -134,9 +135,9 @@ def get_job_logs(job_id):
Response: The log file's content as plain text, or an empty response if the log file is not found.
"""
found_job = RenderQueue.job_with_id(job_id)
log_path = system_safe_path(found_job.log_path())
log_path = Path(found_job.log_path())
log_data = None
if log_path and os.path.exists(log_path):
if log_path and log_path.exists():
with open(log_path) as file:
log_data = file.read()
return Response(log_data, mimetype='text/plain')
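The new `get_job_logs` keeps the `open()`/`read()` pair. An equivalent pathlib-only idiom, shown purely as an illustrative sketch rather than code from the commit, would be `Path.read_text()`:

```python
from pathlib import Path
from typing import Optional

def read_log(log_path: Path) -> Optional[str]:
    """Return the log contents, or None if the file does not exist (sketch only)."""
    # read_text() opens, reads, and closes the file in one call.
    return log_path.read_text() if log_path.exists() else None
```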
@@ -144,20 +145,21 @@ def get_job_logs(job_id):

@server.get('/api/job/<job_id>/file_list')
def get_file_list(job_id):
return [os.path.basename(x) for x in RenderQueue.job_with_id(job_id).file_list()]
return [Path(p).name for p in RenderQueue.job_with_id(job_id).file_list()]

@server.route('/api/job/<job_id>/download')
def download_requested_file(job_id):

requested_filename = request.args.get('filename')
requested_filename = request.args.get("filename")
if not requested_filename:
return 'Filename required', 400
return "Filename required", 400

found_job = RenderQueue.job_with_id(job_id)
for job_filename in found_job.file_list():
if os.path.basename(job_filename).lower() == requested_filename.lower():
return send_file(job_filename, as_attachment=True, )

for job_file in found_job.file_list():
p = Path(job_file)
if p.name.lower() == requested_filename.lower():
return send_file(str(p), as_attachment=True)

return f"File '{requested_filename}' not found", 404

@@ -168,26 +170,27 @@ def download_all_files(job_id):

@after_this_request
def clear_zip(response):
if zip_filename and os.path.exists(zip_filename):
if zip_filename and zip_filename.exists():
try:
os.remove(zip_filename)
zip_filename.unlink()
except Exception as e:
logger.warning(f"Error removing zip file '{zip_filename}': {e}")
return response

found_job = RenderQueue.job_with_id(job_id)
output_dir = os.path.dirname(found_job.output_path)
if os.path.exists(output_dir):

output_dir = Path(found_job.output_path).parent
if not output_dir.exists():
return f"Cannot find project files for job {job_id}", 500

zip_filename = Path(tempfile.gettempdir()) / f"{Path(found_job.input_path).stem}.zip"
from zipfile import ZipFile
zip_filename = system_safe_path(os.path.join(tempfile.gettempdir(),
pathlib.Path(found_job.input_path).stem + '.zip'))
with ZipFile(zip_filename, 'w') as zipObj:
for f in os.listdir(output_dir):
zipObj.write(filename=system_safe_path(os.path.join(output_dir, f)),
arcname=os.path.basename(f))
return send_file(zip_filename, mimetype="zip", as_attachment=True, )
else:
return f'Cannot find project files for job {job_id}', 500
with ZipFile(zip_filename, "w") as zipObj:
for f in output_dir.iterdir():
if f.is_file():
zipObj.write(f, arcname=f.name)

return send_file(str(zip_filename), mimetype="zip", as_attachment=True)

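The rewritten `download_all_files` above builds the archive from `output_dir.iterdir()` and passes `Path` objects straight to `ZipFile.write()`. A standalone sketch of that zipping step, with a hypothetical helper name:

```python
import tempfile
from pathlib import Path
from zipfile import ZipFile

def zip_directory_files(output_dir: Path, archive_stem: str) -> Path:
    """Zip the regular files in output_dir into a temp archive and return its path (sketch)."""
    zip_path = Path(tempfile.gettempdir()) / f"{archive_stem}.zip"
    with ZipFile(zip_path, "w") as archive:
        for item in output_dir.iterdir():
            if item.is_file():
                # arcname keeps only the file name, so the archive has a flat layout
                archive.write(item, arcname=item.name)
    return zip_path
```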
# --------------------------------------------
@@ -195,8 +198,8 @@ def download_all_files(job_id):
# --------------------------------------------

@server.get('/api/presets')
def presets():
presets_path = system_safe_path('config/presets.yaml')
def presets() -> Dict[str, Any]:
presets_path = Path('config/presets.yaml')
with open(presets_path) as f:
loaded_presets = yaml.load(f, Loader=yaml.FullLoader)
return loaded_presets
@@ -292,6 +295,7 @@ def add_job_handler():
err_msg = f"Error processing job data: {e}"
return err_msg, 400
except Exception as e:
traceback.print_exception(e)
err_msg = f"Unknown error processing data: {e}"
return err_msg, 500

@@ -311,9 +315,9 @@ def add_job_handler():

# Save notes to .txt
if processed_job_data.get("notes"):
parent_dir = os.path.dirname(os.path.dirname(loaded_project_local_path))
parent_dir = Path(loaded_project_local_path).parent.parent
notes_name = processed_job_data['name'] + "-notes.txt"
with open(os.path.join(parent_dir, notes_name), "w") as f:
with (Path(parent_dir) / notes_name).open("w") as f:
f.write(processed_job_data["notes"])
return [x.json() for x in created_jobs]
except Exception as e:
@@ -338,13 +342,18 @@ def cancel_job(job_id):
@server.route('/api/job/<job_id>/delete', methods=['POST', 'GET'])
def delete_job(job_id):
try:
if not request.args.get('confirm', False):
return 'Confirmation required to delete job', 400
if not request.args.get("confirm", False):
return "Confirmation required to delete job", 400

# Check if we can remove the 'output' directory
found_job = RenderQueue.job_with_id(job_id)
project_dir = os.path.dirname(os.path.dirname(found_job.input_path))
output_dir = os.path.dirname(found_job.output_path)

input_path = Path(found_job.input_path)
output_path = Path(found_job.output_path)
upload_root = Path(server.config["UPLOAD_FOLDER"])

project_dir = input_path.parent.parent
output_dir = output_path.parent

found_job.stop()

try:
@@ -352,25 +361,24 @@ def delete_job(job_id):
except Exception as e:
logger.error(f"Error deleting previews for {found_job}: {e}")

# finally delete the job
RenderQueue.delete_job(found_job)

# delete the output_dir
if server.config['UPLOAD_FOLDER'] in output_dir and os.path.exists(output_dir):
# Delete output directory if we own it
if output_dir.exists() and output_dir.is_relative_to(upload_root):
shutil.rmtree(output_dir)

# See if we own the project_dir (i.e. was it uploaded) - if so delete the directory
# Delete project directory if we own it and it's unused
try:
if server.config['UPLOAD_FOLDER'] in project_dir and os.path.exists(project_dir):
# check to see if any other projects are sharing the same project file
project_dir_files = [f for f in os.listdir(project_dir) if not f.startswith('.')]
if len(project_dir_files) == 0 or (len(project_dir_files) == 1 and 'source' in project_dir_files[0]):
if project_dir.exists() and project_dir.is_relative_to(upload_root):
project_dir_files = [p for p in project_dir.iterdir() if not p.name.startswith(".")]
if not project_dir_files or (len(project_dir_files) == 1 and "source" in project_dir_files[0].name):
logger.info(f"Removing project directory: {project_dir}")
shutil.rmtree(project_dir)
except Exception as e:
logger.error(f"Error removing project files: {e}")

return "Job deleted", 200

except Exception as e:
logger.error(f"Error deleting job: {e}")
return f"Error deleting job: {e}", 500

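The delete handler above swaps the old substring test (`server.config['UPLOAD_FOLDER'] in output_dir`) for `Path.is_relative_to()`, which compares path components instead of characters and requires Python 3.9+. A hedged sketch of that ownership check, using hypothetical paths:

```python
from pathlib import Path

def owned_by_upload_root(candidate: Path, upload_root: Path) -> bool:
    """True if candidate lives inside upload_root (sketch; is_relative_to needs Python 3.9+)."""
    # Unlike a substring check, this will not treat "/data/uploads-old/job" as
    # being inside "/data/uploads". resolve() is added here for illustration only.
    return candidate.resolve().is_relative_to(upload_root.resolve())

# owned_by_upload_root(Path("/data/uploads/job1/output"), Path("/data/uploads"))   # True
# owned_by_upload_root(Path("/data/uploads-old/job1"), Path("/data/uploads"))      # False
```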
@@ -7,6 +7,7 @@ import zipfile
from datetime import datetime

import requests
from pathlib import Path
from tqdm import tqdm
from werkzeug.utils import secure_filename

@@ -16,7 +17,7 @@ logger = logging.getLogger()
class JobImportHandler:

@classmethod
def validate_job_data(cls, new_job_data, upload_directory, uploaded_file=None):
def validate_job_data(cls, new_job_data: dict, upload_directory: Path, uploaded_file=None) -> dict:
loaded_project_local_path = None

# check for required keys
@@ -44,27 +45,34 @@ class JobImportHandler:

# Prepare the local filepath
cleaned_path_name = job_name.replace(' ', '-')
job_dir = os.path.join(upload_directory, '-'.join(
[cleaned_path_name, engine_name, datetime.now().strftime("%Y.%m.%d_%H.%M.%S")]))
timestamp = datetime.now().strftime("%Y.%m.%d_%H.%M.%S")
folder_name = f"{cleaned_path_name}-{engine_name}-{timestamp}"
job_dir = Path(upload_directory) / folder_name
os.makedirs(job_dir, exist_ok=True)
project_source_dir = os.path.join(job_dir, 'source')
project_source_dir = Path(job_dir) / 'source'
os.makedirs(project_source_dir, exist_ok=True)

# Move projects to their work directories
if uploaded_file and uploaded_file.filename:
loaded_project_local_path = os.path.join(project_source_dir, secure_filename(uploaded_file.filename))
uploaded_file.save(loaded_project_local_path)
logger.info(f"Transfer complete for {loaded_project_local_path.split(upload_directory)[-1]}")
elif project_url:
loaded_project_local_path = os.path.join(project_source_dir, referred_name)
shutil.move(downloaded_file_url, loaded_project_local_path)
logger.info(f"Download complete for {loaded_project_local_path.split(upload_directory)[-1]}")
elif local_path:
loaded_project_local_path = os.path.join(project_source_dir, referred_name)
shutil.copy(local_path, loaded_project_local_path)
logger.info(f"Import complete for {loaded_project_local_path.split(upload_directory)[-1]}")
# Handle file uploading
filename = secure_filename(uploaded_file.filename)
loaded_project_local_path = Path(project_source_dir) / filename
uploaded_file.save(str(loaded_project_local_path))
logger.info(f"Transfer complete for {loaded_project_local_path.relative_to(upload_directory)}")

if loaded_project_local_path.lower().endswith('.zip'):
elif project_url:
# Handle downloading project from a URL
loaded_project_local_path = Path(project_source_dir) / referred_name
shutil.move(str(downloaded_file_url), str(loaded_project_local_path))
logger.info(f"Download complete for {loaded_project_local_path.relative_to(upload_directory)}")

elif local_path:
# Handle local files
loaded_project_local_path = Path(project_source_dir) / referred_name
shutil.copy(str(local_path), str(loaded_project_local_path))
logger.info(f"Import complete for {loaded_project_local_path.relative_to(upload_directory)}")

if loaded_project_local_path.suffix == ".zip":
loaded_project_local_path = cls.process_zipped_project(loaded_project_local_path)

new_job_data["__loaded_project_local_path"] = loaded_project_local_path
@@ -72,7 +80,7 @@ class JobImportHandler:
return new_job_data

@staticmethod
def download_project_from_url(project_url):
def download_project_from_url(project_url: str):
# This nested function is to handle downloading from a URL
logger.info(f"Downloading project from url: {project_url}")
referred_name = os.path.basename(project_url)
@@ -101,7 +109,7 @@ class JobImportHandler:
return None, None

@staticmethod
def process_zipped_project(zip_path):
def process_zipped_project(zip_path: Path) -> Path:
"""
Processes a zipped project.

@@ -109,22 +117,21 @@ class JobImportHandler:
If the zip file contains more than one project file or none, an error is raised.

Args:
zip_path (str): The path to the zip file.
zip_path (Path): The path to the zip file.

Raises:
ValueError: If there's more than 1 project file or none in the zip file.

Returns:
str: The path to the main project file.
Path: The path to the main project file.
"""
work_path = os.path.dirname(zip_path)
work_path = zip_path.parent

try:
with zipfile.ZipFile(zip_path, 'r') as myzip:
myzip.extractall(work_path)
myzip.extractall(str(work_path))

project_files = [x for x in os.listdir(work_path) if os.path.isfile(os.path.join(work_path, x))]
project_files = [x for x in project_files if '.zip' not in x]
project_files = [p for p in work_path.iterdir() if p.is_file() and p.suffix.lower() != ".zip"]

logger.debug(f"Zip files: {project_files}")

@@ -134,9 +141,9 @@ class JobImportHandler:

# If there's more than 1 project file or none, raise an error
if len(project_files) != 1:
raise ValueError(f'Cannot find a valid project file in {os.path.basename(zip_path)}')
raise ValueError(f'Cannot find a valid project file in {zip_path.name}')

extracted_project_path = os.path.join(work_path, project_files[0])
extracted_project_path = work_path / project_files[0]
logger.info(f"Extracted zip file to {extracted_project_path}")

except (zipfile.BadZipFile, zipfile.LargeZipFile) as e:

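The pathlib version of `process_zipped_project` above extracts next to the archive and filters out the `.zip` itself via `suffix`. A self-contained sketch of that flow (illustrative helper name, same logic):

```python
import zipfile
from pathlib import Path

def extract_single_project(zip_path: Path) -> Path:
    """Extract zip_path beside itself and return the single non-zip project file (sketch)."""
    work_path = zip_path.parent
    with zipfile.ZipFile(zip_path) as archive:
        archive.extractall(work_path)
    candidates = [p for p in work_path.iterdir()
                  if p.is_file() and p.suffix.lower() != ".zip"]
    if len(candidates) != 1:
        raise ValueError(f"Cannot find a valid project file in {zip_path.name}")
    return candidates[0]
```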
@@ -3,6 +3,7 @@ import logging
import os
import threading
import time
from pathlib import Path

import requests
from requests_toolbelt.multipart import MultipartEncoder, MultipartEncoderMonitor
@@ -184,12 +185,12 @@ class RenderServerProxy:
# Job Lifecycle:
# --------------------------------------------

def post_job_to_server(self, file_path, job_data, callback=None):
def post_job_to_server(self, file_path: Path, job_data, callback=None):
"""
Posts a job to the server.

Args:
file_path (str): The path to the file to upload.
file_path (Path): The path to the file to upload.
job_data (dict): A dict of jobs data.
callback (function, optional): A callback function to call during the upload. Defaults to None.

@@ -197,12 +198,12 @@ class RenderServerProxy:
Response: The response from the server.
"""
# Check if file exists
if not os.path.exists(file_path):
if not file_path.exists():
raise FileNotFoundError(f"File not found: {file_path}")

# Bypass uploading file if posting to localhost
if self.is_localhost:
job_data['local_path'] = file_path
job_data['local_path'] = str(file_path)
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job')
headers = {'Content-Type': 'application/json'}
return requests.post(url, data=json.dumps(job_data), headers=headers)
@@ -210,7 +211,7 @@ class RenderServerProxy:
# Prepare the form data for remote host
with open(file_path, 'rb') as file:
encoder = MultipartEncoder({
'file': (os.path.basename(file_path), file, 'application/octet-stream'),
'file': (file_path.name, file, 'application/octet-stream'),
'json': (None, json.dumps(job_data), 'application/json'),
})

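With `file_path` now a `Path`, the multipart upload only needs `file_path.name` and `file_path.open('rb')`. A condensed sketch of that remote branch, with a hypothetical function name and the same imports used in this file:

```python
import json
from pathlib import Path

import requests
from requests_toolbelt.multipart import MultipartEncoder

def upload_project(file_path: Path, job_data: dict, url: str) -> requests.Response:
    """Stream a project file plus its job metadata as multipart form data (sketch)."""
    with file_path.open("rb") as handle:
        encoder = MultipartEncoder({
            # Path.name replaces os.path.basename(file_path) from the old code
            "file": (file_path.name, handle, "application/octet-stream"),
            "json": (None, json.dumps(job_data), "application/json"),
        })
        return requests.post(url, data=encoder,
                             headers={"Content-Type": encoder.content_type})
```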
@@ -247,7 +248,7 @@ class RenderServerProxy:
# Engines:
# --------------------------------------------

def get_engine_for_filename(self, filename, timeout=5):
def get_engine_for_filename(self, filename:str, timeout=5):
response = self.request(f'engine_for_filename?filename={os.path.basename(filename)}', timeout)
return response.text

@@ -4,6 +4,7 @@ import socket
import threading
import time

from click import Path
from plyer import notification
from pubsub import pub

@@ -70,7 +71,7 @@ class DistributedJobManager:
logger.error(f"Error notifying parent {parent_hostname} about update in subjob {render_job.id}: {e}")

@classmethod
def __local_job_status_changed(cls, job_id, old_status, new_status):
def __local_job_status_changed(cls, job_id: str, old_status: str, new_status: str):
"""
Responds to the 'status_change' pubsub message for local jobs.
If it's a child job, it notifies the parent job about the status change.
@@ -129,13 +130,13 @@ class DistributedJobManager:
# --------------------------------------------

@classmethod
def create_render_job(cls, new_job_attributes, loaded_project_local_path):
def create_render_job(cls, new_job_attributes: dict, loaded_project_local_path: Path):
"""Creates render jobs. Pass in dict of job_data and the local path to the project. It creates and returns a new
render job.

Args:
new_job_attributes (dict): Dict of desired attributes for new job (frame count, renderer, output path, etc)
loaded_project_local_path (str): The local path to the loaded project.
loaded_project_local_path (Path): The local path to the loaded project.

Returns:
worker: Created job worker
@@ -143,15 +144,11 @@ class DistributedJobManager:

# get new output path in output_dir
output_path = new_job_attributes.get('output_path')
if not output_path:
loaded_project_filename = os.path.basename(loaded_project_local_path)
output_filename = os.path.splitext(loaded_project_filename)[0]
else:
output_filename = os.path.basename(output_path)
output_filename = loaded_project_local_path.name if output_path else loaded_project_local_path.stem

# Prepare output path
output_dir = os.path.join(os.path.dirname(os.path.dirname(loaded_project_local_path)), 'output')
output_path = os.path.join(output_dir, output_filename)
output_dir = loaded_project_local_path.parent.parent / "output"
output_path = output_dir / output_filename
os.makedirs(output_dir, exist_ok=True)
logger.debug(f"New job output path: {output_path}")

@@ -186,7 +183,7 @@ class DistributedJobManager:
# --------------------------------------------

@classmethod
def handle_subjob_update_notification(cls, local_job, subjob_data):
def handle_subjob_update_notification(cls, local_job, subjob_data: dict):
"""Responds to a notification from a remote subjob and the host requests any subsequent updates from the subjob.

Args:
@@ -347,7 +344,7 @@ class DistributedJobManager:
RenderServerProxy(parent_worker.hostname).cancel_job(parent_worker.id, confirm=True)

@staticmethod
def __create_subjob(new_job_attributes, project_path, server_data, server_hostname, parent_worker):
def __create_subjob(new_job_attributes: dict, project_path, server_data, server_hostname: str, parent_worker):
"""Convenience method to create subjobs for a parent worker"""
subjob = new_job_attributes.copy()
subjob['name'] = f"{parent_worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]"
@@ -366,7 +363,7 @@ class DistributedJobManager:
# --------------------------------------------

@staticmethod
def find_available_servers(engine_name, system_os=None):
def find_available_servers(engine_name: str, system_os=None):
"""
Scan the Zeroconf network for currently available render servers supporting a specific engine.

@@ -375,16 +372,16 @@ class DistributedJobManager:
:return: A list of dictionaries with each dict containing hostname and cpu_count of available servers
"""
from api.api_server import API_VERSION
available_servers = []
found_available_servers = []
for hostname in ZeroconfServer.found_hostnames():
host_properties = ZeroconfServer.get_hostname_properties(hostname)
if host_properties.get('api_version') == API_VERSION:
if not system_os or (system_os and system_os == host_properties.get('system_os')):
response = RenderServerProxy(hostname).is_engine_available(engine_name)
if response and response.get('available', False):
available_servers.append(response)
found_available_servers.append(response)

return available_servers
return found_available_servers

if __name__ == '__main__':

@@ -1,8 +1,8 @@
|
||||
import json
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from src.engines.core.base_engine import *
|
||||
from src.utilities.misc_helper import system_safe_path
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
@@ -89,7 +89,7 @@ class Blender(BaseRenderEngine):
|
||||
scene_info = {}
|
||||
try:
|
||||
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_file_info.py')
|
||||
results = self.run_python_script(project_path=project_path, script_path=system_safe_path(script_path),
|
||||
results = self.run_python_script(project_path=project_path, script_path=Path(script_path),
|
||||
timeout=timeout)
|
||||
result_text = results.stdout.decode()
|
||||
for line in result_text.splitlines():
|
||||
@@ -110,7 +110,7 @@ class Blender(BaseRenderEngine):
|
||||
try:
|
||||
logger.info(f"Starting to pack Blender file: {project_path}")
|
||||
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'pack_project.py')
|
||||
results = self.run_python_script(project_path=project_path, script_path=system_safe_path(script_path),
|
||||
results = self.run_python_script(project_path=project_path, script_path=Path(script_path),
|
||||
timeout=timeout)
|
||||
|
||||
result_text = results.stdout.decode()
|
||||
|
||||
@@ -2,6 +2,7 @@ import logging
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
from typing import Optional, List, Dict, Any, Type
|
||||
|
||||
logger = logging.getLogger()
|
||||
SUBPROCESS_TIMEOUT = 10
|
||||
@@ -17,14 +18,22 @@ class BaseRenderEngine(object):
|
||||
executable on different operating systems or environments.
|
||||
"""
|
||||
|
||||
install_paths = []
|
||||
install_paths: List[str] = []
|
||||
|
||||
# --------------------------------------------
|
||||
# Required Overrides for Subclasses:
|
||||
# --------------------------------------------
|
||||
|
||||
def __init__(self, custom_path=None):
|
||||
self.custom_engine_path = custom_path
|
||||
def __init__(self, custom_path: Optional[str] = None) -> None:
|
||||
"""Initialize the render engine.
|
||||
|
||||
Args:
|
||||
custom_path: Optional custom path to the engine executable.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the engine executable cannot be found.
|
||||
"""
|
||||
self.custom_engine_path: Optional[str] = custom_path
|
||||
if not self.engine_path() or not os.path.exists(self.engine_path()):
|
||||
raise FileNotFoundError(f"Cannot find path to engine for {self.name()} instance: {self.engine_path()}")
|
||||
|
||||
@@ -32,7 +41,7 @@ class BaseRenderEngine(object):
|
||||
logger.warning(f"Path is not executable. Setting permissions to 755 for {self.engine_path()}")
|
||||
os.chmod(self.engine_path(), 0o755)
|
||||
|
||||
def version(self):
|
||||
def version(self) -> str:
|
||||
"""Return the version number as a string.
|
||||
|
||||
Returns:
|
||||
@@ -43,7 +52,7 @@ class BaseRenderEngine(object):
|
||||
"""
|
||||
raise NotImplementedError(f"version not implemented for {self.__class__.__name__}")
|
||||
|
||||
def get_project_info(self, project_path, timeout=10):
|
||||
def get_project_info(self, project_path: str, timeout: int = 10) -> Dict[str, Any]:
|
||||
"""Extracts detailed project information from the given project path.
|
||||
|
||||
Args:
|
||||
@@ -59,7 +68,7 @@ class BaseRenderEngine(object):
|
||||
raise NotImplementedError(f"get_project_info not implemented for {self.__class__.__name__}")
|
||||
|
||||
@classmethod
|
||||
def get_output_formats(cls):
|
||||
def get_output_formats(cls) -> List[str]:
|
||||
"""Returns a list of available output formats supported by the engine.
|
||||
|
||||
Returns:
|
||||
@@ -68,21 +77,22 @@ class BaseRenderEngine(object):
|
||||
raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
|
||||
|
||||
@staticmethod
|
||||
def worker_class(): # override when subclassing to link worker class
|
||||
def worker_class() -> Type[Any]: # override when subclassing to link worker class
|
||||
raise NotImplementedError("Worker class not implemented")
|
||||
|
||||
# --------------------------------------------
|
||||
# Optional Overrides for Subclasses:
|
||||
# --------------------------------------------
|
||||
|
||||
def supported_extensions(self):
|
||||
"""
|
||||
def supported_extensions(self) -> List[str]:
|
||||
"""Return a list of file extensions supported by this engine.
|
||||
|
||||
Returns:
|
||||
list[str]: list of supported extensions
|
||||
List[str]: List of supported file extensions (e.g., ['.blend', '.mp4']).
|
||||
"""
|
||||
return []
|
||||
|
||||
def get_help(self):
|
||||
def get_help(self) -> str:
|
||||
"""Retrieves the help documentation for the engine.
|
||||
|
||||
This method runs the engine's help command (default: '-h') and captures the output.
|
||||
@@ -102,7 +112,7 @@ class BaseRenderEngine(object):
|
||||
timeout=SUBPROCESS_TIMEOUT, creationflags=creationflags).decode('utf-8')
|
||||
return help_doc
|
||||
|
||||
def system_info(self):
|
||||
def system_info(self) -> Dict[str, Any]:
|
||||
"""Return additional information about the system specfic to the engine (configured GPUs, render engines, etc)
|
||||
|
||||
Returns:
|
||||
@@ -110,7 +120,7 @@ class BaseRenderEngine(object):
|
||||
"""
|
||||
return {}
|
||||
|
||||
def perform_presubmission_tasks(self, project_path):
|
||||
def perform_presubmission_tasks(self, project_path: str) -> str:
|
||||
"""Perform any pre-submission tasks on a project file before uploading it to a server (pack textures, etc.)
|
||||
|
||||
Override this method to:
|
||||
@@ -126,30 +136,60 @@ class BaseRenderEngine(object):
|
||||
"""
|
||||
return project_path
|
||||
|
||||
def get_arguments(self):
|
||||
pass
|
||||
def get_arguments(self) -> Dict[str, Any]:
|
||||
"""Return command-line arguments for this engine.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Dictionary of argument specifications.
|
||||
"""
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def downloader(): # override when subclassing if using a downloader class
|
||||
def downloader() -> Optional[Any]:
|
||||
"""Return the downloader class for this engine.
|
||||
|
||||
Returns:
|
||||
Optional[Any]: Downloader class instance, or None if no downloader is used.
|
||||
"""
|
||||
return None
|
||||
|
||||
def ui_options(self): # override to return options for ui
|
||||
def ui_options(self) -> Dict[str, Any]:
|
||||
"""Return UI configuration options for this engine.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Dictionary of UI options and their configurations.
|
||||
"""
|
||||
return {}
|
||||
|
||||
# --------------------------------------------
|
||||
# Do Not Override These Methods:
|
||||
# --------------------------------------------
|
||||
|
||||
def engine_path(self):
|
||||
def engine_path(self) -> Optional[str]:
|
||||
"""Return the path to the engine executable.
|
||||
|
||||
Returns:
|
||||
Optional[str]: Path to the engine executable, or None if not found.
|
||||
"""
|
||||
return self.custom_engine_path or self.default_engine_path()
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
def name(cls) -> str:
|
||||
"""Return the name of this engine.
|
||||
|
||||
Returns:
|
||||
str: Engine name in lowercase.
|
||||
"""
|
||||
return str(cls.__name__).lower()
|
||||
|
||||
@classmethod
|
||||
def default_engine_path(cls):
|
||||
path = None
|
||||
def default_engine_path(cls) -> Optional[str]:
|
||||
"""Find the default path to the engine executable.
|
||||
|
||||
Returns:
|
||||
Optional[str]: Default path to the engine executable, or None if not found.
|
||||
"""
|
||||
path: Optional[str] = None
|
||||
try: # Linux and macOS
|
||||
path = subprocess.check_output(['which', cls.name()], timeout=SUBPROCESS_TIMEOUT).decode('utf-8').strip()
|
||||
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||
|
||||
@@ -8,6 +8,7 @@ import subprocess
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, Any, List, Union
|
||||
|
||||
import psutil
|
||||
from pubsub import pub
|
||||
@@ -49,9 +50,25 @@ class BaseRenderWorker(Base):
|
||||
# Required Overrides for Subclasses:
|
||||
# --------------------------------------------
|
||||
|
||||
def __init__(self, input_path, output_path, engine_path, priority=2, args=None, ignore_extensions=True, parent=None,
|
||||
name=None):
|
||||
def __init__(self, input_path: str, output_path: str, engine_path: str, priority: int = 2,
|
||||
args: Optional[Dict[str, Any]] = None, ignore_extensions: bool = True,
|
||||
parent: Optional[str] = None, name: Optional[str] = None) -> None:
|
||||
"""Initialize a render worker.
|
||||
|
||||
Args:
|
||||
input_path: Path to the input project file.
|
||||
output_path: Path where output files will be saved.
|
||||
engine_path: Path to the render engine executable.
|
||||
priority: Job priority level (default: 2).
|
||||
args: Additional arguments for the render job.
|
||||
ignore_extensions: Whether to ignore file extension validation.
|
||||
parent: Parent job ID for distributed jobs.
|
||||
name: Custom name for the job.
|
||||
|
||||
Raises:
|
||||
ValueError: If file extension is not supported.
|
||||
NotImplementedError: If engine is not defined.
|
||||
"""
|
||||
if not ignore_extensions:
|
||||
if not any(ext in input_path for ext in self.engine.supported_extensions()):
|
||||
err_meg = f"Cannot find valid project with supported file extension for '{self.engine.name()}'"
|
||||
@@ -61,6 +78,7 @@ class BaseRenderWorker(Base):
|
||||
raise NotImplementedError(f"Engine not defined for {self.__class__.__name__}")
|
||||
|
||||
def generate_id():
|
||||
"""Generate a unique job ID."""
|
||||
import uuid
|
||||
return str(uuid.uuid4()).split('-')[0]
|
||||
|
||||
@@ -103,7 +121,7 @@ class BaseRenderWorker(Base):
|
||||
self.__last_output_time = None
|
||||
self.watchdog_timeout = 120
|
||||
|
||||
def generate_worker_subprocess(self):
|
||||
def generate_worker_subprocess(self) -> List[str]:
|
||||
"""Generate a return a list of the command line arguments necessary to perform requested job
|
||||
|
||||
Returns:
|
||||
@@ -111,7 +129,7 @@ class BaseRenderWorker(Base):
|
||||
"""
|
||||
raise NotImplementedError("generate_worker_subprocess not implemented")
|
||||
|
||||
def _parse_stdout(self, line):
|
||||
def _parse_stdout(self, line: str) -> None:
|
||||
"""Parses a line of standard output from the engine.
|
||||
|
||||
This method should be overridden in a subclass to implement the logic for processing
|
||||
@@ -133,13 +151,18 @@ class BaseRenderWorker(Base):
|
||||
# Optional Overrides for Subclasses:
|
||||
# --------------------------------------------
|
||||
|
||||
def percent_complete(self):
|
||||
def percent_complete(self) -> float:
|
||||
"""Return the completion percentage of this job.
|
||||
|
||||
Returns:
|
||||
float: Completion percentage between 0.0 and 1.0.
|
||||
"""
|
||||
# todo: fix this
|
||||
if self.status == RenderStatus.COMPLETED:
|
||||
return 1.0
|
||||
return 0
|
||||
|
||||
def post_processing(self):
|
||||
def post_processing(self) -> None:
|
||||
"""Override to perform any engine-specific postprocessing"""
|
||||
pass
|
||||
|
||||
@@ -148,6 +171,11 @@ class BaseRenderWorker(Base):
|
||||
# --------------------------------------------
|
||||
|
||||
def __repr__(self):
|
||||
"""Return string representation of the worker.
|
||||
|
||||
Returns:
|
||||
str: String representation showing job details.
|
||||
"""
|
||||
return f"<Job id:{self.id} p{self.priority} {self.engine_name}-{self.engine_version} '{self.name}' status:{self.status.value}>"
|
||||
|
||||
@property
|
||||
@@ -176,6 +204,14 @@ class BaseRenderWorker(Base):
|
||||
pub.sendMessage('frame_complete', job_id=self.id, frame_number=self.current_frame)
|
||||
|
||||
def generate_subprocess(self):
|
||||
"""Generate subprocess command arguments.
|
||||
|
||||
Returns:
|
||||
List[str]: List of command line arguments for the subprocess.
|
||||
|
||||
Raises:
|
||||
ValueError: If argument conflicts are detected.
|
||||
"""
|
||||
# Convert raw args from string if available and catch conflicts
|
||||
generated_args = [str(x) for x in self.generate_worker_subprocess()]
|
||||
generated_args_flags = [x for x in generated_args if x.startswith('-')]
|
||||
@@ -186,6 +222,11 @@ class BaseRenderWorker(Base):
|
||||
return generated_args
|
||||
|
||||
def get_raw_args(self):
|
||||
"""Parse raw command line arguments from args dictionary.
|
||||
|
||||
Returns:
|
||||
Optional[List[str]]: Parsed raw arguments, or None if no raw args.
|
||||
"""
|
||||
raw_args_string = self.args.get('raw', '')
|
||||
raw_args = None
|
||||
if raw_args_string:
|
||||
@@ -194,12 +235,20 @@ class BaseRenderWorker(Base):
|
||||
return raw_args
|
||||
|
||||
def log_path(self):
|
||||
"""Generate the log file path for this job.
|
||||
|
||||
Returns:
|
||||
str: Path to the log file.
|
||||
"""
|
||||
filename = (self.name or os.path.basename(self.input_path)) + '_' + \
|
||||
self.date_created.strftime("%Y.%m.%d_%H.%M.%S") + '.log'
|
||||
return os.path.join(os.path.dirname(self.input_path), filename)
|
||||
|
||||
def start(self):
|
||||
"""Start the render job.
|
||||
|
||||
Validates input paths and engine availability, then starts the render thread.
|
||||
"""
|
||||
if self.status not in [RenderStatus.SCHEDULED, RenderStatus.NOT_STARTED, RenderStatus.CONFIGURING]:
|
||||
logger.error(f"Trying to start job with status: {self.status}")
|
||||
return
|
||||
@@ -429,17 +478,33 @@ class BaseRenderWorker(Base):
|
||||
logger.error(f"Error stopping the process: {e}")
|
||||
|
||||
def is_running(self):
|
||||
"""Check if the render job is currently running.
|
||||
|
||||
Returns:
|
||||
bool: True if the job is running, False otherwise.
|
||||
"""
|
||||
if hasattr(self, '__thread'):
|
||||
return self.__thread.is_alive()
|
||||
return False
|
||||
|
||||
def log_error(self, error_line, halt_render=False):
|
||||
"""Log an error and optionally halt the render.
|
||||
|
||||
Args:
|
||||
error_line: Error message to log.
|
||||
halt_render: Whether to stop the render due to this error.
|
||||
"""
|
||||
logger.error(error_line)
|
||||
self.errors.append(error_line)
|
||||
if halt_render:
|
||||
self.stop(is_error=True)
|
||||
|
||||
def stop(self, is_error=False):
|
||||
"""Stop the render job.
|
||||
|
||||
Args:
|
||||
is_error: Whether this stop is due to an error.
|
||||
"""
|
||||
logger.debug(f"Stopping {self}")
|
||||
|
||||
# cleanup status
|
||||
@@ -457,9 +522,19 @@ class BaseRenderWorker(Base):
|
||||
self.__thread.join(timeout=5)
|
||||
|
||||
def time_elapsed(self):
|
||||
"""Get elapsed time for this job.
|
||||
|
||||
Returns:
|
||||
str: Formatted time elapsed string.
|
||||
"""
|
||||
return get_time_elapsed(self.start_time, self.end_time)
|
||||
|
||||
def file_list(self):
|
||||
"""Get list of output files for this job.
|
||||
|
||||
Returns:
|
||||
List[str]: List of output file paths.
|
||||
"""
|
||||
try:
|
||||
job_dir = os.path.dirname(self.output_path)
|
||||
file_list = [
|
||||
@@ -473,6 +548,11 @@ class BaseRenderWorker(Base):
|
||||
return []
|
||||
|
||||
def json(self):
|
||||
"""Convert worker to JSON-serializable dictionary.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Dictionary representation of worker data.
|
||||
"""
|
||||
job_dict = {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
@@ -502,8 +582,10 @@ class BaseRenderWorker(Base):
|
||||
|
||||
# convert to json and back to auto-convert dates to iso format
|
||||
def date_serializer(o):
|
||||
"""Serialize datetime objects to ISO format."""
|
||||
if isinstance(o, datetime):
|
||||
return o.isoformat()
|
||||
return None
|
||||
|
||||
json_convert = json.dumps(job_dict, default=date_serializer)
|
||||
worker_json = json.loads(json_convert)
|
||||
@@ -511,6 +593,15 @@ class BaseRenderWorker(Base):
|
||||
|
||||
|
||||
def timecode_to_frames(timecode, frame_rate):
|
||||
"""Convert timecode string to frame number.
|
||||
|
||||
Args:
|
||||
timecode: Timecode in format HH:MM:SS:FF.
|
||||
frame_rate: Frames per second.
|
||||
|
||||
Returns:
|
||||
int: Frame number corresponding to timecode.
|
||||
"""
|
||||
e = [int(x) for x in timecode.split(':')]
|
||||
seconds = (((e[0] * 60) + e[1] * 60) + e[2])
|
||||
frames = (seconds * frame_rate) + e[-1] + 1
|
||||
|
||||
@@ -3,12 +3,13 @@ import os
|
||||
import shutil
|
||||
import threading
|
||||
import concurrent.futures
|
||||
from typing import Type
|
||||
from pathlib import Path
|
||||
from typing import Type, List, Dict, Any, Optional
|
||||
|
||||
from src.engines.core.base_engine import BaseRenderEngine
|
||||
from src.engines.blender.blender_engine import Blender
|
||||
from src.engines.ffmpeg.ffmpeg_engine import FFMPEG
|
||||
from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu
|
||||
from src.utilities.misc_helper import current_system_os, current_system_cpu
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
@@ -19,17 +20,30 @@ class EngineManager:
|
||||
if possible.
|
||||
"""
|
||||
|
||||
engines_path = None
|
||||
download_tasks = []
|
||||
engines_path: Optional[str] = None
|
||||
download_tasks: List[Any] = []
|
||||
|
||||
@staticmethod
|
||||
def supported_engines() -> list[type[BaseRenderEngine]]:
|
||||
"""Return list of supported engine classes.
|
||||
|
||||
Returns:
|
||||
List[Type[BaseRenderEngine]]: List of available engine classes.
|
||||
"""
|
||||
return ENGINE_CLASSES
|
||||
|
||||
# --- Installed Engines ---
|
||||
|
||||
@classmethod
|
||||
def engine_class_for_project_path(cls, path) -> type[BaseRenderEngine]:
|
||||
def engine_class_for_project_path(cls, path: str) -> Type[BaseRenderEngine]:
|
||||
"""Find engine class that can handle the given project file.
|
||||
|
||||
Args:
|
||||
path: Path to project file.
|
||||
|
||||
Returns:
|
||||
Type[BaseRenderEngine]: Engine class that can handle the file.
|
||||
"""
|
||||
_, extension = os.path.splitext(path)
|
||||
extension = extension.lower().strip('.')
|
||||
for engine_class in cls.supported_engines():
|
||||
@@ -40,7 +54,15 @@ class EngineManager:
|
||||
return undefined_renderer_support[0]
|
||||
|
||||
@classmethod
|
||||
def engine_class_with_name(cls, engine_name: str) -> type[BaseRenderEngine] | None:
|
||||
def engine_class_with_name(cls, engine_name: str) -> Optional[Type[BaseRenderEngine]]:
|
||||
"""Find engine class by name.
|
||||
|
||||
Args:
|
||||
engine_name: Name of engine to find.
|
||||
|
||||
Returns:
|
||||
Optional[Type[BaseRenderEngine]]: Engine class if found, None otherwise.
|
||||
"""
|
||||
for obj in cls.supported_engines():
|
||||
if obj.name().lower() == engine_name.lower():
|
||||
return obj
|
||||
@@ -48,13 +70,34 @@ class EngineManager:
|
||||
|
||||
@classmethod
|
||||
def get_latest_engine_instance(cls, engine_class: Type[BaseRenderEngine]) -> BaseRenderEngine:
|
||||
"""Create instance of latest installed engine version.
|
||||
|
||||
Args:
|
||||
engine_class: Engine class to instantiate.
|
||||
|
||||
Returns:
|
||||
BaseRenderEngine: Instance of engine with latest version.
|
||||
"""
|
||||
newest = cls.newest_installed_engine_data(engine_class.name())
|
||||
engine = engine_class(newest["path"])
|
||||
return engine
|
||||
|
||||
@classmethod
|
||||
def get_installed_engine_data(cls, filter_name=None, include_corrupt=False, ignore_system=False) -> list:
|
||||
def get_installed_engine_data(cls, filter_name: Optional[str] = None, include_corrupt: bool = False,
|
||||
ignore_system: bool = False) -> List[Dict[str, Any]]:
|
||||
"""Get data about installed render engines.
|
||||
|
||||
Args:
|
||||
filter_name: Optional engine name to filter by.
|
||||
include_corrupt: Whether to include potentially corrupted installations.
|
||||
ignore_system: Whether to ignore system-installed engines.
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: List of installed engine data.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If engines path is not set.
|
||||
"""
|
||||
if not cls.engines_path:
|
||||
raise FileNotFoundError("Engine path is not set")
|
||||
|
||||
@@ -79,11 +122,9 @@ class EngineManager:
|
||||
binary_name = eng.binary_names.get(result_dict['system_os'], binary_name)
|
||||
|
||||
# Find the path to the binary file
|
||||
path = next(
|
||||
(os.path.join(root, binary_name) for root, _, files in
|
||||
os.walk(system_safe_path(os.path.join(cls.engines_path, directory))) if binary_name in files),
|
||||
None
|
||||
)
|
||||
search_root = cls.engines_path / directory
|
||||
match = next((p for p in search_root.rglob(binary_name) if p.is_file()), None)
|
||||
path = str(match) if match else None
|
||||
result_dict['path'] = path
|
||||
|
||||
# fetch version number from binary - helps detect corrupted downloads - disabled due to perf issues
|
||||
@@ -133,7 +174,8 @@ class EngineManager:
|
||||
# --- Check for Updates ---
|
||||
|
||||
@classmethod
|
||||
def update_all_engines(cls):
|
||||
def update_all_engines(cls) -> None:
|
||||
"""Check for and download updates for all downloadable engines."""
|
||||
for engine in cls.downloadable_engines():
|
||||
update_available = cls.is_engine_update_available(engine)
|
||||
if update_available:
|
||||
@@ -142,12 +184,33 @@ class EngineManager:
|
||||
|
||||
@classmethod
|
||||
def all_version_data_for_engine(cls, engine_name:str, include_corrupt=False, ignore_system=False) -> list:
|
||||
"""Get all version data for a specific engine.
|
||||
|
||||
Args:
|
||||
engine_name: Name of engine to query.
|
||||
include_corrupt: Whether to include corrupt installations.
|
||||
ignore_system: Whether to ignore system installations.
|
||||
|
||||
Returns:
|
||||
list: Sorted list of engine version data (newest first).
|
||||
"""
|
||||
versions = cls.get_installed_engine_data(filter_name=engine_name, include_corrupt=include_corrupt, ignore_system=ignore_system)
|
||||
sorted_versions = sorted(versions, key=lambda x: x['version'], reverse=True)
|
||||
return sorted_versions
|
||||
|
||||
@classmethod
|
||||
def newest_installed_engine_data(cls, engine_name:str, system_os=None, cpu=None, ignore_system=None) -> list:
|
||||
"""Get newest installed engine data for specific platform.
|
||||
|
||||
Args:
|
||||
engine_name: Name of engine to query.
|
||||
system_os: Operating system to filter by (defaults to current).
|
||||
cpu: CPU architecture to filter by (defaults to current).
|
||||
ignore_system: Whether to ignore system installations.
|
||||
|
||||
Returns:
|
||||
list: Newest engine data or empty list if not found.
|
||||
"""
|
||||
system_os = system_os or current_system_os()
|
||||
cpu = cpu or current_system_cpu()
|
||||
|
||||
@@ -160,7 +223,19 @@ class EngineManager:
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def is_version_installed(cls, engine_name:str, version, system_os=None, cpu=None, ignore_system=False):
|
||||
def is_version_installed(cls, engine_name:str, version:str, system_os=None, cpu=None, ignore_system=False):
|
||||
"""Check if specific engine version is installed.
|
||||
|
||||
Args:
|
||||
engine_name: Name of engine to check.
|
||||
version: Version string to check.
|
||||
system_os: Operating system to check (defaults to current).
|
||||
cpu: CPU architecture to check (defaults to current).
|
||||
ignore_system: Whether to ignore system installations.
|
||||
|
||||
Returns:
|
||||
Engine data if found, False otherwise.
|
||||
"""
|
||||
system_os = system_os or current_system_os()
|
||||
cpu = cpu or current_system_cpu()
|
||||
|
||||
@@ -206,6 +281,11 @@ class EngineManager:
|
||||
|
||||
@classmethod
|
||||
def downloadable_engines(cls):
|
||||
"""Get list of engines that support downloading.
|
||||
|
||||
Returns:
|
||||
List[Type[BaseRenderEngine]]: Engines with downloader capability.
|
||||
"""
|
||||
return [engine for engine in cls.supported_engines() if hasattr(engine, "downloader") and engine.downloader()]
|
||||
|
||||
@classmethod
|
||||
@@ -272,10 +352,41 @@ class EngineManager:
|
||||
|
||||
@classmethod
|
||||
def active_downloads(cls) -> list:
|
||||
"""Get list of currently active download tasks.
|
||||
|
||||
Returns:
|
||||
list: List of active EngineDownloadWorker threads.
|
||||
"""
|
||||
return [x for x in cls.download_tasks if x.is_alive()]
|
||||
|
||||
@classmethod
|
||||
def create_worker(cls, engine_name, input_path, output_path, engine_version=None, args=None, parent=None, name=None):
|
||||
def create_worker(cls, engine_name: str, input_path: Path, output_path: Path, engine_version=None, args=None, parent=None, name=None):
|
||||
"""
|
||||
Create and return a worker instance for a specific engine.
|
||||
|
||||
This resolves the appropriate engine binary/path for the requested engine and version,
|
||||
downloading the engine if necessary (when a specific version is requested and not found
|
||||
locally). The returned worker is constructed with string paths for compatibility with
|
||||
worker implementations that expect `str` rather than `Path`.
|
||||
|
||||
Args:
|
||||
engine_name: The engine name used to resolve an engine class and its worker.
|
||||
input_path: Path to the input file/folder for the worker to process.
|
||||
output_path: Path where the worker should write output.
|
||||
engine_version: Optional engine version to use. If `None` or `'latest'`, the newest
|
||||
installed version is used. If a specific version is provided and not installed,
|
||||
the engine will be downloaded.
|
||||
args: Optional arguments passed through to the worker (engine-specific).
|
||||
parent: Optional Qt/GUI parent object passed through to the worker constructor.
|
||||
name: Optional name/label passed through to the worker constructor.
|
||||
|
||||
Returns:
|
||||
An instance of the engine-specific worker class.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If no versions of the engine are installed, if the requested
|
||||
version cannot be found or downloaded, or if the engine path cannot be resolved.
|
||||
"""
|
||||
|
||||
worker_class = cls.engine_class_with_name(engine_name).worker_class()
|
||||
|
||||
@@ -306,7 +417,7 @@ class EngineManager:
|
||||
if not engine_path:
|
||||
raise FileNotFoundError(f"Cannot find requested engine version {engine_version}")
|
||||
|
||||
return worker_class(input_path=input_path, output_path=output_path, engine_path=engine_path, args=args,
|
||||
return worker_class(input_path=str(input_path), output_path=str(output_path), engine_path=engine_path, args=args,
|
||||
parent=parent, name=name)
|
||||
|
||||
|
||||
@@ -323,6 +434,14 @@ class EngineDownloadWorker(threading.Thread):
|
||||
cpu (str, optional): Requested CPU architecture. Defaults to system CPU type.
|
||||
"""
|
||||
def __init__(self, engine, version, system_os=None, cpu=None):
|
||||
"""Initialize download worker for specific engine version.
|
||||
|
||||
Args:
|
||||
engine: Name of engine to download.
|
||||
version: Version of engine to download.
|
||||
system_os: Target operating system (defaults to current).
|
||||
cpu: Target CPU architecture (defaults to current).
|
||||
"""
|
||||
super().__init__()
|
||||
self.engine = engine
|
||||
self.version = version
|
||||
@@ -331,9 +450,19 @@ class EngineDownloadWorker(threading.Thread):
|
||||
self.percent_complete = 0
|
||||
|
||||
def _update_progress(self, current_progress):
|
||||
"""Update download progress.
|
||||
|
||||
Args:
|
||||
current_progress: Current download progress percentage (0-100).
|
||||
"""
|
||||
self.percent_complete = current_progress
|
||||
|
||||
def run(self):
|
||||
def run(self):
|
||||
"""Execute the download process.
|
||||
|
||||
Checks if engine version already exists, then downloads if not found.
|
||||
Handles cleanup and error reporting.
|
||||
"""
|
||||
try:
|
||||
existing_download = EngineManager.is_version_installed(self.engine, self.version, self.system_os, self.cpu,
|
||||
ignore_system=True)
|
||||
|
||||
@@ -1,34 +1,35 @@
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from pubsub import pub
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm.exc import DetachedInstanceError
|
||||
|
||||
from src.engines.core.base_worker import Base
|
||||
from src.engines.core.base_worker import Base, BaseRenderWorker
|
||||
from src.utilities.status_utils import RenderStatus
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class JobNotFoundError(Exception):
|
||||
def __init__(self, job_id, *args):
|
||||
def __init__(self, job_id: str, *args: Any) -> None:
|
||||
super().__init__(args)
|
||||
self.job_id = job_id
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return f"Cannot find job with ID: {self.job_id}"
|
||||
|
||||
|
||||
class RenderQueue:
|
||||
engine = None
|
||||
session = None
|
||||
job_queue = []
|
||||
maximum_renderer_instances = {'blender': 1, 'aerender': 1, 'ffmpeg': 4}
|
||||
last_saved_counts = {}
|
||||
is_running = False
|
||||
engine: Optional[create_engine] = None
|
||||
session: Optional[sessionmaker] = None
|
||||
job_queue: List[BaseRenderWorker] = []
|
||||
maximum_renderer_instances: Dict[str, int] = {'blender': 1, 'aerender': 1, 'ffmpeg': 4}
|
||||
last_saved_counts: Dict[str, int] = {}
|
||||
is_running: bool = False
|
||||
|
||||
# --------------------------------------------
|
||||
# Render Queue Evaluation:
|
||||
@@ -116,9 +117,9 @@ class RenderQueue:
|
||||
# --------------------------------------------
|
||||
|
||||
@classmethod
|
||||
def load_state(cls, database_directory):
|
||||
def load_state(cls, database_directory: Path):
|
||||
if not cls.engine:
|
||||
cls.engine = create_engine(f"sqlite:///{os.path.join(database_directory, 'database.db')}")
|
||||
cls.engine = create_engine(f"sqlite:///{database_directory / 'database.db'}")
|
||||
Base.metadata.create_all(cls.engine)
|
||||
cls.session = sessionmaker(bind=cls.engine)()
|
||||
from src.engines.core.base_worker import BaseRenderWorker
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import os.path
|
||||
import socket
|
||||
from pathlib import Path
|
||||
|
||||
import psutil
|
||||
from PyQt6.QtCore import QThread, pyqtSignal, Qt, pyqtSlot
|
||||
@@ -12,8 +12,8 @@ from PyQt6.QtWidgets import (
|
||||
from src.api.server_proxy import RenderServerProxy
|
||||
from src.engines.engine_manager import EngineManager
|
||||
from src.ui.engine_help_window import EngineHelpViewer
|
||||
from src.utilities.zeroconf_server import ZeroconfServer
|
||||
from src.utilities.misc_helper import COMMON_RESOLUTIONS, COMMON_FRAME_RATES
|
||||
from src.utilities.zeroconf_server import ZeroconfServer
|
||||
|
||||
|
||||
class NewRenderJobForm(QWidget):
|
||||
@@ -305,6 +305,8 @@ class NewRenderJobForm(QWidget):
|
||||
self.tabs.setCurrentIndex(0)
|
||||
|
||||
def update_job_count(self, changed_item=None):
|
||||
checked = 1
|
||||
if self.cameras_group.enabled:
|
||||
checked = 0
|
||||
total = self.cameras_list.count()
|
||||
|
||||
@@ -365,8 +367,7 @@
self.process_label.setHidden(False)
self.toggle_engine_enablement(False)

output_name, _ = os.path.splitext(os.path.basename(self.scene_file_input.text()))
output_name = output_name.replace(' ', '_')
output_name = Path(self.scene_file_input.text()).stem.replace(' ', '_')
self.job_name_input.setText(output_name)
file_name = self.scene_file_input.text()

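Path.stem folds the basename/splitext pair above into a single call. A quick sketch with a hypothetical scene path:

    from pathlib import Path

    scene_file = "/renders/My Scene v2.blend"            # hypothetical input
    output_name = Path(scene_file).stem.replace(' ', '_')
    print(output_name)                                   # My_Scene_v2
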
@@ -601,13 +602,13 @@ class SubmitWorker(QThread):
# presubmission tasks - use local installs
engine_class = EngineManager.engine_class_with_name(self.window.engine_type.currentText().lower())
latest_engine = EngineManager.get_latest_engine_instance(engine_class)
input_path = latest_engine.perform_presubmission_tasks(input_path)
input_path = Path(latest_engine.perform_presubmission_tasks(input_path))
# submit
err_msg = ""
result = self.window.server_proxy.post_job_to_server(file_path=input_path, job_data=job_json,
callback=create_callback)
if not (result and result.ok):
err_msg = f"Error posting job to server: {result.message}"
err_msg = f"Error posting job to server: {result.text}"

self.message_signal.emit(err_msg)


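Assuming post_job_to_server returns a requests.Response (not confirmed by this hunk), .ok and .text are the standard attributes for the status check and the error body, which would explain the .message to .text change above. A hedged sketch with a hypothetical helper name:

    from typing import Optional

    import requests

    def post_error_message(result: Optional[requests.Response]) -> str:
        # A requests.Response has no .message attribute; .ok reflects the HTTP
        # status and .text carries the response body.
        if result is not None and result.ok:
            return ""
        return f"Error posting job to server: {result.text if result is not None else 'no response'}"
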
@@ -7,6 +7,7 @@ import os
import sys
import threading
import time
from typing import List, Dict, Any, Optional

import PIL
import humanize
@@ -52,12 +53,12 @@ class MainWindow(QMainWindow):
super().__init__()

# Load the queue
self.job_list_view = None
self.server_info_ram = None
self.server_info_cpu = None
self.server_info_os = None
self.server_info_gpu = None
self.server_info_hostname = None
self.job_list_view: Optional[QTableWidget] = None
self.server_info_ram: Optional[str] = None
self.server_info_cpu: Optional[str] = None
self.server_info_os: Optional[str] = None
self.server_info_gpu: Optional[List[Dict[str, Any]]] = None
self.server_info_hostname: Optional[str] = None
self.engine_browser_window = None
self.server_info_group = None
self.current_hostname = None
@@ -113,7 +114,12 @@ class MainWindow(QMainWindow):
# Pick default job
self.job_picked()

def setup_ui(self, main_layout):
def setup_ui(self, main_layout: QVBoxLayout) -> None:
"""Setup the main user interface layout.

Args:
main_layout: The main layout container for the UI widgets.
"""

# Servers
server_list_group = QGroupBox("Available Servers")
@@ -193,6 +199,11 @@ class MainWindow(QMainWindow):
main_layout.addLayout(right_layout)

def closeEvent(self, event):
"""Handle window close event with job running confirmation.

Args:
event: The close event triggered by user.
"""
running_jobs = len(RenderQueue.running_jobs())
if running_jobs:
reply = QMessageBox.question(self, "Running Jobs",
@@ -205,9 +216,10 @@ class MainWindow(QMainWindow):
else:
event.ignore()

# -- Server Code -- #
# -- Server Code -- #

def refresh_job_list(self):
"""Refresh the job list display."""
self.job_list_view.clearContents()
self.bg_update_thread.needs_update = True

@@ -279,7 +291,7 @@ class MainWindow(QMainWindow):
self.server_info_gpu.setText(gpu_info)

def update_ui_data(self):

"""Update UI data with current server and job information."""
self.update_servers()

if not self.current_server_proxy:
@@ -315,7 +327,7 @@ class MainWindow(QMainWindow):
for col, item in enumerate(items):
self.job_list_view.setItem(row, col, item)

# -- Job Code -- #
# -- Job Code -- #
def job_picked(self):

def fetch_preview(job_id):
@@ -381,6 +393,11 @@ class MainWindow(QMainWindow):
self.topbar.actions_call['Open Files'].setVisible(False)

def selected_job_ids(self):
"""Get list of selected job IDs from the job list.

Returns:
List[str]: List of selected job ID strings.
"""
try:
selected_rows = self.job_list_view.selectionModel().selectedRows()
job_ids = []
@@ -392,10 +409,15 @@ class MainWindow(QMainWindow):
return []


# -- Image Code -- #
# -- Image Code -- #

def load_image_path(self, image_path):
# Load and set the image using QPixmap
"""Load and display an image from file path.

Args:
image_path: Path to the image file to load.
"""
# Load and set image using QPixmap
try:
pixmap = QPixmap(image_path)
if not pixmap:
@@ -504,7 +526,7 @@ class MainWindow(QMainWindow):
"New Job", f"{resources_directory}/AddProduct.png", self.new_job)
self.addToolBar(Qt.ToolBarArea.TopToolBarArea, self.topbar)

# -- Toolbar Buttons -- #
# -- Toolbar Buttons -- #

def open_console_window(self) -> None:
"""
@@ -519,8 +541,9 @@ class MainWindow(QMainWindow):
self.engine_browser_window.show()

def job_logs(self) -> None:
"""
Event handler for the "Logs" button.
"""Open log viewer for selected job.

Opens a log viewer window showing the logs for the currently selected job.
"""
selected_job_ids = self.selected_job_ids()
if selected_job_ids:
@@ -529,8 +552,10 @@ class MainWindow(QMainWindow):
self.log_viewer_window.show()

def stop_job(self, event):
"""
Event handler for the Stop Job button
"""Stop selected render jobs with user confirmation.

Args:
event: The button click event.
"""
job_ids = self.selected_job_ids()
if not job_ids:
@@ -540,7 +565,7 @@ class MainWindow(QMainWindow):
job = next((job for job in self.current_server_proxy.get_all_jobs() if job.get('id') == job_ids[0]), None)
if job:
display_name = job.get('name', os.path.basename(job.get('input_path', '')))
message = f"Are you sure you want to stop the job:\n{display_name}?"
message = f"Are you sure you want to stop job: {display_name}?"
else:
return # Job not found, handle this case as needed
else:
@@ -556,8 +581,10 @@ class MainWindow(QMainWindow):
self.refresh_job_list()

def delete_job(self, event):
"""
Event handler for the Delete Job button
"""Delete selected render jobs with user confirmation.

Args:
event: The button click event.
"""
job_ids = self.selected_job_ids()
if not job_ids:
@@ -623,6 +650,11 @@ class BackgroundUpdater(QThread):
self.needs_update = True

def run(self):
"""Main background thread execution loop.

Continuously fetches server and job data, updating the main UI
every second or when updates are needed.
"""
try:
last_run = 0
while True:
@@ -632,7 +664,7 @@ class BackgroundUpdater(QThread):
self.window.found_servers = [x for x in self.window.found_servers if
ZeroconfServer.get_hostname_properties(x)['api_version'] == API_VERSION]
if self.window.current_server_proxy:
self.window.job_data[self.window.current_server_proxy.hostname] =\
self.window.job_data[self.window.current_server_proxy.hostname] = \
self.window.current_server_proxy.get_all_jobs(ignore_token=False)
self.needs_update = False
self.updated_signal.emit()

@@ -15,7 +15,7 @@ from PyQt6.QtWidgets import QApplication, QMainWindow, QListWidget, QListWidgetI
from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager
from src.utilities.config import Config
from src.utilities.misc_helper import launch_url, system_safe_path
from src.utilities.misc_helper import launch_url
from src.version import APP_AUTHOR, APP_NAME

settings = QSettings(APP_AUTHOR, APP_NAME)
@@ -59,9 +59,7 @@ class SettingsWindow(QMainWindow):
self.check_for_new_engines_button = None

if not EngineManager.engines_path: # fix issue where sometimes path was not set
EngineManager.engines_path = system_safe_path(
os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
'engines')))
EngineManager.engines_path = Path(Config.upload_folder).expanduser() / "engines"

self.installed_engines_table = None


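The fallback above mirrors the default built in server.py. A minimal pathlib-only sketch of building and creating that directory (the helper name and the mkdir call are assumptions; the diff itself keeps os.makedirs on the server side, and the upload folder shown is hypothetical):

    from pathlib import Path

    def default_engines_path(upload_folder: str) -> Path:
        engines_path = Path(upload_folder).expanduser() / "engines"
        engines_path.mkdir(parents=True, exist_ok=True)  # pathlib equivalent of os.makedirs(..., exist_ok=True)
        return engines_path

    print(default_engines_path("~/zordon_uploads"))  # hypothetical upload folder
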
@@ -1,4 +1,6 @@
import os
from pathlib import Path

import yaml
from src.utilities.misc_helper import current_system_os, copy_directory_contents

@@ -23,7 +25,7 @@ class Config:
with open(config_path, 'r') as ymlfile:
cfg = yaml.safe_load(ymlfile)

cls.upload_folder = os.path.expanduser(cfg.get('upload_folder', cls.upload_folder))
cls.upload_folder = str(Path(cfg.get('upload_folder', cls.upload_folder)).expanduser())
cls.update_engines_on_launch = cfg.get('update_engines_on_launch', cls.update_engines_on_launch)
cls.max_content_path = cfg.get('max_content_path', cls.max_content_path)
cls.server_log_level = cfg.get('server_log_level', cls.server_log_level)
@@ -37,14 +39,14 @@ class Config:
cls.download_timeout_seconds = cfg.get('download_timeout_seconds', cls.download_timeout_seconds)

@classmethod
def config_dir(cls):
def config_dir(cls) -> Path:
# Set up the config path
if current_system_os() == 'macos':
local_config_path = os.path.expanduser('~/Library/Application Support/Zordon')
local_config_path = Path('~/Library/Application Support/Zordon').expanduser()
elif current_system_os() == 'windows':
local_config_path = os.path.join(os.environ['APPDATA'], 'Zordon')
local_config_path = Path(os.environ['APPDATA']) / 'Zordon'
else:
local_config_path = os.path.expanduser('~/.config/Zordon')
local_config_path = Path('~/.config/Zordon').expanduser()
return local_config_path

@classmethod
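A self-contained sketch of the per-OS branch above, using platform.system() directly rather than the project's current_system_os() helper (the function name is hypothetical; the paths and the 'Zordon' folder name are taken from the hunk above):

    import os
    import platform
    from pathlib import Path

    def config_dir_sketch() -> Path:
        system = platform.system().lower()
        if system == 'darwin':  # reported as 'macos' by the project's helper
            return Path('~/Library/Application Support/Zordon').expanduser()
        if system == 'windows':
            return Path(os.environ['APPDATA']) / 'Zordon'
        return Path('~/.config/Zordon').expanduser()
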
@@ -61,10 +63,9 @@ class Config:
# Determine the template path
resource_environment_path = os.environ.get('RESOURCEPATH')
if resource_environment_path:
template_path = os.path.join(resource_environment_path, 'config')
template_path = Path(resource_environment_path) / 'config'
else:
template_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'config')
template_path = Path(__file__).resolve().parents[2] / 'config'

# Copy contents from the template to the local configuration directory
copy_directory_contents(template_path, local_config_dir)

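Path.parents indexes upward from the resolved file, so parents[2] reproduces the three nested os.path.dirname() calls it replaces. A quick sketch, assuming the module sits at least three directories deep:

    from pathlib import Path

    here = Path(__file__).resolve()
    # parents[0] is the containing directory, parents[1] its parent and
    # parents[2] the grandparent - the same directory the removed
    # triple-dirname expression produced.
    template_path = here.parents[2] / 'config'
    print(template_path)
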
@@ -9,11 +9,12 @@ import string
import subprocess
import sys
from datetime import datetime
from typing import Optional, List, Dict, Any

logger = logging.getLogger()


def launch_url(url):
def launch_url(url: str) -> None:
logger = logging.getLogger(__name__)

if shutil.which('xdg-open'):
@@ -37,7 +38,7 @@ def launch_url(url):
logger.error(f"Failed to launch URL: {url}. Error: {e}")


def file_exists_in_mounts(filepath):
def file_exists_in_mounts(filepath: str) -> Optional[str]:
"""
Check if a file exists in any mounted directory.
It searches for the file in common mount points like '/Volumes', '/mnt', and '/media'.
@@ -78,7 +79,7 @@ def file_exists_in_mounts(filepath):
return possible_mount_path


def get_time_elapsed(start_time=None, end_time=None):
def get_time_elapsed(start_time: Optional[datetime] = None, end_time: Optional[datetime] = None) -> str:

def strfdelta(tdelta, fmt='%H:%M:%S'):
days = tdelta.days
@@ -105,7 +106,7 @@ def get_time_elapsed(start_time=None, end_time=None):
return elapsed_time_string


def get_file_size_human(file_path):
def get_file_size_human(file_path: str) -> str:
size_in_bytes = os.path.getsize(file_path)

# Convert size to a human-readable format
@@ -121,26 +122,19 @@ def get_file_size_human(file_path):
return f"{size_in_bytes / 1024 ** 4:.2f} TB"


# Convert path to the appropriate format for the current platform
def system_safe_path(path):
if platform.system().lower() == "windows":
return os.path.normpath(path)
return path.replace("\\", "/")


def current_system_os():
def current_system_os() -> str:
return platform.system().lower().replace('darwin', 'macos')


def current_system_os_version():
return platform.mac_ver()[0] if current_system_os() == 'macos' else platform.release().lower()
def current_system_os_version() -> str:
return platform.release()


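system_safe_path can be retired because pathlib normalises separators for the host OS on its own. A short sketch of that behaviour (file names are hypothetical):

    from pathlib import Path, PureWindowsPath

    p = Path("renders") / "output" / "frame_0001.png"
    print(p)  # renders/output/frame_0001.png on POSIX, renders\output\frame_0001.png on Windows

    # Windows-flavoured paths accept either separator style when parsing:
    print(PureWindowsPath("renders/output\\frame_0001.png").parts)  # ('renders', 'output', 'frame_0001.png')
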
def current_system_cpu():
# convert all x86 64 to "x64"
return platform.machine().lower().replace('amd64', 'x64').replace('x86_64', 'x64')
def current_system_cpu() -> str:
return platform.machine().lower().replace('amd64', 'x64')

def current_system_cpu_brand():

def current_system_cpu_brand() -> str:
"""Fast cross-platform CPU brand string"""
if sys.platform.startswith('darwin'): # macOS
try:
@@ -175,28 +169,21 @@ def current_system_cpu_brand():
# Ultimate fallback
return platform.processor() or 'Unknown CPU'

def resources_dir():
resource_environment_path = os.environ.get('RESOURCEPATH', None)
if resource_environment_path: # running inside resource bundle
return os.path.join(resource_environment_path, 'resources')
else:
return os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'resources')
def resources_dir() -> str:
return os.path.join(os.path.dirname(__file__), '..', '..', 'resources')


def copy_directory_contents(src_dir, dst_dir):
"""
Copy the contents of the source directory (src_dir) to the destination directory (dst_dir).
"""
def copy_directory_contents(src_dir: str, dst_dir: str) -> None:
for item in os.listdir(src_dir):
src_path = os.path.join(src_dir, item)
dst_path = os.path.join(dst_dir, item)
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path, dirs_exist_ok=True)
shutil.copytree(src_path, dst_path)
else:
shutil.copy2(src_path, dst_path)


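One behavioural note on the helper above: shutil.copytree() raises FileExistsError when the destination directory already exists unless dirs_exist_ok=True (Python 3.8+) is passed. A re-runnable sketch that keeps the flag (the function name is hypothetical and the pathlib rewrite is the editor's, not part of the diff):

    import shutil
    from pathlib import Path

    def copy_directory_contents_sketch(src_dir: Path, dst_dir: Path) -> None:
        dst_dir.mkdir(parents=True, exist_ok=True)
        for item in src_dir.iterdir():
            target = dst_dir / item.name
            if item.is_dir():
                shutil.copytree(item, target, dirs_exist_ok=True)  # tolerate pre-existing targets
            else:
                shutil.copy2(item, target)
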
def check_for_updates(repo_name, repo_owner, app_name, current_version):
def check_for_updates(repo_name: str, repo_owner: str, app_name: str, current_version: str) -> Optional[Dict[str, Any]]:
def get_github_releases(owner, repo):
import requests
url = f"https://api.github.com/repos/{owner}/{repo}/releases"
@@ -223,33 +210,15 @@ def check_for_updates(repo_name, repo_owner, app_name, current_version):
return latest_version
return None

def is_localhost(comparison_hostname):
# this is necessary because socket.gethostname() does not always include '.local' - This is a sanitized comparison
try:
comparison_hostname = comparison_hostname.lower().replace('.local', '')
local_hostname = socket.gethostname().lower().replace('.local', '')
return comparison_hostname == local_hostname
except AttributeError:
return False
def is_localhost(comparison_hostname: str) -> bool:
return comparison_hostname in ['localhost', '127.0.0.1', socket.gethostname()]


def num_to_alphanumeric(num):
# List of possible alphanumeric characters
characters = string.ascii_letters + string.digits

# Make sure number is positive
num = abs(num)

# Convert number to alphanumeric
result = ""
while num > 0:
num, remainder = divmod(num, len(characters))
result += characters[remainder]

return result[::-1] # Reverse the result to get the correct alphanumeric string
def num_to_alphanumeric(num: int) -> str:
return string.ascii_letters[num % 26] + str(num // 26)


def get_gpu_info():
def get_gpu_info() -> List[Dict[str, Any]]:
"""Cross-platform GPU information retrieval"""

def get_windows_gpu_info():