Mirror of https://github.com/blw1138/Zordon.git (synced 2025-12-17 16:58:12 +00:00)

Compare commits: feature/do ... feature/10 (20 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4da03e30a2 | |
| | 4a566ec7c3 | |
| | 085d39fde8 | |
| | d5f1224c33 | |
| | e97e3d74c8 | |
| | 1af4169447 | |
| | ea728f7809 | |
| | a4e6fca73d | |
| | 9aafb5c0fb | |
| | 2548280dcc | |
| | 98ab837057 | |
| | 3fda87935e | |
| | e35a5a689c | |
| | dea7574888 | |
| | a19db9fcf7 | |
| | 80b0adb2ad | |
| | 18873cec6f | |
| | af6d6e1525 | |
| | 8bbf19cb30 | |
| | 6bdb488ce1 | |
.github/workflows/create-executables.yml (vendored, new file, 38 lines)

@@ -0,0 +1,38 @@
+name: Create Executables
+
+on:
+  workflow_dispatch:
+  release:
+    - types: [created]
+
+jobs:
+  pyinstaller-build-windows:
+    runs-on: windows-latest
+    steps:
+      - name: Create Executables (Windows)
+        uses: sayyid5416/pyinstaller@v1
+        with:
+          python_ver: '3.11'
+          spec: 'main.spec'
+          requirements: 'requirements.txt'
+          upload_exe_with_name: 'Zordon'
+  pyinstaller-build-linux:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Create Executables (Linux)
+        uses: sayyid5416/pyinstaller@v1
+        with:
+          python_ver: '3.11'
+          spec: 'main.spec'
+          requirements: 'requirements.txt'
+          upload_exe_with_name: 'Zordon'
+  pyinstaller-build-macos:
+    runs-on: macos-latest
+    steps:
+      - name: Create Executables (macOS)
+        uses: sayyid5416/pyinstaller@v1
+        with:
+          python_ver: '3.11'
+          spec: 'main.spec'
+          requirements: 'requirements.txt'
+          upload_exe_with_name: 'Zordon'
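For a local build that mirrors what each CI runner does, something along these lines should work (a sketch only; it assumes Python 3.11 with `main.spec` and `requirements.txt` at the repository root, exactly the inputs the workflow passes to the action):

```python
# Rough local equivalent of the CI job: install dependencies, then build from the spec file.
import subprocess
import sys

def build_executable():
    # Install the project's pinned dependencies plus PyInstaller itself
    subprocess.run([sys.executable, "-m", "pip", "install", "-r", "requirements.txt", "pyinstaller"], check=True)
    # Build with the same spec file the workflow hands to sayyid5416/pyinstaller
    subprocess.run([sys.executable, "-m", "PyInstaller", "main.spec"], check=True)

if __name__ == "__main__":
    build_executable()
```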
.github/workflows/pylint.yml (vendored, deleted, 23 lines)

@@ -1,23 +0,0 @@
-name: Pylint
-
-on: [push]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.10", "3.11", "3.12"]
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install pylint
-      - name: Analysing the code with pylint
-        run: |
-          pylint $(git ls-files '*.py')
README.md

@@ -2,6 +2,8 @@
 
 A tool designed for small render farms, such as those used in home studios or small businesses, to efficiently manage and run render jobs for Blender, FFMPEG, and other video renderers. It simplifies the process of distributing rendering tasks across multiple available machines, optimizing the rendering workflow for artists, animators, and video professionals.
 
+Notice: This should be considered a beta and is meant for casual / hobbyist use. Do not use in mission-critical environments!
+
 ## Supported Renderers
 
 Zordon supports or plans to support the following renderers:
main.spec (PyInstaller spec)

@@ -26,7 +26,7 @@ a = Analysis(
     runtime_hooks=[],
     excludes=[],
     noarchive=False,
-    optimize=0,
+    optimize=1,  # fyi: optim level 2 breaks on windows
 )
 pyz = PYZ(a.pure)
 
@@ -40,7 +40,7 @@ if platform.system() == 'Darwin':  # macOS
     name=APP_NAME,
     debug=False,
     bootloader_ignore_signals=False,
-    strip=False,
+    strip=True,
     upx=True,
     console=False,
     disable_windowed_traceback=False,
@@ -87,7 +87,7 @@ elif platform.system() == 'Windows':
     name=APP_NAME,
     debug=False,
     bootloader_ignore_signals=False,
-    strip=False,
+    strip=True,
     upx=True,
     console=False,
     disable_windowed_traceback=False,
@@ -108,7 +108,7 @@ else:  # linux
     name=APP_NAME,
     debug=False,
     bootloader_ignore_signals=False,
-    strip=False,
+    strip=True,
     upx=True,
     console=False,
     disable_windowed_traceback=False,
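For context on the `optimize` change above: the value maps to Python's bytecode optimization levels, where level 1 drops `assert` statements and level 2 additionally strips docstrings. Stripping docstrings is the kind of thing that can break libraries which introspect them at runtime, which is one plausible reading of the "level 2 breaks on windows" comment (the diff itself does not say why). A small, self-contained illustration using the built-in `compile()`:

```python
# Shows what each bytecode optimization level removes (illustrative only).
source = '''
def greet():
    """Say hello."""
    assert True, "never fails"
    return "hello"
'''

for level in (0, 1, 2):
    namespace = {}
    exec(compile(source, "<demo>", "exec", optimize=level), namespace)
    print(f"optimize={level}: docstring={namespace['greet'].__doc__!r}")
# optimize=0 and 1 keep the docstring; optimize=2 reports None
```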
requirements.txt

@@ -35,3 +35,4 @@ attrs>=23.2.0
 lxml>=5.1.0
 click>=8.1.7
 requests_toolbelt>=1.0.0
+pyinstaller_versionfile>=2.1.1
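The newly added `pyinstaller_versionfile` package is typically used to generate a Windows version-info resource that a PyInstaller spec can point at; a hedged sketch of that usage (the metadata values are placeholders, not taken from this repository):

```python
# Sketch: generate a Windows version-info file for a PyInstaller build.
# All metadata values below are illustrative placeholders.
import pyinstaller_versionfile

pyinstaller_versionfile.create_versionfile(
    output_file="versionfile.txt",  # a spec could reference this via EXE(..., version="versionfile.txt")
    version="0.1.0",
    product_name="Zordon",
)
```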
Flask API server module

@@ -10,143 +10,87 @@ import ssl
 import tempfile
 import time
 from datetime import datetime
-from zipfile import ZipFile
 
 import psutil
 import yaml
-from flask import Flask, request, send_file, after_this_request, Response, redirect, url_for, abort
+from flask import Flask, request, send_file, after_this_request, Response, redirect, url_for
 from sqlalchemy.orm.exc import DetachedInstanceError
 
 from src.api.add_job_helpers import handle_uploaded_project_files, process_zipped_project
 from src.api.preview_manager import PreviewManager
 from src.distributed_job_manager import DistributedJobManager
-from src.engines.core.base_worker import string_to_status, RenderStatus
 from src.engines.engine_manager import EngineManager
 from src.render_queue import RenderQueue, JobNotFoundError
-from src.utilities.benchmark import cpu_benchmark, disk_io_benchmark
 from src.utilities.config import Config
 from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \
     current_system_os_version, num_to_alphanumeric
-from src.utilities.zeroconf_server import ZeroconfServer
+from src.utilities.status_utils import string_to_status
 
 logger = logging.getLogger()
 server = Flask(__name__)
 ssl._create_default_https_context = ssl._create_unverified_context  # disable SSL for downloads
 
-categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
-              RenderStatus.COMPLETED, RenderStatus.CANCELLED]
-
-
-# -- Error Handlers --
-@server.errorhandler(JobNotFoundError)
-def handle_job_not_found(job_error):
-    return str(job_error), 400
-
-
-@server.errorhandler(DetachedInstanceError)
-def handle_detached_instance(error):
-    # logger.debug(f"detached instance: {error}")
-    return "Unavailable", 503
-
-
-@server.errorhandler(Exception)
-def handle_general_error(general_error):
-    err_msg = f"Server error: {general_error}"
-    logger.error(err_msg)
-    return err_msg, 500
-
-
-# -- Jobs --
-def sorted_jobs(all_jobs, sort_by_date=True):
-    if not sort_by_date:
-        sorted_job_list = []
-        if all_jobs:
-            for status_category in categories:
-                found_jobs = [x for x in all_jobs if x.status == status_category.value]
-                if found_jobs:
-                    sorted_found_jobs = sorted(found_jobs, key=lambda d: d.date_created, reverse=True)
-                    sorted_job_list.extend(sorted_found_jobs)
-    else:
-        sorted_job_list = sorted(all_jobs, key=lambda d: d.date_created, reverse=True)
-    return sorted_job_list
-
-
+def start_server(hostname=None):
+    # get hostname
+    if not hostname:
+        local_hostname = socket.gethostname()
+        hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")
+
+    # load flask settings
+    server.config['HOSTNAME'] = hostname
+    server.config['PORT'] = int(Config.port_number)
+    server.config['UPLOAD_FOLDER'] = system_safe_path(os.path.expanduser(Config.upload_folder))
+    server.config['MAX_CONTENT_PATH'] = Config.max_content_path
+    server.config['enable_split_jobs'] = Config.enable_split_jobs
+
+    # disable most Flask logging
+    flask_log = logging.getLogger('werkzeug')
+    flask_log.setLevel(Config.flask_log_level.upper())
+
+    logger.debug('Starting API server')
+    try:
+        server.run(host=hostname, port=server.config['PORT'], debug=Config.flask_debug_enable, use_reloader=False,
+                   threaded=True)
+    finally:
+        logger.debug('Stopping API server')
+
+
+# --------------------------------------------
+# Get All Jobs
+# --------------------------------------------
 @server.get('/api/jobs')
 def jobs_json():
-    try:
-        all_jobs = [x.json() for x in RenderQueue.all_jobs()]
-        job_cache_int = int(json.dumps(all_jobs).__hash__())
-        job_cache_token = num_to_alphanumeric(job_cache_int)
-        return {'jobs': all_jobs, 'token': job_cache_token}
-    except DetachedInstanceError as e:
-        raise e
-    except Exception as e:
-        logger.error(f"Error fetching jobs_json: {e}")
-        raise e
+    """Retrieves all jobs from the render queue in JSON format.
+
+    This endpoint fetches all jobs currently in the render queue, converts them to JSON format,
+    and returns them along with a cache token that represents the current state of the job list.
+
+    Returns:
+        dict: A dictionary containing:
+            - 'jobs' (list[dict]): A list of job dictionaries, each representing a job in the queue.
+            - 'token' (str): A cache token generated from the hash of the job list.
+    """
+    all_jobs = [x.json() for x in RenderQueue.all_jobs()]
+    job_cache_int = int(json.dumps(all_jobs).__hash__())
+    job_cache_token = num_to_alphanumeric(job_cache_int)
+    return {'jobs': all_jobs, 'token': job_cache_token}
 
 
 @server.get('/api/jobs_long_poll')
 def long_polling_jobs():
-    try:
-        hash_token = request.args.get('token', None)
-        start_time = time.time()
-        while True:
-            all_jobs = jobs_json()
-            if all_jobs['token'] != hash_token:
-                return all_jobs
-            # Break after 30 seconds to avoid gateway timeout
-            if time.time() - start_time > 30:
-                return {}, 204
-            time.sleep(1)
-    except DetachedInstanceError as e:
-        raise e
-    except Exception as e:
-        logger.error(f"Error fetching long_polling_jobs: {e}")
-        raise e
-
-
-@server.route('/api/job/<job_id>/thumbnail')
-def job_thumbnail(job_id):
-    try:
-        big_thumb = request.args.get('size', False) == "big"
-        video_ok = request.args.get('video_ok', False)
-        found_job = RenderQueue.job_with_id(job_id, none_ok=False)
-
-        # trigger a thumbnail update - just in case
-        PreviewManager.update_previews_for_job(found_job, wait_until_completion=True, timeout=60)
-        previews = PreviewManager.get_previews_for_job(found_job)
-        all_previews_list = previews.get('output', previews.get('input', []))
-
-        video_previews = [x for x in all_previews_list if x['kind'] == 'video']
-        image_previews = [x for x in all_previews_list if x['kind'] == 'image']
-        filtered_list = video_previews if video_previews and video_ok else image_previews
-
-        # todo - sort by size or other metrics here
-        if filtered_list:
-            preview_to_send = filtered_list[0]
-            mime_types = {'image': 'image/jpeg', 'video': 'video/mp4'}
-            file_mime_type = mime_types.get(preview_to_send['kind'], 'unknown')
-            return send_file(preview_to_send['filename'], mimetype=file_mime_type)
-    except Exception as e:
-        logger.error(f'Error getting thumbnail: {e}')
-        return f'Error getting thumbnail: {e}', 500
-    return "No thumbnail available", 404
-
-
-# Get job file routing
-@server.route('/api/job/<job_id>/file/<filename>', methods=['GET'])
-def get_job_file(job_id, filename):
-    found_job = RenderQueue.job_with_id(job_id)
-    try:
-        for full_path in found_job.file_list():
-            if filename in full_path:
-                return send_file(path_or_file=full_path)
-    except FileNotFoundError:
-        abort(404)
+    hash_token = request.args.get('token', None)
+    start_time = time.time()
+    while True:
+        all_jobs = jobs_json()
+        if all_jobs['token'] != hash_token:
+            return all_jobs
+        # Break after 30 seconds to avoid gateway timeout
+        if time.time() - start_time > 30:
+            return {}, 204
+        time.sleep(1)
 
 
 @server.get('/api/jobs/<status_val>')
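For illustration, a minimal client-side loop against the token-based long poll shown above (host and port here are assumptions; nothing in the diff fixes them):

```python
import requests

def watch_jobs(host="localhost", port=8080):
    """Minimal sketch of a client for /api/jobs_long_poll."""
    token = None
    while True:
        resp = requests.get(f"http://{host}:{port}/api/jobs_long_poll",
                            params={"token": token}, timeout=40)
        if resp.status_code == 204:   # server gave up after its 30-second window; just poll again
            continue
        data = resp.json()
        token = data["token"]         # remember the cache token so unchanged lists return 204
        print(f"{len(data['jobs'])} job(s) in queue")
```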
@@ -159,20 +103,33 @@ def filtered_jobs_json(status_val):
     return f'Cannot find jobs with status {status_val}', 400
 
 
-@server.post('/api/job/<job_id>/send_subjob_update_notification')
-def subjob_update_notification(job_id):
-    subjob_details = request.json
-    DistributedJobManager.handle_subjob_update_notification(RenderQueue.job_with_id(job_id), subjob_data=subjob_details)
-    return Response(status=200)
-
-
+# --------------------------------------------
+# Job Details / File Handling
+# --------------------------------------------
 @server.get('/api/job/<job_id>')
-def get_job_status(job_id):
+def get_job_details(job_id):
+    """Retrieves the details of a requested job in JSON format
+
+    Args:
+        job_id (str): The ID of the render job.
+
+    Returns:
+        dict: A JSON representation of the job's details.
+    """
     return RenderQueue.job_with_id(job_id).json()
 
 
 @server.get('/api/job/<job_id>/logs')
 def get_job_logs(job_id):
+    """Retrieves the log file for a specific render job.
+
+    Args:
+        job_id (str): The ID of the render job.
+
+    Returns:
+        Response: The log file's content as plain text, or an empty response if the log file is not found.
+    """
     found_job = RenderQueue.job_with_id(job_id)
     log_path = system_safe_path(found_job.log_path())
     log_data = None
@@ -188,7 +145,7 @@ def get_file_list(job_id):
 
 
 @server.route('/api/job/<job_id>/download')
-def download_file(job_id):
+def download_requested_file(job_id):
 
     requested_filename = request.args.get('filename')
     if not requested_filename:
@@ -203,7 +160,7 @@ def download_file(job_id):
 
 
 @server.route('/api/job/<job_id>/download_all')
-def download_all(job_id):
+def download_all_files(job_id):
     zip_filename = None
 
     @after_this_request
@@ -218,6 +175,7 @@ def download_all(job_id):
     found_job = RenderQueue.job_with_id(job_id)
     output_dir = os.path.dirname(found_job.output_path)
     if os.path.exists(output_dir):
+        from zipfile import ZipFile
         zip_filename = system_safe_path(os.path.join(tempfile.gettempdir(),
                                                      pathlib.Path(found_job.input_path).stem + '.zip'))
         with ZipFile(zip_filename, 'w') as zipObj:
@@ -229,6 +187,10 @@ def download_all(job_id):
     return f'Cannot find project files for job {job_id}', 500
 
 
+# --------------------------------------------
+# System Environment / Status
+# --------------------------------------------
+
 @server.get('/api/presets')
 def presets():
     presets_path = system_safe_path('config/presets.yaml')
@@ -260,13 +222,28 @@ def snapshot():
     return server_data
 
 
-@server.get('/api/_detected_clients')
-def detected_clients():
-    # todo: dev/debug only. Should not ship this - probably.
-    return ZeroconfServer.found_hostnames()
+@server.route('/api/status')
+def status():
+    return {"timestamp": datetime.now().isoformat(),
+            "system_os": current_system_os(),
+            "system_os_version": current_system_os_version(),
+            "system_cpu": current_system_cpu(),
+            "cpu_percent": psutil.cpu_percent(percpu=False),
+            "cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
+            "cpu_count": psutil.cpu_count(logical=False),
+            "memory_total": psutil.virtual_memory().total,
+            "memory_available": psutil.virtual_memory().available,
+            "memory_percent": psutil.virtual_memory().percent,
+            "job_counts": RenderQueue.job_counts(),
+            "hostname": server.config['HOSTNAME'],
+            "port": server.config['PORT']
+            }
 
 
-# New version
+# --------------------------------------------
+# Job Lifecyle (Create, Cancel, Delete)
+# --------------------------------------------
 @server.post('/api/add_job')
 def add_job_handler():
     # Process request data
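A quick way to exercise the consolidated `/api/status` endpoint from a client (a sketch; hostname and port are assumptions):

```python
import requests

# Fetch the machine snapshot the endpoint above assembles from psutil and the render queue
status = requests.get("http://localhost:8080/api/status", timeout=5).json()
print(status["system_os"], status["cpu_percent"], status["memory_percent"], status["job_counts"])
```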
@@ -353,31 +330,9 @@ def delete_job(job_id):
     return f"Error deleting job: {e}", 500
 
 
-@server.get('/api/clear_history')
-def clear_history():
-    RenderQueue.clear_history()
-    return 'success'
-
-
-@server.route('/api/status')
-def status():
-    # Get system info
-    return {"timestamp": datetime.now().isoformat(),
-            "system_os": current_system_os(),
-            "system_os_version": current_system_os_version(),
-            "system_cpu": current_system_cpu(),
-            "cpu_percent": psutil.cpu_percent(percpu=False),
-            "cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
-            "cpu_count": psutil.cpu_count(logical=False),
-            "memory_total": psutil.virtual_memory().total,
-            "memory_available": psutil.virtual_memory().available,
-            "memory_percent": psutil.virtual_memory().percent,
-            "job_counts": RenderQueue.job_counts(),
-            "hostname": server.config['HOSTNAME'],
-            "port": server.config['PORT']
-            }
-
-
+# --------------------------------------------
+# Engine Info and Management:
+# --------------------------------------------
 @server.get('/api/renderer_info')
 def renderer_info():
@@ -499,35 +454,95 @@ def get_renderer_help(renderer):
     return f"Cannot find renderer '{renderer}'", 400
 
 
+# --------------------------------------------
+# Miscellaneous:
+# --------------------------------------------
+@server.post('/api/job/<job_id>/send_subjob_update_notification')
+def subjob_update_notification(job_id):
+    subjob_details = request.json
+    DistributedJobManager.handle_subjob_update_notification(RenderQueue.job_with_id(job_id), subjob_data=subjob_details)
+    return Response(status=200)
+
+
+@server.route('/api/job/<job_id>/thumbnail')
+def job_thumbnail(job_id):
+    try:
+        big_thumb = request.args.get('size', False) == "big"
+        video_ok = request.args.get('video_ok', False)
+        found_job = RenderQueue.job_with_id(job_id, none_ok=False)
+
+        # trigger a thumbnail update - just in case
+        PreviewManager.update_previews_for_job(found_job, wait_until_completion=True, timeout=60)
+        previews = PreviewManager.get_previews_for_job(found_job)
+        all_previews_list = previews.get('output', previews.get('input', []))
+
+        video_previews = [x for x in all_previews_list if x['kind'] == 'video']
+        image_previews = [x for x in all_previews_list if x['kind'] == 'image']
+        filtered_list = video_previews if video_previews and video_ok else image_previews
+
+        # todo - sort by size or other metrics here
+        if filtered_list:
+            preview_to_send = filtered_list[0]
+            mime_types = {'image': 'image/jpeg', 'video': 'video/mp4'}
+            file_mime_type = mime_types.get(preview_to_send['kind'], 'unknown')
+            return send_file(preview_to_send['filename'], mimetype=file_mime_type)
+    except Exception as e:
+        logger.error(f'Error getting thumbnail: {e}')
+        return f'Error getting thumbnail: {e}', 500
+    return "No thumbnail available", 404
+
+
+# --------------------------------------------
+# System Benchmarks:
+# --------------------------------------------
 @server.get('/api/cpu_benchmark')
 def get_cpu_benchmark_score():
+    from src.utilities.benchmark import cpu_benchmark
     return str(cpu_benchmark(10))
 
 
 @server.get('/api/disk_benchmark')
 def get_disk_benchmark():
+    from src.utilities.benchmark import disk_io_benchmark
     results = disk_io_benchmark()
     return {'write_speed': results[0], 'read_speed': results[-1]}
 
 
-def start_server(hostname=None):
-
-    # get hostname
-    if not hostname:
-        local_hostname = socket.gethostname()
-        hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")
-
-    # load flask settings
-    server.config['HOSTNAME'] = hostname
-    server.config['PORT'] = int(Config.port_number)
-    server.config['UPLOAD_FOLDER'] = system_safe_path(os.path.expanduser(Config.upload_folder))
-    server.config['MAX_CONTENT_PATH'] = Config.max_content_path
-    server.config['enable_split_jobs'] = Config.enable_split_jobs
-
-    # disable most Flask logging
-    flask_log = logging.getLogger('werkzeug')
-    flask_log.setLevel(Config.flask_log_level.upper())
-
-    logger.debug('Starting API server')
-    server.run(host='0.0.0.0', port=server.config['PORT'], debug=Config.flask_debug_enable, use_reloader=False,
-               threaded=True)
+# --------------------------------------------
+# Error Handlers:
+# --------------------------------------------
+@server.errorhandler(JobNotFoundError)
+def handle_job_not_found(job_error):
+    return str(job_error), 400
+
+
+@server.errorhandler(DetachedInstanceError)
+def handle_detached_instance(_):
+    return "Unavailable", 503
+
+
+@server.errorhandler(Exception)
+def handle_general_error(general_error):
+    err_msg = f"Server error: {general_error}"
+    logger.error(err_msg)
+    return err_msg, 500
+
+
+# --------------------------------------------
+# Debug / Development Only:
+# --------------------------------------------
+@server.get('/api/_debug/detected_clients')
+def detected_clients():
+    # todo: dev/debug only. Should not ship this - probably.
+    from src.utilities.zeroconf_server import ZeroconfServer
+    return ZeroconfServer.found_hostnames()
+
+
+@server.get('/api/_debug/clear_history')
+def clear_history():
+    RenderQueue.clear_history()
+    return 'success'
RenderServerProxy module

@@ -10,7 +10,6 @@ from urllib.parse import urljoin
 
 from src.utilities.misc_helper import is_localhost
 from src.utilities.status_utils import RenderStatus
-from src.utilities.zeroconf_server import ZeroconfServer
 
 status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green',
                  RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple',
@@ -26,15 +25,8 @@ LOOPBACK = '127.0.0.1'
 
 
 class RenderServerProxy:
-    """
-    The ServerProxy class is responsible for interacting with a remote server.
-    It provides methods to request data from the server and store the status of the server.
-
-    Attributes:
-        system_cpu (str): The CPU type of the system.
-        system_cpu_count (int): The number of CPUs in the system.
-        system_os (str): The operating system of the system.
-        system_os_version (str): The version of the operating system.
+    """The ServerProxy class is responsible for interacting with a remote server.
+    It provides convenience methods to request data from the server and store the status of the server.
     """
 
     def __init__(self, hostname, server_port="8080"):
@@ -55,6 +47,10 @@ class RenderServerProxy:
         self.system_os = None
         self.system_os_version = None
 
+    # --------------------------------------------
+    # Basics / Connection:
+    # --------------------------------------------
+
     def __repr__(self):
         return f"<RenderServerProxy - {self.hostname}>"
@@ -73,6 +69,10 @@ class RenderServerProxy:
         running_jobs = [x for x in self.__jobs_cache if x['status'] == 'running'] if self.__jobs_cache else []
         return f"{len(running_jobs)} running" if running_jobs else "Ready"
 
+    # --------------------------------------------
+    # Requests:
+    # --------------------------------------------
+
     def request_data(self, payload, timeout=5):
         try:
             req = self.request(payload, timeout)
@@ -103,6 +103,10 @@ class RenderServerProxy:
         hostname = LOOPBACK if self.is_localhost else self.hostname
         return requests.get(f'http://{hostname}:{self.port}/api/{payload}', timeout=timeout)
 
+    # --------------------------------------------
+    # Background Updates:
+    # --------------------------------------------
+
     def start_background_update(self):
         if self.__update_in_background:
             return
@@ -119,17 +123,6 @@ class RenderServerProxy:
         self.__background_thread.daemon = True
         self.__background_thread.start()
 
-    def stop_background_update(self):
-        self.__update_in_background = False
-
-    def get_job_info(self, job_id, timeout=5):
-        return self.request_data(f'job/{job_id}', timeout=timeout)
-
-    def get_all_jobs(self, timeout=5, ignore_token=False):
-        if not self.__update_in_background or ignore_token:
-            self.__update_job_cache(timeout, ignore_token)
-        return self.__jobs_cache.copy() if self.__jobs_cache else None
-
     def __update_job_cache(self, timeout=40, ignore_token=False):
 
         if self.__offline_flags:  # if we're offline, don't bother with the long poll
@@ -147,15 +140,21 @@ class RenderServerProxy:
         self.__jobs_cache = sorted_jobs
         self.__jobs_cache_token = status_result['token']
 
+    def stop_background_update(self):
+        self.__update_in_background = False
+
+    # --------------------------------------------
+    # Get System Info:
+    # --------------------------------------------
+
+    def get_all_jobs(self, timeout=5, ignore_token=False):
+        if not self.__update_in_background or ignore_token:
+            self.__update_job_cache(timeout, ignore_token)
+        return self.__jobs_cache.copy() if self.__jobs_cache else None
+
     def get_data(self, timeout=5):
         return self.request_data('full_status', timeout=timeout)
 
-    def cancel_job(self, job_id, confirm=False):
-        return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')
-
-    def delete_job(self, job_id, confirm=False):
-        return self.request_data(f'job/{job_id}/delete?confirm={confirm}')
-
     def get_status(self):
         status = self.request_data('status')
         if status and not self.system_cpu:
@@ -165,26 +164,19 @@ class RenderServerProxy:
             self.system_os_version = status['system_os_version']
         return status
 
-    def is_engine_available(self, engine_name):
-        return self.request_data(f'{engine_name}/is_available')
-
-    def get_all_engines(self):
-        return self.request_data('all_engines')
-
-    def send_subjob_update_notification(self, parent_id, subjob):
-        """
-        Notifies the parent job of an update in a subjob.
-
-        Args:
-            parent_id (str): The ID of the parent job.
-            subjob (Job): The subjob that has updated.
-
-        Returns:
-            Response: The response from the server.
-        """
-        hostname = LOOPBACK if self.is_localhost else self.hostname
-        return requests.post(f'http://{hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification',
-                             json=subjob.json())
+    # --------------------------------------------
+    # Get Job Info:
+    # --------------------------------------------
+
+    def get_job_info(self, job_id, timeout=5):
+        return self.request_data(f'job/{job_id}', timeout=timeout)
+
+    def get_job_files_list(self, job_id):
+        return self.request_data(f"job/{job_id}/file_list")
+
+    # --------------------------------------------
+    # Job Lifecycle:
+    # --------------------------------------------
 
     def post_job_to_server(self, file_path, job_list, callback=None):
         """
@@ -232,29 +224,36 @@ class RenderServerProxy:
         except Exception as e:
             logger.error(f"An error occurred: {e}")
 
-    def get_job_files_list(self, job_id):
-        return self.request_data(f"job/{job_id}/file_list")
-
-    def download_all_job_files(self, job_id, save_path):
+    def cancel_job(self, job_id, confirm=False):
+        return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')
+
+    def delete_job(self, job_id, confirm=False):
+        return self.request_data(f'job/{job_id}/delete?confirm={confirm}')
+
+    def send_subjob_update_notification(self, parent_id, subjob):
+        """
+        Notifies the parent job of an update in a subjob.
+
+        Args:
+            parent_id (str): The ID of the parent job.
+            subjob (Job): The subjob that has updated.
+
+        Returns:
+            Response: The response from the server.
+        """
         hostname = LOOPBACK if self.is_localhost else self.hostname
-        url = f"http://{hostname}:{self.port}/api/job/{job_id}/download_all"
-        return self.__download_file_from_url(url, output_filepath=save_path)
-
-    def download_job_file(self, job_id, job_filename, save_path):
-        hostname = LOOPBACK if self.is_localhost else self.hostname
-        url = f"http://{hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
-        return self.__download_file_from_url(url, output_filepath=save_path)
-
-    @staticmethod
-    def __download_file_from_url(url, output_filepath):
-        with requests.get(url, stream=True) as r:
-            r.raise_for_status()
-            with open(output_filepath, 'wb') as f:
-                for chunk in r.iter_content(chunk_size=8192):
-                    f.write(chunk)
-        return output_filepath
-
-    # --- Renderer --- #
+        return requests.post(f'http://{hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification',
+                             json=subjob.json())
+
+    # --------------------------------------------
+    # Renderers:
+    # --------------------------------------------
+
+    def is_engine_available(self, engine_name):
+        return self.request_data(f'{engine_name}/is_available')
+
+    def get_all_engines(self):
+        return self.request_data('all_engines')
 
     def get_renderer_info(self, response_type='standard', timeout=5):
         """
@@ -285,3 +284,26 @@ class RenderServerProxy:
         form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu}
         hostname = LOOPBACK if self.is_localhost else self.hostname
         return requests.post(f'http://{hostname}:{self.port}/api/delete_engine', json=form_data)
+
+    # --------------------------------------------
+    # Download Files:
+    # --------------------------------------------
+
+    def download_all_job_files(self, job_id, save_path):
+        hostname = LOOPBACK if self.is_localhost else self.hostname
+        url = f"http://{hostname}:{self.port}/api/job/{job_id}/download_all"
+        return self.__download_file_from_url(url, output_filepath=save_path)
+
+    def download_job_file(self, job_id, job_filename, save_path):
+        hostname = LOOPBACK if self.is_localhost else self.hostname
+        url = f"http://{hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
+        return self.__download_file_from_url(url, output_filepath=save_path)
+
+    @staticmethod
+    def __download_file_from_url(url, output_filepath):
+        with requests.get(url, stream=True) as r:
+            r.raise_for_status()
+            with open(output_filepath, 'wb') as f:
+                for chunk in r.iter_content(chunk_size=8192):
+                    f.write(chunk)
+        return output_filepath
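Putting the reorganized proxy together, a caller might use it roughly like this (the import path and the job-dict field name are assumptions; the method names and constructor signature are the ones shown in the diff):

```python
from src.api.render_server_proxy import RenderServerProxy  # import path is an assumption

proxy = RenderServerProxy("renderbox.local")      # server_port defaults to "8080"
status = proxy.get_status()                       # also caches system_os / system_cpu on first call
jobs = proxy.get_all_jobs(ignore_token=True) or []
if jobs:
    job_id = jobs[0].get("id")                    # field name assumed, not shown in the diff
    proxy.download_all_job_files(job_id, save_path="/tmp/job_output.zip")
```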
DistributedJobManager module

@@ -39,10 +39,10 @@
         """
         Responds to the 'frame_complete' pubsub message for local jobs.
 
-        Parameters:
+        Args:
            job_id (str): The ID of the job that has changed status.
            old_status (str): The previous status of the job.
            new_status (str): The new (current) status of the job.
 
        Note: Do not call directly. Instead, call via the 'frame_complete' pubsub message.
        """
@@ -75,10 +75,10 @@
         Responds to the 'status_change' pubsub message for local jobs.
         If it's a child job, it notifies the parent job about the status change.
 
-        Parameters:
+        Args:
            job_id (str): The ID of the job that has changed status.
            old_status (str): The previous status of the job.
            new_status (str): The new (current) status of the job.
 
        Note: Do not call directly. Instead, call via the 'status_change' pubsub message.
        """
@@ -129,14 +129,12 @@
     # --------------------------------------------
 
     @classmethod
-    def create_render_job(cls, job_data, loaded_project_local_path):
-        """
-        Creates render jobs.
-
-        This method job data and a local path to a loaded project. It creates and returns new a render job.
+    def create_render_job(cls, new_job_attributes, loaded_project_local_path):
+        """Creates render jobs. Pass in dict of job_data and the local path to the project. It creates and returns a new
+        render job.
 
         Args:
-            job_data (dict): Job data.
+            new_job_attributes (dict): Dict of desired attributes for new job (frame count, renderer, output path, etc)
             loaded_project_local_path (str): The local path to the loaded project.
 
         Returns:
@@ -144,7 +142,7 @@
         """
 
         # get new output path in output_dir
-        output_path = job_data.get('output_path')
+        output_path = new_job_attributes.get('output_path')
         if not output_path:
             loaded_project_filename = os.path.basename(loaded_project_local_path)
             output_filename = os.path.splitext(loaded_project_filename)[0]
@@ -158,27 +156,27 @@
         logger.debug(f"New job output path: {output_path}")
 
         # create & configure jobs
-        worker = EngineManager.create_worker(renderer=job_data['renderer'],
+        worker = EngineManager.create_worker(renderer=new_job_attributes['renderer'],
                                              input_path=loaded_project_local_path,
                                              output_path=output_path,
-                                             engine_version=job_data.get('engine_version'),
-                                             args=job_data.get('args', {}),
-                                             parent=job_data.get('parent'),
-                                             name=job_data.get('name'))
-        worker.status = job_data.get("initial_status", worker.status)  # todo: is this necessary?
-        worker.priority = int(job_data.get('priority', worker.priority))
-        worker.start_frame = int(job_data.get("start_frame", worker.start_frame))
-        worker.end_frame = int(job_data.get("end_frame", worker.end_frame))
+                                             engine_version=new_job_attributes.get('engine_version'),
+                                             args=new_job_attributes.get('args', {}),
+                                             parent=new_job_attributes.get('parent'),
+                                             name=new_job_attributes.get('name'))
+        worker.status = new_job_attributes.get("initial_status", worker.status)  # todo: is this necessary?
+        worker.priority = int(new_job_attributes.get('priority', worker.priority))
+        worker.start_frame = int(new_job_attributes.get("start_frame", worker.start_frame))
+        worker.end_frame = int(new_job_attributes.get("end_frame", worker.end_frame))
         worker.watchdog_timeout = Config.worker_process_timeout
         worker.hostname = socket.gethostname()
 
         # determine if we can / should split the job
-        if job_data.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent:
-            cls.split_into_subjobs_async(worker, job_data, loaded_project_local_path)
+        if new_job_attributes.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent:
+            cls.split_into_subjobs_async(worker, new_job_attributes, loaded_project_local_path)
         else:
             worker.status = RenderStatus.NOT_STARTED
 
-        RenderQueue.add_to_render_queue(worker, force_start=job_data.get('force_start', False))
+        RenderQueue.add_to_render_queue(worker, force_start=new_job_attributes.get('force_start', False))
         PreviewManager.update_previews_for_job(worker)
 
         return worker
@@ -189,8 +187,7 @@
 
     @classmethod
     def handle_subjob_update_notification(cls, local_job, subjob_data):
-        """
-        Responds to a notification from a remote subjob and the host requests any subsequent updates from the subjob.
+        """Responds to a notification from a remote subjob and the host requests any subsequent updates from the subjob.
 
         Args:
             local_job (BaseRenderWorker): The local parent job worker.
@@ -214,6 +211,14 @@
 
     @classmethod
     def wait_for_subjobs(cls, parent_job):
+        """Check the status of subjobs and waits until they are all finished. Download rendered frames from subjobs
+        when they are completed.
+
+        Args:
+            parent_job: Worker object that has child jobs
+
+        Returns:
+        """
         logger.debug(f"Waiting for subjobs for job {parent_job}")
         parent_job.status = RenderStatus.WAITING_FOR_SUBJOBS
         statuses_to_download = [RenderStatus.CANCELLED, RenderStatus.ERROR, RenderStatus.COMPLETED]
@@ -280,15 +285,15 @@
     # --------------------------------------------
 
     @classmethod
-    def split_into_subjobs_async(cls, parent_worker, job_data, project_path, system_os=None):
+    def split_into_subjobs_async(cls, parent_worker, new_job_attributes, project_path, system_os=None):
         # todo: I don't love this
         parent_worker.status = RenderStatus.CONFIGURING
-        cls.background_worker = threading.Thread(target=cls.split_into_subjobs, args=(parent_worker, job_data,
+        cls.background_worker = threading.Thread(target=cls.split_into_subjobs, args=(parent_worker, new_job_attributes,
                                                                                       project_path, system_os))
         cls.background_worker.start()
 
     @classmethod
-    def split_into_subjobs(cls, parent_worker, job_data, project_path, system_os=None, specific_servers=None):
+    def split_into_subjobs(cls, parent_worker, new_job_attributes, project_path, system_os=None, specific_servers=None):
         """
         Splits a job into subjobs and distributes them among available servers.
 
@@ -297,10 +302,10 @@
         subjob.
 
         Args:
-            parent_worker (Worker): The worker that is handling the job.
-            job_data (dict): The data for the job to be split.
-            project_path (str): The path to the project associated with the job.
-            system_os (str, optional): The operating system of the servers. Default is any OS.
+            parent_worker (Worker): The parent job what we're creating the subjobs for.
+            new_job_attributes (dict): Dict of desired attributes for new job (frame count, renderer, output path, etc)
+            project_path (str): The path to the project.
+            system_os (str, optional): Required OS. Default is any.
             specific_servers (list, optional): List of specific servers to split work between. Defaults to all found.
         """
 
@@ -321,7 +326,7 @@
         try:
             for subjob_data in all_subjob_server_data:
                 subjob_hostname = subjob_data['hostname']
-                post_results = cls.__create_subjob(job_data, project_path, subjob_data, subjob_hostname,
+                post_results = cls.__create_subjob(new_job_attributes, project_path, subjob_data, subjob_hostname,
                                                    parent_worker)
                 if not post_results.ok:
                     ValueError(f"Failed to create subjob on {subjob_hostname}")
@@ -342,8 +347,9 @@
             RenderServerProxy(parent_worker.hostname).cancel_job(parent_worker.id, confirm=True)
 
     @staticmethod
-    def __create_subjob(job_data, project_path, server_data, server_hostname, parent_worker):
-        subjob = job_data.copy()
+    def __create_subjob(new_job_attributes, project_path, server_data, server_hostname, parent_worker):
+        """Convenience method to create subjobs for a parent worker"""
+        subjob = new_job_attributes.copy()
         subjob['name'] = f"{parent_worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]"
         subjob['parent'] = f"{parent_worker.id}@{parent_worker.hostname}"
         subjob['start_frame'] = server_data['frame_range'][0]
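To make the renamed `new_job_attributes` parameter concrete, the keys that `create_render_job` reads in this diff suggest a payload roughly like the following (all values are invented for illustration):

```python
# Example attribute dict for DistributedJobManager.create_render_job (values are placeholders).
new_job_attributes = {
    "renderer": "blender",                 # which engine to use
    "name": "shot_010",
    "output_path": "/renders/shot_010/frame_####.png",
    "engine_version": "4.0.2",
    "args": {},
    "priority": 50,
    "start_frame": 1,
    "end_frame": 240,
    "enable_split_jobs": True,             # allow splitting across detected servers
    "force_start": False,
}
# worker = DistributedJobManager.create_render_job(new_job_attributes, "/tmp/uploads/shot_010.blend")
```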
Blender engine downloader module

@@ -8,8 +8,7 @@ from src.engines.blender.blender_engine import Blender
 from src.engines.core.base_downloader import EngineDownloader
 from src.utilities.misc_helper import current_system_os, current_system_cpu
 
-# url = "https://download.blender.org/release/"
-url = "https://ftp.nluug.nl/pub/graphics/blender/release/"  # much faster mirror for testing
+url = "https://download.blender.org/release/"
 
 logger = logging.getLogger()
 supported_formats = ['.zip', '.tar.xz', '.dmg']
EngineDownloader base class module

@@ -1,9 +1,7 @@
 import logging
 import os
 import shutil
-import tarfile
 import tempfile
-import zipfile
 
 import requests
 from tqdm import tqdm
@@ -12,26 +10,150 @@ logger = logging.getLogger()
 
 
 class EngineDownloader:
+    """A class responsible for downloading and extracting rendering engines from publicly available URLs.
+
+    Attributes:
+        supported_formats (list[str]): A list of file formats supported by the downloader.
+    """
+
     supported_formats = ['.zip', '.tar.xz', '.dmg']
 
     def __init__(self):
         pass
 
+    # --------------------------------------------
+    # Required Overrides for Subclasses:
+    # --------------------------------------------
+
     @classmethod
     def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False):
-        raise NotImplementedError  # implement this method in your engine subclass
+        """
+        Finds the most recent version of the rendering engine available for download.
+
+        This method should be overridden in a subclass to implement the logic for determining
+        the most recent version of the rendering engine, optionally filtering by long-term
+        support (LTS) versions, the operating system, and CPU architecture.
+
+        Args:
+            system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
+            cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
+            lts_only (bool, optional): Limit the search to LTS (long-term support) versions only. Default is False.
+
+        Returns:
+            dict: A dict with the following keys:
+                - 'cpu' (str): The CPU architecture.
+                - 'system_os' (str): The operating system.
+                - 'file' (str): The filename of the version's download file.
+                - 'url' (str): The remote URL for downloading the version.
+                - 'version' (str): The version number.
+
+        Raises:
+            NotImplementedError: If the method is not overridden in a subclass.
+        """
+        raise NotImplementedError(f"find_most_recent_version not implemented for {cls.__class__.__name__}")
 
     @classmethod
     def version_is_available_to_download(cls, version, system_os=None, cpu=None):
-        raise NotImplementedError  # implement this method in your engine subclass
+        """Checks if a requested version of the rendering engine is available for download.
+
+        This method should be overridden in a subclass to implement the logic for determining
+        whether a given version of the rendering engine is available for download, based on the
+        operating system and CPU architecture.
+
+        Args:
+            version (str): The requested renderer version to download.
+            system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
+            cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
+
+        Returns:
+            bool: True if the version is available for download, False otherwise.
+
+        Raises:
+            NotImplementedError: If the method is not overridden in a subclass.
+        """
+        raise NotImplementedError(f"version_is_available_to_download not implemented for {cls.__class__.__name__}")
 
     @classmethod
     def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120):
-        raise NotImplementedError  # implement this method in your engine subclass
+        """Downloads the requested version of the rendering engine to the given download location.
+
+        This method should be overridden in a subclass to implement the logic for downloading
+        a specific version of the rendering engine. The method is intended to handle the
+        downloading process based on the version, operating system, CPU architecture, and
+        timeout parameters.
+
+        Args:
+            version (str): The requested renderer version to download.
+            download_location (str): The directory where the engine should be downloaded.
+            system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
+            cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
+            timeout (int, optional): The maximum time in seconds to wait for the download. Default is 120 seconds.
+
+        Raises:
+            NotImplementedError: If the method is not overridden in a subclass.
+        """
+        raise NotImplementedError(f"download_engine not implemented for {cls.__class__.__name__}")
+
+    # --------------------------------------------
+    # Optional Overrides for Subclasses:
+    # --------------------------------------------
+
+    @classmethod
+    def all_versions(cls, system_os=None, cpu=None):
+        """Retrieves a list of available versions of the software for a specific operating system and CPU architecture.
+
+        This method fetches all available versions for the given operating system and CPU type, constructing
+        a list of dictionaries containing details such as the version, CPU architecture, system OS, and the
+        remote URL for downloading each version.
+
+        Args:
+            system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
+            cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
+
+        Returns:
+            list[dict]: A list of dictionaries, each containing:
+                - 'cpu' (str): The CPU architecture.
+                - 'file' (str): The filename of the version's download file.
+                - 'system_os' (str): The operating system.
+                - 'url' (str): The remote URL for downloading the version.
+                - 'version' (str): The version number.
+        """
+        return []
+
+    # --------------------------------------------
+    # Do Not Override These Methods:
+    # --------------------------------------------
+
     @classmethod
     def download_and_extract_app(cls, remote_url, download_location, timeout=120):
+        """Downloads an application from the given remote URL and extracts it to the specified location.
+
+        This method handles the downloading of the application, supports multiple archive formats,
+        and extracts the contents to the specified `download_location`. It also manages temporary
+        files and logs progress throughout the process.
+
+        Args:
+            remote_url (str): The URL of the application to download.
+            download_location (str): The directory where the application should be extracted.
+            timeout (int, optional): The maximum time in seconds to wait for the download. Default is 120 seconds.
+
+        Returns:
+            str: The path to the directory where the application was extracted.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
Exception: Catches and logs any exceptions that occur during the download or extraction process.
|
||||||
|
|
||||||
|
Supported Formats:
|
||||||
|
- `.tar.xz`: Extracted using the `tarfile` module.
|
||||||
|
- `.zip`: Extracted using the `zipfile` module.
|
||||||
|
- `.dmg`: macOS disk image files, handled using the `dmglib` library.
|
||||||
|
- Other formats will result in an error being logged.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- If the application already exists in the `download_location`, the method will log an error
|
||||||
|
and return without downloading or extracting.
|
||||||
|
- Temporary files created during the download process are cleaned up after completion.
|
||||||
|
"""
|
||||||
|
|
||||||
# Create a temp download directory
|
# Create a temp download directory
|
||||||
temp_download_dir = tempfile.mkdtemp()
|
temp_download_dir = tempfile.mkdtemp()
|
||||||
@@ -80,6 +202,7 @@ class EngineDownloader:
|
|||||||
# Extract the downloaded file
|
# Extract the downloaded file
|
||||||
# Process .tar.xz files
|
# Process .tar.xz files
|
||||||
if temp_downloaded_file_path.lower().endswith('.tar.xz'):
|
if temp_downloaded_file_path.lower().endswith('.tar.xz'):
|
||||||
|
import tarfile
|
||||||
try:
|
try:
|
||||||
with tarfile.open(temp_downloaded_file_path, 'r:xz') as tar:
|
with tarfile.open(temp_downloaded_file_path, 'r:xz') as tar:
|
||||||
tar.extractall(path=download_location)
|
tar.extractall(path=download_location)
|
||||||
@@ -93,6 +216,7 @@ class EngineDownloader:
|
|||||||
|
|
||||||
# Process .zip files
|
# Process .zip files
|
||||||
elif temp_downloaded_file_path.lower().endswith('.zip'):
|
elif temp_downloaded_file_path.lower().endswith('.zip'):
|
||||||
|
import zipfile
|
||||||
try:
|
try:
|
||||||
with zipfile.ZipFile(temp_downloaded_file_path, 'r') as zip_ref:
|
with zipfile.ZipFile(temp_downloaded_file_path, 'r') as zip_ref:
|
||||||
zip_ref.extractall(download_location)
|
zip_ref.extractall(download_location)
|
||||||
|
|||||||
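Note on the downloader API above: a concrete downloader supplies the three required overrides and hands back the dict shape documented in find_most_recent_version. The sketch below is illustrative only; the FooEngineDownloader class, its URL, and its hard-coded version list are assumptions rather than code from this changeset, and imports are omitted because it would live alongside EngineDownloader.

class FooEngineDownloader(EngineDownloader):
    BASE_URL = "https://example.com/foo"  # hypothetical release mirror

    @classmethod
    def all_versions(cls, system_os=None, cpu=None):
        # Normally scraped from a release feed; hard-coded here for illustration
        return [{'cpu': cpu, 'system_os': system_os, 'file': 'foo-1.0.zip',
                 'url': f"{cls.BASE_URL}/foo-1.0.zip", 'version': '1.0'}]

    @classmethod
    def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False):
        # Newest entry for the requested platform, or None when nothing matches
        versions = cls.all_versions(system_os=system_os, cpu=cpu)
        return versions[0] if versions else None

    @classmethod
    def version_is_available_to_download(cls, version, system_os=None, cpu=None):
        return any(v['version'] == version for v in cls.all_versions(system_os, cpu))

    @classmethod
    def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120):
        # Reuse the shared helper above, which understands .zip, .tar.xz and .dmg archives
        url = f"{cls.BASE_URL}/foo-{version}.zip"
        return cls.download_and_extract_app(url, download_location, timeout=timeout)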
@@ -8,9 +8,21 @@ SUBPROCESS_TIMEOUT = 5


 class BaseRenderEngine(object):
+    """Base class for render engines. This class provides common functionality and structure for various rendering
+    engines. Create subclasses and override the methods marked below to add additional renderers
+
+    Attributes:
+        install_paths (list): A list of default installation paths where the render engine
+            might be found. This list can be populated with common paths to help locate the
+            executable on different operating systems or environments.
+    """
+
     install_paths = []

+    # --------------------------------------------
+    # Required Overrides for Subclasses:
+    # --------------------------------------------
+
     def __init__(self, custom_path=None):
         self.custom_renderer_path = custom_path
         if not self.renderer_path() or not os.path.exists(self.renderer_path()):
@@ -20,6 +32,115 @@ class BaseRenderEngine(object):
             logger.warning(f"Path is not executable. Setting permissions to 755 for {self.renderer_path()}")
             os.chmod(self.renderer_path(), 0o755)

+    def version(self):
+        """Return the version number as a string.
+
+        Returns:
+            str: Version number.
+
+        Raises:
+            NotImplementedError: If not overridden.
+        """
+        raise NotImplementedError(f"version not implemented for {self.__class__.__name__}")
+
+    def get_project_info(self, project_path, timeout=10):
+        """Extracts detailed project information from the given project path.
+
+        Args:
+            project_path (str): The path to the project file.
+            timeout (int, optional): The maximum time (in seconds) to wait for the operation. Default is 10 seconds.
+
+        Returns:
+            dict: A dictionary containing project information (subclasses should define the structure).
+
+        Raises:
+            NotImplementedError: If the method is not overridden in a subclass.
+        """
+        raise NotImplementedError(f"get_project_info not implemented for {self.__class__.__name__}")
+
+    @classmethod
+    def get_output_formats(cls):
+        """Returns a list of available output formats supported by the renderer.
+
+        Returns:
+            list[str]: A list of strings representing the available output formats.
+        """
+        raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
+
+    @staticmethod
+    def worker_class():  # override when subclassing to link worker class
+        raise NotImplementedError("Worker class not implemented")
+
+    # --------------------------------------------
+    # Optional Overrides for Subclasses:
+    # --------------------------------------------
+
+    def supported_extensions(self):
+        """
+        Returns:
+            list[str]: list of supported extensions
+        """
+        return []
+
+    def get_help(self):
+        """Retrieves the help documentation for the renderer.
+
+        This method runs the renderer's help command (default: '-h') and captures the output.
+        Override this method if the renderer uses a different help flag.
+
+        Returns:
+            str: The help documentation as a string.
+
+        Raises:
+            FileNotFoundError: If the renderer path is not found.
+        """
+        path = self.renderer_path()
+        if not path:
+            raise FileNotFoundError("renderer path not found")
+        creationflags = subprocess.CREATE_NO_WINDOW if platform.system() == 'Windows' else 0
+        help_doc = subprocess.check_output([path, '-h'], stderr=subprocess.STDOUT,
+                                           timeout=SUBPROCESS_TIMEOUT, creationflags=creationflags).decode('utf-8')
+        return help_doc
+
+    def system_info(self):
+        """Return additional information about the system specfic to the engine (configured GPUs, render engines, etc)
+
+        Returns:
+            dict: A dictionary with engine-specific system information
+        """
+        return {}
+
+    def perform_presubmission_tasks(self, project_path):
+        """Perform any pre-submission tasks on a project file before uploading it to a server (pack textures, etc.)
+
+        Override this method to:
+        1. Copy the project file to a temporary location (DO NOT MODIFY ORIGINAL PATH).
+        2. Perform additional modifications or tasks.
+        3. Return the path to the modified project file.
+
+        Args:
+            project_path (str): The original project file path.
+
+        Returns:
+            str: The path to the modified project file.
+        """
+        return project_path
+
+    def get_arguments(self):
+        pass
+
+    @staticmethod
+    def downloader():  # override when subclassing if using a downloader class
+        return None
+
+    @staticmethod
+    def ui_options(system_info):  # override to return options for ui
+        return {}
+
+    # --------------------------------------------
+    # Do Not Override These Methods:
+    # --------------------------------------------
+
     def renderer_path(self):
         return self.custom_renderer_path or self.default_renderer_path()

@@ -39,46 +160,3 @@ class BaseRenderEngine(object):
         except Exception as e:
             logger.exception(e)
         return path
-
-    def version(self):
-        raise NotImplementedError("version not implemented")
-
-    def supported_extensions(self):
-        return []
-
-    @staticmethod
-    def downloader():  # override when subclassing if using a downloader class
-        return None
-
-    @staticmethod
-    def worker_class():  # override when subclassing to link worker class
-        raise NotImplementedError("Worker class not implemented")
-
-    @staticmethod
-    def ui_options(system_info):  # override to return options for ui
-        return {}
-
-    def get_help(self):  # override if renderer uses different help flag
-        path = self.renderer_path()
-        if not path:
-            raise FileNotFoundError("renderer path not found")
-        creationflags = subprocess.CREATE_NO_WINDOW if platform.system() == 'Windows' else 0
-        help_doc = subprocess.check_output([path, '-h'], stderr=subprocess.STDOUT,
-                                           timeout=SUBPROCESS_TIMEOUT, creationflags=creationflags).decode('utf-8')
-        return help_doc
-
-    def get_project_info(self, project_path, timeout=10):
-        raise NotImplementedError(f"get_project_info not implemented for {self.__name__}")
-
-    @classmethod
-    def get_output_formats(cls):
-        raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
-
-    def get_arguments(self):
-        pass
-
-    def system_info(self):
-        pass
-
-    def perform_presubmission_tasks(self, project_path):
-        return project_path
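The reorganized BaseRenderEngine above makes the subclassing contract explicit: version(), get_project_info(), get_output_formats() and worker_class() are mandatory, while the remaining methods have safe defaults. A minimal sketch of an engine subclass follows; FooRender and every value it returns are illustrative assumptions, not code from this repository.

class FooRender(BaseRenderEngine):
    install_paths = ['/usr/local/bin/foorender']  # where default_renderer_path() may look

    def version(self):
        return "1.0.0"  # e.g. parsed from the renderer's --version output

    def get_project_info(self, project_path, timeout=10):
        # Structure is up to the subclass; a frame range is a typical minimum
        return {'frame_start': 1, 'frame_end': 250}

    @classmethod
    def get_output_formats(cls):
        return ['PNG', 'EXR', 'MP4']

    @staticmethod
    def worker_class():
        return FooRenderWorker  # the BaseRenderWorker subclass sketched further down

    def supported_extensions(self):
        return ['.foo']

    @staticmethod
    def downloader():
        return FooEngineDownloader  # optional; keep the default None if the engine cannot be fetched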
@@ -48,6 +48,10 @@ class BaseRenderWorker(Base):

     engine = None

+    # --------------------------------------------
+    # Required Overrides for Subclasses:
+    # --------------------------------------------
+
     def __init__(self, input_path, output_path, engine_path, priority=2, args=None, ignore_extensions=True, parent=None,
                  name=None):

@@ -57,7 +61,7 @@ class BaseRenderWorker(Base):
             logger.error(err_meg)
             raise ValueError(err_meg)
         if not self.engine:
-            raise NotImplementedError("Engine not defined")
+            raise NotImplementedError(f"Engine not defined for {self.__class__.__name__}")

     def generate_id():
         import uuid
@@ -103,6 +107,50 @@ class BaseRenderWorker(Base):
         self.__last_output_time = None
         self.watchdog_timeout = 120

+    def generate_worker_subprocess(self):
+        """Generate a return a list of the command line arguments necessary to perform requested job
+
+        Returns:
+            list[str]: list of command line arguments
+        """
+        raise NotImplementedError("generate_worker_subprocess not implemented")
+
+    def _parse_stdout(self, line):
+        """Parses a line of standard output from the renderer.
+
+        This method should be overridden in a subclass to implement the logic for processing
+        and interpreting a single line of output from the renderer's standard output stream.
+
+        On frame completion, the subclass should:
+        1. Update value of self.current_frame
+        2. Call self._send_frame_complete_notification()
+
+        Args:
+            line (str): A line of text from the renderer's standard output.
+
+        Raises:
+            NotImplementedError: If the method is not overridden in a subclass.
+        """
+        raise NotImplementedError(f"_parse_stdout not implemented for {self.__class__.__name__}")
+
+    # --------------------------------------------
+    # Optional Overrides for Subclasses:
+    # --------------------------------------------
+
+    def percent_complete(self):
+        # todo: fix this
+        if self.status == RenderStatus.COMPLETED:
+            return 1.0
+        return 0
+
+    def post_processing(self):
+        """Override to perform any engine-specific postprocessing"""
+        pass
+
+    # --------------------------------------------
+    # Do Not Override These Methods:
+    # --------------------------------------------
+
     def __repr__(self):
         return f"<Job id:{self.id} p{self.priority} {self.renderer}-{self.renderer_version} '{self.name}' status:{self.status.value}>"

@@ -142,16 +190,13 @@ class BaseRenderWorker(Base):
         return generated_args

     def get_raw_args(self):
-        raw_args_string = self.args.get('raw', None)
+        raw_args_string = self.args.get('raw', '')
         raw_args = None
         if raw_args_string:
            import shlex
            raw_args = shlex.split(raw_args_string)
         return raw_args

-    def generate_worker_subprocess(self):
-        raise NotImplementedError("generate_worker_subprocess not implemented")
-
     def log_path(self):
         filename = (self.name or os.path.basename(self.input_path)) + '_' + \
                    self.date_created.strftime("%Y.%m.%d_%H.%M.%S") + '.log'
@@ -387,9 +432,6 @@ class BaseRenderWorker(Base):
         except Exception as e:
             logger.error(f"Error stopping the process: {e}")

-    def post_processing(self):
-        pass
-
     def is_running(self):
         if hasattr(self, '__thread'):
             return self.__thread.is_alive()
@@ -418,14 +460,6 @@ class BaseRenderWorker(Base):
         if self.is_running():  # allow the log files to close
             self.__thread.join(timeout=5)

-    def percent_complete(self):
-        if self.status == RenderStatus.COMPLETED:
-            return 1.0
-        return 0
-
-    def _parse_stdout(self, line):
-        raise NotImplementedError("_parse_stdout not implemented")
-
     def time_elapsed(self):
         return get_time_elapsed(self.start_time, self.end_time)
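On the worker side the required overrides are generate_worker_subprocess() and _parse_stdout(); the docstring above spells out the frame-completion protocol. The sketch continues the illustrative FooRender example; the progress-line format and the self.engine_path attribute name are assumptions.

class FooRenderWorker(BaseRenderWorker):
    engine = FooRender  # satisfies the "Engine not defined" check in __init__

    def generate_worker_subprocess(self):
        # Command line handed to the subprocess that the base class launches
        return [self.engine_path, '-i', self.input_path, '-o', self.output_path]

    def _parse_stdout(self, line):
        # Hypothetical renderer output: "Rendered frame 42"
        if line.startswith('Rendered frame'):
            self.current_frame = int(line.rsplit(' ', 1)[-1])
            self._send_frame_complete_notification()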
@@ -12,6 +12,9 @@ logger = logging.getLogger()


 class EngineManager:
+    """Class that manages different versions of installed renderers and handles fetching and downloading new versions,
+    if possible.
+    """

     engines_path = None
     download_tasks = []
@@ -20,6 +23,10 @@ class EngineManager:
     def supported_engines():
         return [Blender, FFMPEG]

+    @classmethod
+    def downloadable_engines(cls):
+        return [engine for engine in cls.supported_engines() if hasattr(engine, "downloader") and engine.downloader()]
+
     @classmethod
     def engine_with_name(cls, engine_name):
         for obj in cls.supported_engines():
@@ -27,7 +34,7 @@ class EngineManager:
             return obj

     @classmethod
-    def get_engines(cls, filter_name=None, include_corrupt=False):
+    def get_engines(cls, filter_name=None, include_corrupt=False, ignore_system=False):

         if not cls.engines_path:
             raise FileNotFoundError("Engine path is not set")
@@ -89,46 +96,47 @@ class EngineManager:
                 'type': 'system'
             }

-        with concurrent.futures.ThreadPoolExecutor() as executor:
-            futures = {
-                executor.submit(fetch_engine_details, eng, include_corrupt): eng.name()
-                for eng in cls.supported_engines()
-                if eng.default_renderer_path() and (not filter_name or filter_name == eng.name())
-            }
+        if not ignore_system:
+            with concurrent.futures.ThreadPoolExecutor() as executor:
+                futures = {
+                    executor.submit(fetch_engine_details, eng, include_corrupt): eng.name()
+                    for eng in cls.supported_engines()
+                    if eng.default_renderer_path() and (not filter_name or filter_name == eng.name())
+                }

         for future in concurrent.futures.as_completed(futures):
             result = future.result()
             if result:
                 results.append(result)

         return results

     @classmethod
-    def all_versions_for_engine(cls, engine_name, include_corrupt=False):
-        versions = cls.get_engines(filter_name=engine_name, include_corrupt=include_corrupt)
+    def all_versions_for_engine(cls, engine_name, include_corrupt=False, ignore_system=False):
+        versions = cls.get_engines(filter_name=engine_name, include_corrupt=include_corrupt, ignore_system=ignore_system)
         sorted_versions = sorted(versions, key=lambda x: x['version'], reverse=True)
         return sorted_versions

     @classmethod
-    def newest_engine_version(cls, engine, system_os=None, cpu=None):
+    def newest_engine_version(cls, engine, system_os=None, cpu=None, ignore_system=None):
         system_os = system_os or current_system_os()
         cpu = cpu or current_system_cpu()

         try:
-            filtered = [x for x in cls.all_versions_for_engine(engine) if x['system_os'] == system_os and
-                        x['cpu'] == cpu]
+            filtered = [x for x in cls.all_versions_for_engine(engine, ignore_system=ignore_system)
+                        if x['system_os'] == system_os and x['cpu'] == cpu]
             return filtered[0]
         except IndexError:
             logger.error(f"Cannot find newest engine version for {engine}-{system_os}-{cpu}")
             return None

     @classmethod
-    def is_version_downloaded(cls, engine, version, system_os=None, cpu=None):
+    def is_version_downloaded(cls, engine, version, system_os=None, cpu=None, ignore_system=False):
         system_os = system_os or current_system_os()
         cpu = cpu or current_system_cpu()

-        filtered = [x for x in cls.get_engines(filter_name=engine) if x['system_os'] == system_os and
-                    x['cpu'] == cpu and x['version'] == version]
+        filtered = [x for x in cls.get_engines(filter_name=engine, ignore_system=ignore_system) if
+                    x['system_os'] == system_os and x['cpu'] == cpu and x['version'] == version]
         return filtered[0] if filtered else False

     @classmethod
@@ -161,7 +169,7 @@ class EngineManager:
         return None

     @classmethod
-    def download_engine(cls, engine, version, system_os=None, cpu=None, background=False):
+    def download_engine(cls, engine, version, system_os=None, cpu=None, background=False, ignore_system=False):

         engine_to_download = cls.engine_with_name(engine)
         existing_task = cls.get_existing_download_task(engine, version, system_os, cpu)
@@ -184,7 +192,7 @@ class EngineManager:
             return thread

         thread.join()
-        found_engine = cls.is_version_downloaded(engine, version, system_os, cpu)  # Check that engine downloaded
+        found_engine = cls.is_version_downloaded(engine, version, system_os, cpu, ignore_system)  # Check that engine downloaded
         if not found_engine:
             logger.error(f"Error downloading {engine}")
         return found_engine
@@ -210,31 +218,21 @@ class EngineManager:
         return False

     @classmethod
-    def update_all_engines(cls):
-        def engine_update_task(engine_class):
-            logger.debug(f"Checking for updates to {engine_class.name()}")
-            latest_version = engine_class.downloader().find_most_recent_version()
+    def is_engine_update_available(cls, engine_class, ignore_system_installs=False):
+        logger.debug(f"Checking for updates to {engine_class.name()}")
+        latest_version = engine_class.downloader().find_most_recent_version()

         if not latest_version:
-            logger.warning(f"Could not find most recent version of {engine.name()} to download")
+            logger.warning(f"Could not find most recent version of {engine_class.name()} to download")
             return

         version_num = latest_version.get('version')
-        if cls.is_version_downloaded(engine_class.name(), version_num):
+        if cls.is_version_downloaded(engine_class.name(), version_num, ignore_system=ignore_system_installs):
             logger.debug(f"Latest version of {engine_class.name()} ({version_num}) already downloaded")
             return

-        # download the engine
-            logger.info(f"Downloading latest version of {engine_class.name()} ({version_num})...")
-            cls.download_engine(engine=engine_class.name(), version=version_num, background=True)
-
-        logger.info(f"Checking for updates for render engines...")
-        threads = []
-        for engine in cls.supported_engines():
-            if engine.downloader():
-                thread = threading.Thread(target=engine_update_task, args=(engine,))
-                threads.append(thread)
-                thread.start()
+        return latest_version

     @classmethod
     def create_worker(cls, renderer, input_path, output_path, engine_version=None, args=None, parent=None, name=None):
@@ -283,6 +281,17 @@ class EngineManager:


 class EngineDownloadWorker(threading.Thread):
+    """A thread worker for downloading a specific version of a rendering engine.
+
+    This class handles the process of downloading a rendering engine in a separate thread,
+    ensuring that the download process does not block the main application.
+
+    Attributes:
+        engine (str): The name of the rendering engine to download.
+        version (str): The version of the rendering engine to download.
+        system_os (str, optional): The operating system for which to download the engine. Defaults to current OS type.
+        cpu (str, optional): Requested CPU architecture. Defaults to system CPU type.
+    """
     def __init__(self, engine, version, system_os=None, cpu=None):
         super().__init__()
         self.engine = engine
@@ -291,7 +300,8 @@ class EngineDownloadWorker(threading.Thread):
         self.cpu = cpu

     def run(self):
-        existing_download = EngineManager.is_version_downloaded(self.engine, self.version, self.system_os, self.cpu)
+        existing_download = EngineManager.is_version_downloaded(self.engine, self.version, self.system_os, self.cpu,
+                                                                ignore_system=True)
         if existing_download:
             logger.info(f"Requested download of {self.engine} {self.version}, but local copy already exists")
             return existing_download
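Taken together, the EngineManager changes replace the fire-and-forget update_all_engines() with a query, is_engine_update_available(), that callers pair with download_engine() themselves, and they thread an ignore_system flag through every lookup so system-wide installs can be excluded. Callers end up with roughly the following pattern, a sketch that mirrors how init.py uses these methods later in this diff:

for engine in EngineManager.downloadable_engines():
    update = EngineManager.is_engine_update_available(engine, ignore_system_installs=True)
    if update:  # dict with 'version', 'url', etc.; nothing is returned when no update is needed
        EngineManager.download_engine(engine=engine.name(), version=update['version'],
                                      background=True, ignore_system=True)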
75
src/init.py
@@ -1,12 +1,13 @@
-''' app/init.py '''
 import logging
 import multiprocessing
 import os
 import socket
 import sys
 import threading
-import time
 from collections import deque
+from datetime import datetime
+
+from PyQt6.QtCore import QSettings

 from src.api.api_server import start_server
 from src.api.preview_manager import PreviewManager
@@ -15,30 +16,63 @@ from src.distributed_job_manager import DistributedJobManager
 from src.engines.engine_manager import EngineManager
 from src.render_queue import RenderQueue
 from src.utilities.config import Config
-from src.utilities.misc_helper import system_safe_path, current_system_cpu, current_system_os, current_system_os_version
+from src.utilities.misc_helper import (system_safe_path, current_system_cpu, current_system_os,
+                                       current_system_os_version, check_for_updates)
 from src.utilities.zeroconf_server import ZeroconfServer
+from version import APP_NAME, APP_VERSION, APP_REPO_NAME, APP_REPO_OWNER, APP_AUTHOR

 logger = logging.getLogger()


 def run(server_only=False) -> int:
-    """
-    Initializes the application and runs it.
+    """Initializes the application and runs it.
+
+    Args:
+        server_only: Run in server-only CLI mode. Default is False (runs in GUI mode).

     Returns:
         int: The exit status code.
     """
+
+    def existing_process(process_name):
+        import psutil
+        current_pid = os.getpid()
+        current_process = psutil.Process(current_pid)
+        for proc in psutil.process_iter(['pid', 'name', 'ppid']):
+            proc_name = proc.info['name'].lower().rstrip('.exe')
+            if proc_name == process_name.lower() and proc.info['pid'] != current_pid:
+                if proc.info['pid'] == current_process.ppid():
+                    continue  # parent process
+                elif proc.info['ppid'] == current_pid:
+                    continue  # child process
+                else:
+                    return proc  # unrelated process
+        return None
+
     # setup logging
     logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S',
                         level=Config.server_log_level.upper())
     logging.getLogger("requests").setLevel(logging.WARNING)  # suppress noisy requests/urllib3 logging
     logging.getLogger("urllib3").setLevel(logging.WARNING)

+    # check for existing instance
+    existing_proc = existing_process(APP_NAME)
+    if existing_proc:
+        logger.fatal(f"Another instance of {APP_NAME} is already running (pid: {existing_proc.pid})")
+        sys.exit(1)
+
     # Setup logging for console ui
     buffer_handler = __setup_buffer_handler() if not server_only else None

-    logger.info(f"Starting Zordon Render Server")
+    # check for updates
+    update_thread = threading.Thread(target=check_for_updates, args=(APP_REPO_NAME, APP_REPO_OWNER, APP_NAME,
+                                                                     APP_VERSION))
+    update_thread.start()
+
+    settings = QSettings(APP_AUTHOR, APP_NAME)
+
+    # main start
+    logger.info(f"Starting {APP_NAME} Render Server")
     return_code = 0
     try:
         # Load Config YAML
@@ -63,9 +97,16 @@ def run(server_only=False) -> int:
         ServerProxyManager.subscribe_to_listener()
         DistributedJobManager.subscribe_to_listener()

-        # check for updates for render engines if configured or on first launch
-        if Config.update_engines_on_launch or not EngineManager.get_engines():
-            EngineManager.update_all_engines()
+        # check for updates for render engines if configured
+        ignore_system = settings.value("engines_ignore_system_installs", False)
+        if settings.value('check_for_engine_updates_on_launch', False):
+            for engine in EngineManager.downloadable_engines():
+                if settings.value(f'engine_download-{engine.name()}', False):
+                    update_result = EngineManager.is_engine_update_available(engine, ignore_system_installs=ignore_system)
+                    EngineManager.download_engine(engine=engine.name(), version=update_result['version'],
+                                                  background=True,
+                                                  ignore_system=ignore_system)
+            settings.setValue("engines_last_update_time", datetime.now().isoformat())

         # get hostname
         local_hostname = socket.gethostname()
@@ -77,16 +118,14 @@ def run(server_only=False) -> int:
         api_server.start()

         # start zeroconf server
-        ZeroconfServer.configure("_zordon._tcp.local.", local_hostname, Config.port_number)
+        ZeroconfServer.configure(f"_{APP_NAME.lower()}._tcp.local.", local_hostname, Config.port_number)
         ZeroconfServer.properties = {'system_cpu': current_system_cpu(),
                                      'system_cpu_cores': multiprocessing.cpu_count(),
                                      'system_os': current_system_os(),
                                      'system_os_version': current_system_os_version()}
         ZeroconfServer.start()
-        logger.info(f"Zordon Render Server started - Hostname: {local_hostname}")
-        RenderQueue.evaluation_inverval = Config.queue_eval_seconds
-        RenderQueue.start()
+        logger.info(f"{APP_NAME} Render Server started - Hostname: {local_hostname}")
+        RenderQueue.start()  # Start evaluating the render queue

         # start in gui or server only (cli) mode
         logger.debug(f"Launching in {'server only' if server_only else 'GUI'} mode")
@@ -102,13 +141,13 @@ def run(server_only=False) -> int:
         return_code = 1
     finally:
         # shut down gracefully
-        logger.info(f"Zordon Render Server is preparing to shut down")
+        logger.info(f"{APP_NAME} Render Server is preparing to shut down")
         try:
             RenderQueue.prepare_for_shutdown()
         except Exception as e:
             logger.exception(f"Exception during prepare for shutdown: {e}")
         ZeroconfServer.stop()
-        logger.info(f"Zordon Render Server has shut down")
+        logger.info(f"{APP_NAME} Render Server has shut down")
     return sys.exit(return_code)


@@ -137,8 +176,8 @@ def __setup_buffer_handler():

     buffer_handler = BufferingHandler()
     buffer_handler.setFormatter(logging.getLogger().handlers[0].formatter)
-    logger = logging.getLogger()
-    logger.addHandler(buffer_handler)
+    new_logger = logging.getLogger()
+    new_logger.addHandler(buffer_handler)
     return buffer_handler
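The launch-time engine update above is now driven entirely by QSettings rather than the YAML Config, so nothing is downloaded unless the corresponding keys have been written; the settings window later in this diff is what normally writes them. A sketch of pre-seeding those keys, using the key names from the diff with illustrative values:

from PyQt6.QtCore import QSettings

from src.engines.engine_manager import EngineManager
from version import APP_AUTHOR, APP_NAME

settings = QSettings(APP_AUTHOR, APP_NAME)
settings.setValue('check_for_engine_updates_on_launch', True)
settings.setValue('engines_ignore_system_installs', False)
for engine in EngineManager.downloadable_engines():
    settings.setValue(f'engine_download-{engine.name()}', True)  # per-engine opt-in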
@@ -29,19 +29,37 @@ class RenderQueue:
     maximum_renderer_instances = {'blender': 1, 'aerender': 1, 'ffmpeg': 4}
     last_saved_counts = {}
     is_running = False
-    __eval_thread = None
-    evaluation_inverval = 1

     # --------------------------------------------
-    # Start / Stop Background Updates
+    # Render Queue Evaluation:
     # --------------------------------------------

     @classmethod
     def start(cls):
+        """Start evaluating the render queue"""
         logger.debug("Starting render queue updates")
         cls.is_running = True
         cls.evaluate_queue()

+    @classmethod
+    def evaluate_queue(cls):
+        try:
+            not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
+            for job in not_started:
+                if cls.is_available_for_job(job.renderer, job.priority):
+                    cls.start_job(job)
+
+            scheduled = cls.jobs_with_status(RenderStatus.SCHEDULED, priority_sorted=True)
+            for job in scheduled:
+                if job.scheduled_start <= datetime.now():
+                    logger.debug(f"Starting scheduled job: {job}")
+                    cls.start_job(job)
+
+            if cls.last_saved_counts != cls.job_counts():
+                cls.save_state()
+        except DetachedInstanceError:
+            pass
+
     @classmethod
     def __local_job_status_changed(cls, job_id, old_status, new_status):
         render_job = RenderQueue.job_with_id(job_id, none_ok=True)
@@ -55,20 +73,9 @@ class RenderQueue:
         cls.is_running = False

     # --------------------------------------------
-    # Queue Management
+    # Fetch Jobs:
     # --------------------------------------------

-    @classmethod
-    def add_to_render_queue(cls, render_job, force_start=False):
-        logger.info(f"Adding job to render queue: {render_job}")
-        cls.job_queue.append(render_job)
-        if cls.is_running and force_start and render_job.status in (RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED):
-            cls.start_job(render_job)
-        cls.session.add(render_job)
-        cls.save_state()
-        if cls.is_running:
-            cls.evaluate_queue()
-
     @classmethod
     def all_jobs(cls):
         return cls.job_queue
@@ -98,12 +105,15 @@ class RenderQueue:
         return found_job

     @classmethod
-    def clear_history(cls):
-        to_remove = [x for x in cls.all_jobs() if x.status in [RenderStatus.CANCELLED,
-                                                               RenderStatus.COMPLETED, RenderStatus.ERROR]]
-        for job_to_remove in to_remove:
-            cls.delete_job(job_to_remove)
-        cls.save_state()
+    def job_counts(cls):
+        job_counts = {}
+        for job_status in RenderStatus:
+            job_counts[job_status.value] = len(cls.jobs_with_status(job_status))
+        return job_counts
+
+    # --------------------------------------------
+    # Startup / Shutdown:
+    # --------------------------------------------

     @classmethod
     def load_state(cls, database_directory):
@@ -128,6 +138,16 @@ class RenderQueue:
         cls.save_state()
         cls.session.close()

+    # --------------------------------------------
+    # Renderer Availability:
+    # --------------------------------------------
+
+    @classmethod
+    def renderer_instances(cls):
+        from collections import Counter
+        all_instances = [x.renderer for x in cls.running_jobs()]
+        return Counter(all_instances)
+
     @classmethod
     def is_available_for_job(cls, renderer, priority=2):

@@ -137,24 +157,20 @@ class RenderQueue:
         maxed_out_instances = renderer in instances.keys() and instances[renderer] >= max_allowed_instances
         return not maxed_out_instances and not higher_priority_jobs

+    # --------------------------------------------
+    # Job Lifecycle Management:
+    # --------------------------------------------
+
     @classmethod
-    def evaluate_queue(cls):
-        try:
-            not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
-            for job in not_started:
-                if cls.is_available_for_job(job.renderer, job.priority):
-                    cls.start_job(job)
-
-            scheduled = cls.jobs_with_status(RenderStatus.SCHEDULED, priority_sorted=True)
-            for job in scheduled:
-                if job.scheduled_start <= datetime.now():
-                    logger.debug(f"Starting scheduled job: {job}")
-                    cls.start_job(job)
-
-            if cls.last_saved_counts != cls.job_counts():
-                cls.save_state()
-        except DetachedInstanceError:
-            pass
+    def add_to_render_queue(cls, render_job, force_start=False):
+        logger.info(f"Adding job to render queue: {render_job}")
+        cls.job_queue.append(render_job)
+        if cls.is_running and force_start and render_job.status in (RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED):
+            cls.start_job(render_job)
+        cls.session.add(render_job)
+        cls.save_state()
+        if cls.is_running:
+            cls.evaluate_queue()

     @classmethod
     def start_job(cls, job):
@@ -177,15 +193,14 @@ class RenderQueue:
         cls.save_state()
         return True

-    @classmethod
-    def renderer_instances(cls):
-        from collections import Counter
-        all_instances = [x.renderer for x in cls.running_jobs()]
-        return Counter(all_instances)
+    # --------------------------------------------
+    # Miscellaneous:
+    # --------------------------------------------

     @classmethod
-    def job_counts(cls):
-        job_counts = {}
-        for job_status in RenderStatus:
-            job_counts[job_status.value] = len(cls.jobs_with_status(job_status))
-        return job_counts
+    def clear_history(cls):
+        to_remove = [x for x in cls.all_jobs() if x.status in [RenderStatus.CANCELLED,
+                                                               RenderStatus.COMPLETED, RenderStatus.ERROR]]
+        for job_to_remove in to_remove:
+            cls.delete_job(job_to_remove)
+        cls.save_state()
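After this reshuffle, queue evaluation is triggered from start(), which runs an initial pass, and from add_to_render_queue(), which re-evaluates whenever a job is added while the queue is running, so submission stays a one-liner. A sketch, with the ffmpeg job arguments purely illustrative:

job = EngineManager.create_worker(renderer='ffmpeg', input_path='in.mov', output_path='out.mp4')
RenderQueue.add_to_render_queue(job)  # evaluate_queue() starts it once an ffmpeg slot is free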
@@ -20,14 +20,14 @@ from src.render_queue import RenderQueue
 from src.utilities.misc_helper import get_time_elapsed, resources_dir, is_localhost
 from src.utilities.status_utils import RenderStatus
 from src.utilities.zeroconf_server import ZeroconfServer
-from .add_job import NewRenderJobForm
-from .console import ConsoleWindow
-from .engine_browser import EngineBrowserWindow
-from .log_viewer import LogViewer
-from .widgets.menubar import MenuBar
-from .widgets.proportional_image_label import ProportionalImageLabel
-from .widgets.statusbar import StatusBar
-from .widgets.toolbar import ToolBar
+from src.ui.add_job import NewRenderJobForm
+from src.ui.console import ConsoleWindow
+from src.ui.engine_browser import EngineBrowserWindow
+from src.ui.log_viewer import LogViewer
+from src.ui.widgets.menubar import MenuBar
+from src.ui.widgets.proportional_image_label import ProportionalImageLabel
+from src.ui.widgets.statusbar import StatusBar
+from src.ui.widgets.toolbar import ToolBar
 from src.api.serverproxy_manager import ServerProxyManager
 from src.utilities.misc_helper import launch_url
481
src/ui/settings_window.py
Normal file
481
src/ui/settings_window.py
Normal file
@@ -0,0 +1,481 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import humanize
|
||||||
|
import socket
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from PyQt6 import QtCore
|
||||||
|
from PyQt6.QtCore import Qt, QSettings, pyqtSignal as Signal
|
||||||
|
from PyQt6.QtGui import QIcon
|
||||||
|
from PyQt6.QtWidgets import QApplication, QMainWindow, QListWidget, QListWidgetItem, QStackedWidget, QVBoxLayout, \
|
||||||
|
QWidget, QLabel, QCheckBox, QLineEdit, \
|
||||||
|
QComboBox, QPushButton, QHBoxLayout, QGroupBox, QTableWidget, QAbstractItemView, QTableWidgetItem, QHeaderView, \
|
||||||
|
QMessageBox
|
||||||
|
|
||||||
|
from api.server_proxy import RenderServerProxy
|
||||||
|
from engines.engine_manager import EngineManager
|
||||||
|
from utilities.config import Config
|
||||||
|
from utilities.misc_helper import launch_url, system_safe_path
|
||||||
|
from version import APP_AUTHOR, APP_NAME
|
||||||
|
|
||||||
|
settings = QSettings(APP_AUTHOR, APP_NAME)
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsWindow(QMainWindow):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__()
|
||||||
|
|
||||||
|
if not EngineManager.engines_path: # fix issue where sometimes path was not set
|
||||||
|
EngineManager.engines_path = system_safe_path(
|
||||||
|
os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
|
||||||
|
'engines')))
|
||||||
|
|
||||||
|
self.installed_engines_table = None
|
||||||
|
|
||||||
|
self.setWindowTitle("Settings")
|
||||||
|
|
||||||
|
# Create the main layout
|
||||||
|
main_layout = QVBoxLayout()
|
||||||
|
|
||||||
|
# Create the sidebar (QListWidget) for navigation
|
||||||
|
self.sidebar = QListWidget()
|
||||||
|
self.sidebar.setFixedWidth(150)
|
||||||
|
|
||||||
|
# Set the icon size
|
||||||
|
self.sidebar.setIconSize(QtCore.QSize(32, 32)) # Increase the icon size to 32x32 pixels
|
||||||
|
|
||||||
|
# Adjust the font size for the sidebar items
|
||||||
|
font = self.sidebar.font()
|
||||||
|
font.setPointSize(12) # Increase the font size
|
||||||
|
self.sidebar.setFont(font)
|
||||||
|
|
||||||
|
# Add items with icons to the sidebar
|
||||||
|
resources_dir = os.path.join(Path(__file__).resolve().parent.parent.parent, 'resources')
|
||||||
|
self.add_sidebar_item("General", os.path.join(resources_dir, "Gear.png"))
|
||||||
|
self.add_sidebar_item("Server", os.path.join(resources_dir, "Server.png"))
|
||||||
|
self.add_sidebar_item("Engines", os.path.join(resources_dir, "Blender.png"))
|
||||||
|
self.sidebar.setCurrentRow(0)
|
||||||
|
|
||||||
|
# Create the stacked widget to hold different settings pages
|
||||||
|
self.stacked_widget = QStackedWidget()
|
||||||
|
|
||||||
|
# Create pages for each section
|
||||||
|
general_page = self.create_general_page()
|
||||||
|
network_page = self.create_network_page()
|
||||||
|
engines_page = self.create_engines_page()
|
||||||
|
|
||||||
|
# Add pages to the stacked widget
|
||||||
|
self.stacked_widget.addWidget(general_page)
|
||||||
|
self.stacked_widget.addWidget(network_page)
|
||||||
|
self.stacked_widget.addWidget(engines_page)
|
||||||
|
|
||||||
|
# Connect the sidebar to the stacked widget
|
||||||
|
self.sidebar.currentRowChanged.connect(self.stacked_widget.setCurrentIndex)
|
||||||
|
|
||||||
|
# Create a horizontal layout to hold the sidebar and stacked widget
|
||||||
|
content_layout = QHBoxLayout()
|
||||||
|
content_layout.addWidget(self.sidebar)
|
||||||
|
content_layout.addWidget(self.stacked_widget)
|
||||||
|
|
||||||
|
# Add the content layout to the main layout
|
||||||
|
main_layout.addLayout(content_layout)
|
||||||
|
|
||||||
|
# Add the "OK" button at the bottom
|
||||||
|
ok_button = QPushButton("OK")
|
||||||
|
ok_button.clicked.connect(self.close)
|
||||||
|
ok_button.setFixedWidth(80)
|
||||||
|
ok_button.setDefault(True)
|
||||||
|
main_layout.addWidget(ok_button, alignment=Qt.AlignmentFlag.AlignRight)
|
||||||
|
|
||||||
|
# Create a central widget and set the layout
|
||||||
|
central_widget = QWidget()
|
||||||
|
central_widget.setLayout(main_layout)
|
||||||
|
self.setCentralWidget(central_widget)
|
||||||
|
|
||||||
|
self.setMinimumSize(700, 400)
|
||||||
|
|
||||||
|
def add_sidebar_item(self, name, icon_path):
|
||||||
|
"""Add an item with an icon to the sidebar."""
|
||||||
|
item = QListWidgetItem(QIcon(icon_path), name)
|
||||||
|
self.sidebar.addItem(item)
|
||||||
|
|
||||||
|
def create_general_page(self):
|
||||||
|
"""Create the General settings page."""
|
||||||
|
page = QWidget()
|
||||||
|
layout = QVBoxLayout()
|
||||||
|
|
||||||
|
# Startup Settings Group
|
||||||
|
startup_group = QGroupBox("Startup Settings")
|
||||||
|
startup_layout = QVBoxLayout()
|
||||||
|
# startup_layout.addWidget(QCheckBox("Start application on system startup"))
|
||||||
|
check_for_updates_checkbox = QCheckBox("Check for updates automatically")
|
||||||
|
check_for_updates_checkbox.setChecked(settings.value("auto_check_for_updates", True))
|
||||||
|
check_for_updates_checkbox.stateChanged.connect(lambda state: settings.setValue("auto_check_for_updates", bool(state)))
|
||||||
|
startup_layout.addWidget(check_for_updates_checkbox)
|
||||||
|
startup_group.setLayout(startup_layout)
|
||||||
|
|
||||||
|
# Local Files Group
|
||||||
|
data_path = os.path.expanduser(Config.upload_folder)
|
||||||
|
path_size = sum(f.stat().st_size for f in Path(data_path).rglob('*') if f.is_file())
|
||||||
|
database_group = QGroupBox("Local Files")
|
||||||
|
database_layout = QVBoxLayout()
|
||||||
|
database_layout.addWidget(QLabel(f"Local Directory: {data_path}"))
|
||||||
|
database_layout.addWidget(QLabel(f"Size: {humanize.naturalsize(path_size, binary=True)}"))
|
||||||
|
open_database_path_button = QPushButton("Open Directory")
|
||||||
|
open_database_path_button.clicked.connect(lambda: launch_url(data_path))
|
||||||
|
open_database_path_button.setFixedWidth(200)
|
||||||
|
database_layout.addWidget(open_database_path_button)
|
||||||
|
database_group.setLayout(database_layout)
|
||||||
|
|
||||||
|
|
||||||
|
# Render Settings Group
|
||||||
|
render_settings_group = QGroupBox("Render Settings")
|
||||||
|
render_settings_layout = QVBoxLayout()
|
||||||
|
render_settings_layout.addWidget(QLabel("Restrict to render nodes with same:"))
|
||||||
|
require_same_engine_checkbox = QCheckBox("Renderer Version")
|
||||||
|
require_same_engine_checkbox.setChecked(settings.value("render_require_same_engine_version"))
|
||||||
|
require_same_engine_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_engine_version", bool(state)))
|
||||||
|
render_settings_layout.addWidget(require_same_engine_checkbox)
|
||||||
|
require_same_cpu_checkbox = QCheckBox("CPU Architecture")
|
||||||
|
require_same_cpu_checkbox.setChecked(settings.value("render_require_same_cpu_type"))
|
||||||
|
require_same_cpu_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_cpu_type", bool(state)))
|
||||||
|
render_settings_layout.addWidget(require_same_cpu_checkbox)
|
||||||
|
require_same_os_checkbox = QCheckBox("Operating System")
|
||||||
|
require_same_os_checkbox.setChecked(settings.value("render_require_same_os"))
|
||||||
|
require_same_os_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_os", bool(state)))
|
||||||
|
render_settings_layout.addWidget(require_same_os_checkbox)
|
||||||
|
render_settings_group.setLayout(render_settings_layout)
|
||||||
|
|
||||||
|
layout.addWidget(startup_group)
|
||||||
|
layout.addWidget(database_group)
|
||||||
|
layout.addWidget(render_settings_group)
|
||||||
|
|
||||||
|
layout.addStretch() # Add a stretch to push content to the top
|
||||||
|
page.setLayout(layout)
|
||||||
|
return page

    def create_network_page(self):
        """Create the Network settings page."""
        page = QWidget()
        layout = QVBoxLayout()

        # Sharing Settings Group
        sharing_group = QGroupBox("Sharing Settings")
        sharing_layout = QVBoxLayout()

        enable_sharing_checkbox = QCheckBox("Enable other computers on the network to render to this machine")
        enable_sharing_checkbox.setChecked(settings.value("enable_network_sharing", False))
        enable_sharing_checkbox.stateChanged.connect(self.toggle_render_sharing)
        sharing_layout.addWidget(enable_sharing_checkbox)

        password_layout = QHBoxLayout()
        password_layout.setContentsMargins(0, 0, 0, 0)
        self.enable_network_password_checkbox = QCheckBox("Enable network password:")
        self.enable_network_password_checkbox.setChecked(settings.value("enable_network_password", False))
        self.enable_network_password_checkbox.stateChanged.connect(self.enable_network_password_changed)
        sharing_layout.addWidget(self.enable_network_password_checkbox)
        self.network_password_line = QLineEdit()
        self.network_password_line.setPlaceholderText("Enter a password")
        self.network_password_line.setEchoMode(QLineEdit.EchoMode.Password)
        self.network_password_line.setEnabled(settings.value("enable_network_password", False))
        password_layout.addWidget(self.network_password_line)
        self.show_password_button = QPushButton("Show")
        self.show_password_button.setEnabled(settings.value("enable_network_password", False))
        self.show_password_button.clicked.connect(self.show_password_button_pressed)
        password_layout.addWidget(self.show_password_button)
        sharing_layout.addLayout(password_layout)

        sharing_group.setLayout(sharing_layout)

        layout.addWidget(sharing_group)

        layout.addStretch()  # Add a stretch to push content to the top
        page.setLayout(layout)
        return page

    def toggle_render_sharing(self, enable_sharing):
        settings.setValue("enable_network_sharing", enable_sharing)
        self.enable_network_password_checkbox.setEnabled(enable_sharing)
        enable_password = enable_sharing and settings.value("enable_network_password", False)
        self.network_password_line.setEnabled(enable_password)
        self.show_password_button.setEnabled(enable_password)

    def enable_network_password_changed(self, new_value):
        settings.setValue("enable_network_password", new_value)
        self.network_password_line.setEnabled(new_value)
        self.show_password_button.setEnabled(new_value)

    def show_password_button_pressed(self):
        # toggle showing / hiding the password
        show_pass = self.show_password_button.text() == "Show"
        self.show_password_button.setText("Hide" if show_pass else "Show")
        self.network_password_line.setEchoMode(QLineEdit.EchoMode.Normal if show_pass else QLineEdit.EchoMode.Password)
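
    # Illustrative sketch (assumption, not part of the original change): depending on the QSettings
    # backend, stored booleans can come back as the strings "true"/"false", so a small coercion
    # helper like this hypothetical one can make the checkbox initialization above more robust.
    @staticmethod
    def _settings_bool(key, default=False):
        value = settings.value(key, default)
        if isinstance(value, str):
            return value.lower() in ("1", "true", "yes")
        return bool(value)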

    def create_engines_page(self):
        """Create the Engines settings page."""
        page = QWidget()
        layout = QVBoxLayout()

        # Installed Engines Group
        installed_group = QGroupBox("Installed Engines")
        installed_layout = QVBoxLayout()

        # Setup table
        self.installed_engines_table = EngineTableWidget()
        self.installed_engines_table.row_selected.connect(self.engine_table_selected)
        installed_layout.addWidget(self.installed_engines_table)

        # Ignore system installs
        engine_ignore_system_installs_checkbox = QCheckBox("Ignore system installs")
        engine_ignore_system_installs_checkbox.setChecked(settings.value("engines_ignore_system_installs", False))
        engine_ignore_system_installs_checkbox.stateChanged.connect(self.change_ignore_system_installs)
        installed_layout.addWidget(engine_ignore_system_installs_checkbox)

        # Engine Launch / Delete buttons
        installed_buttons_layout = QHBoxLayout()
        self.launch_engine_button = QPushButton("Launch")
        self.launch_engine_button.setEnabled(False)
        self.launch_engine_button.clicked.connect(self.launch_selected_engine)
        self.delete_engine_button = QPushButton("Delete")
        self.delete_engine_button.setEnabled(False)
        self.delete_engine_button.clicked.connect(self.delete_selected_engine)

        installed_buttons_layout.addWidget(self.launch_engine_button)
        installed_buttons_layout.addWidget(self.delete_engine_button)
        installed_layout.addLayout(installed_buttons_layout)
        installed_group.setLayout(installed_layout)

        # Engine Updates Group
        engine_updates_group = QGroupBox("Auto-Install")
        engine_updates_layout = QVBoxLayout()

        engine_download_layout = QHBoxLayout()
        engine_download_layout.addWidget(QLabel("Enable Downloads for:"))

        at_least_one_downloadable = False
        for engine in EngineManager.downloadable_engines():
            engine_download_check = QCheckBox(engine.name())
            is_checked = settings.value(f"engine_download-{engine.name()}", False)
            at_least_one_downloadable |= is_checked
            engine_download_check.setChecked(is_checked)
            # Capture the checkbox correctly using a default argument in lambda
            engine_download_check.clicked.connect(
                lambda state, checkbox=engine_download_check: self.engine_download_settings_changed(state, checkbox.text())
            )
            engine_download_layout.addWidget(engine_download_check)

        engine_updates_layout.addLayout(engine_download_layout)

        # Stored on self so engine_download_settings_changed() can re-enable it later
        self.check_for_engine_updates_checkbox = QCheckBox("Check for new versions on launch")
        self.check_for_engine_updates_checkbox.setChecked(settings.value('check_for_engine_updates_on_launch', True))
        self.check_for_engine_updates_checkbox.setEnabled(at_least_one_downloadable)
        self.check_for_engine_updates_checkbox.stateChanged.connect(
            lambda state: settings.setValue("check_for_engine_updates_on_launch", bool(state)))
        engine_updates_layout.addWidget(self.check_for_engine_updates_checkbox)
        self.engines_last_update_label = QLabel()
        self.update_last_checked_label()
        self.engines_last_update_label.setEnabled(at_least_one_downloadable)
        engine_updates_layout.addWidget(self.engines_last_update_label)
        self.check_for_new_engines_button = QPushButton("Check for New Versions...")
        self.check_for_new_engines_button.setEnabled(at_least_one_downloadable)
        self.check_for_new_engines_button.clicked.connect(self.check_for_new_engines)
        engine_updates_layout.addWidget(self.check_for_new_engines_button)
        engine_updates_group.setLayout(engine_updates_layout)

        layout.addWidget(installed_group)
        layout.addWidget(engine_updates_group)

        layout.addStretch()  # Add a stretch to push content to the top
        page.setLayout(layout)
        return page

    def change_ignore_system_installs(self, value):
        settings.setValue("engines_ignore_system_installs", bool(value))
        self.installed_engines_table.update_table()

    def update_last_checked_label(self):
        """Retrieve the last check timestamp and show it in the label as a human-friendly string."""
        last_checked_str = settings.value("engines_last_update_time", None)
        if not last_checked_str:
            time_string = "Never"
        else:
            last_checked_dt = datetime.fromisoformat(last_checked_str)
            now = datetime.now()
            time_string = humanize.naturaltime(now - last_checked_dt)
        self.engines_last_update_label.setText(f"Last Updated: {time_string}")
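        # Example (illustrative): humanize.naturaltime() turns the elapsed time into text such as
        # "4 minutes ago" or "2 days ago", which is what the label above displays.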

    def engine_download_settings_changed(self, state, engine_name):
        settings.setValue(f"engine_download-{engine_name}", state)

        at_least_one_downloadable = False
        for engine in EngineManager.downloadable_engines():
            at_least_one_downloadable |= settings.value(f"engine_download-{engine.name()}", False)
        self.check_for_new_engines_button.setEnabled(at_least_one_downloadable)
        self.check_for_engine_updates_checkbox.setEnabled(at_least_one_downloadable)
        self.engines_last_update_label.setEnabled(at_least_one_downloadable)

    def delete_selected_engine(self):
        engine_info = self.installed_engines_table.selected_engine_data()
        reply = QMessageBox.question(self, f"Delete {engine_info['engine']} {engine_info['version']}?",
                                     f"Do you want to delete {engine_info['engine']} {engine_info['version']}?",
                                     QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)

        if reply != QMessageBox.StandardButton.Yes:
            return

        delete_result = EngineManager.delete_engine_download(engine_info.get('engine'),
                                                             engine_info.get('version'),
                                                             engine_info.get('system_os'),
                                                             engine_info.get('cpu'))

        if delete_result:
            QMessageBox.information(self, f"{engine_info['engine']} {engine_info['version']} Deleted",
                                    f"{engine_info['engine']} {engine_info['version']} deleted successfully",
                                    QMessageBox.StandardButton.Ok)
        else:
            QMessageBox.warning(self, "Unknown Error",
                                f"Unknown error while deleting {engine_info['engine']} {engine_info['version']}.",
                                QMessageBox.StandardButton.Ok)

        self.installed_engines_table.update_table(use_cached=False)

    def launch_selected_engine(self):
        engine_info = self.installed_engines_table.selected_engine_data()
        if engine_info:
            launch_url(engine_info['path'])

    def engine_table_selected(self):
        engine_data = self.installed_engines_table.selected_engine_data()
        if engine_data:
            self.launch_engine_button.setEnabled(bool(engine_data.get('path')))
            self.delete_engine_button.setEnabled(engine_data.get('type') == 'managed')
        else:
            self.launch_engine_button.setEnabled(False)
            self.delete_engine_button.setEnabled(False)

    def check_for_new_engines(self):

        ignore_system = settings.value("engines_ignore_system_installs", False)
        messagebox_shown = False
        for engine in EngineManager.downloadable_engines():
            if settings.value(f'engine_download-{engine.name()}', False):
                result = EngineManager.is_engine_update_available(engine, ignore_system_installs=ignore_system)
                if result:
                    result['name'] = engine.name()
                    msg_box = QMessageBox()
                    msg_box.setWindowTitle(f"{result['name']} ({result['version']}) Available")
                    msg_box.setText(f"A new version of {result['name']} is available ({result['version']}).\n\n"
                                    f"Would you like to download it now?")
                    msg_box.setIcon(QMessageBox.Icon.Question)
                    msg_box.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)
                    msg_result = msg_box.exec()
                    messagebox_shown = True
                    if msg_result == QMessageBox.StandardButton.Yes:
                        EngineManager.download_engine(engine=engine.name(), version=result['version'], background=True,
                                                      ignore_system=ignore_system)
                        self.update_engine_download_status()

        if not messagebox_shown:
            msg_box = QMessageBox()
            msg_box.setWindowTitle("No Updates Available")
            msg_box.setText("All your render engines are up-to-date.")
            msg_box.setIcon(QMessageBox.Icon.Information)
            msg_box.setStandardButtons(QMessageBox.StandardButton.Ok)
            msg_box.exec()

        settings.setValue("engines_last_update_time", datetime.now().isoformat())
        self.update_engine_download_status()

    def update_engine_download_status(self):
        running_tasks = [x for x in EngineManager.download_tasks if x.is_alive()]
        if not running_tasks:
            self.update_last_checked_label()
            return

        self.engines_last_update_label.setText(f"Downloading {running_tasks[0].engine} ({running_tasks[0].version})...")
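
    # Illustrative sketch (assumption, not part of the original change): the label above only
    # refreshes when update_engine_download_status() is called, so the window could poll it while
    # a background download runs, e.g. with a QTimer from PyQt's QtCore module:
    #     self._download_poll_timer = QTimer(self)
    #     self._download_poll_timer.timeout.connect(self.update_engine_download_status)
    #     self._download_poll_timer.start(2000)  # refresh every 2 seconds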


class EngineTableWidget(QWidget):
    row_selected = Signal()

    def __init__(self):
        super().__init__()

        self.table = QTableWidget(0, 4)
        self.table.setHorizontalHeaderLabels(["Engine", "Version", "Type", "Path"])
        self.table.setSelectionBehavior(QAbstractItemView.SelectionBehavior.SelectRows)
        self.table.verticalHeader().setVisible(False)
        # self.table_widget.itemSelectionChanged.connect(self.engine_picked)
        self.table.setEditTriggers(QAbstractItemView.EditTrigger.NoEditTriggers)
        self.table.selectionModel().selectionChanged.connect(self.on_selection_changed)

        layout = QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(0)
        layout.addWidget(self.table)

        self.raw_server_data = None

    def showEvent(self, event):
        """Runs when the widget is about to be shown."""
        self.update_table()
        super().showEvent(event)  # Ensure normal event processing

    def update_table(self, use_cached=True):
        if not self.raw_server_data or not use_cached:
            self.raw_server_data = RenderServerProxy(socket.gethostname()).get_renderer_info()
            if not self.raw_server_data:
                return

        table_data = []  # convert the data into a flat list
        for _, engine_data in self.raw_server_data.items():
            table_data.extend(engine_data['versions'])

        if settings.value("engines_ignore_system_installs", False):
            table_data = [x for x in table_data if x['type'] != 'system']

        self.table.clear()
        self.table.setRowCount(len(table_data))
        self.table.setColumnCount(4)

        self.table.setHorizontalHeaderLabels(['Engine', 'Version', 'Type', 'Path'])
        self.table.horizontalHeader().setSectionResizeMode(0, QHeaderView.ResizeMode.Fixed)
        self.table.horizontalHeader().setSectionResizeMode(1, QHeaderView.ResizeMode.Fixed)
        self.table.horizontalHeader().setSectionResizeMode(2, QHeaderView.ResizeMode.Fixed)
        self.table.horizontalHeader().setSectionResizeMode(3, QHeaderView.ResizeMode.Stretch)

        for row, engine in enumerate(table_data):
            self.table.setItem(row, 0, QTableWidgetItem(engine['engine']))
            self.table.setItem(row, 1, QTableWidgetItem(engine['version']))
            self.table.setItem(row, 2, QTableWidgetItem(engine['type']))
            self.table.setItem(row, 3, QTableWidgetItem(engine['path']))

        self.table.selectRow(0)

    def selected_engine_data(self):
        """Returns the data from the selected row as a dictionary."""
        row = self.table.currentRow()  # Get the selected row index

        if row < 0 or not len(self.table.selectedItems()):  # No row selected
            return None

        data = {
            "engine": self.table.item(row, 0).text(),
            "version": self.table.item(row, 1).text(),
            "type": self.table.item(row, 2).text(),
            "path": self.table.item(row, 3).text(),
        }

        return data

    def on_selection_changed(self):
        self.row_selected.emit()
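
# Usage sketch (illustrative, not from the original change): EngineTableWidget is embedded in the
# Engines settings page above, but on its own it can be exercised roughly like this, assuming a
# local render server is running so update_table() has data to display:
#     table = EngineTableWidget()
#     table.row_selected.connect(lambda: print(table.selected_engine_data()))
#     table.show()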


if __name__ == "__main__":
    app = QApplication([])
    window = SettingsWindow()
    window.show()
    app.exec()
@@ -1,8 +1,6 @@
 ''' app/ui/widgets/menubar.py '''
-import sys
-
 from PyQt6.QtGui import QAction
-from PyQt6.QtWidgets import QMenuBar, QApplication
+from PyQt6.QtWidgets import QMenuBar, QApplication, QMessageBox, QDialog, QVBoxLayout, QLabel, QPushButton


 class MenuBar(QMenuBar):
@@ -16,6 +14,8 @@ class MenuBar(QMenuBar):
     def __init__(self, parent=None) -> None:
         super().__init__(parent)

+        self.settings_window = None
+
         # setup menus
         file_menu = self.addMenu("File")
         # edit_menu = self.addMenu("Edit")
@@ -32,7 +32,7 @@ class MenuBar(QMenuBar):
         settings_action = QAction("Settings...", self)
         settings_action.triggered.connect(self.show_settings)
         settings_action.setShortcut(f'Ctrl+,')
-        # file_menu.addAction(settings_action)  # todo: enable once we have a setting screen
+        file_menu.addAction(settings_action)
         # exit
         exit_action = QAction('&Exit', self)
         exit_action.setShortcut('Ctrl+Q')
@@ -43,15 +43,63 @@ class MenuBar(QMenuBar):
         about_action = QAction("About", self)
         about_action.triggered.connect(self.show_about)
         help_menu.addAction(about_action)

+        update_action = QAction("Check for Updates...", self)
+        update_action.triggered.connect(self.check_for_updates)
+        help_menu.addAction(update_action)
+
     def new_job(self):
         self.parent().new_job()

     def show_settings(self):
-        pass
+        from src.ui.settings_window import SettingsWindow
+        self.settings_window = SettingsWindow()
+        self.settings_window.show()

     @staticmethod
     def show_about():
         from src.ui.about_window import AboutDialog
         dialog = AboutDialog()
         dialog.exec()
+
+    @staticmethod
+    def check_for_updates():
+        from src.utilities.misc_helper import check_for_updates
+        from version import APP_NAME, APP_VERSION, APP_REPO_NAME, APP_REPO_OWNER
+        found_update = check_for_updates(APP_REPO_NAME, APP_REPO_OWNER, APP_NAME, APP_VERSION)
+        if found_update:
+            dialog = UpdateDialog(found_update, APP_VERSION)
+            dialog.exec()
+        else:
+            QMessageBox.information(None, "No Update", "No updates available.")
+
+
+class UpdateDialog(QDialog):
+    def __init__(self, release_info, current_version, parent=None):
+        super().__init__(parent)
+        self.setWindowTitle(f"Update Available ({current_version} -> {release_info['tag_name']})")
+
+        layout = QVBoxLayout()
+        label = QLabel(f"A new version ({release_info['tag_name']}) is available! Current version: {current_version}")
+        layout.addWidget(label)
+
+        # Label to show the release notes
+        description = QLabel(release_info["body"])
+        layout.addWidget(description)
+
+        # Button to download the latest version
+        download_button = QPushButton(f"Download Latest Version ({release_info['tag_name']})")
+        download_button.clicked.connect(lambda: self.open_url(release_info["html_url"]))
+        layout.addWidget(download_button)
+
+        # OK button to dismiss the dialog
+        ok_button = QPushButton("Dismiss")
+        ok_button.clicked.connect(self.accept)  # Close the dialog when clicked
+        layout.addWidget(ok_button)
+
+        self.setLayout(layout)
+
+    def open_url(self, url):
+        from PyQt6.QtCore import QUrl
+        from PyQt6.QtGui import QDesktopServices
+        QDesktopServices.openUrl(QUrl(url))
+        self.accept()
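
# Note (illustrative observation, not part of the diff): the GitHub releases API returns the
# release notes in release_info["body"] as Markdown text; QLabel shows it as plain text, so
# longer notes may read better with wrapping enabled, e.g.:
#     description.setWordWrap(True)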
@@ -159,6 +159,33 @@ def copy_directory_contents(src_dir, dst_dir):
         shutil.copy2(src_path, dst_path)


+def check_for_updates(repo_name, repo_owner, app_name, current_version):
+    def get_github_releases(owner, repo):
+        import requests
+        url = f"https://api.github.com/repos/{owner}/{repo}/releases"
+        try:
+            response = requests.get(url, timeout=3)
+            response.raise_for_status()
+            releases = response.json()
+            return releases
+        except Exception as e:
+            logger.error(f"Error checking for updates: {e}")
+            return []
+
+    releases = get_github_releases(repo_owner, repo_name)
+    if not releases:
+        return
+
+    latest_version = releases[0]
+    latest_version_tag = latest_version['tag_name']
+
+    from packaging import version
+    if version.parse(latest_version_tag) > version.parse(current_version):
+        logger.info(f"Newer version of {app_name} available. "
+                    f"Latest: {latest_version_tag}, Current: {current_version}")
+        return latest_version
+
+
 def is_localhost(comparison_hostname):
     # this is necessary because socket.gethostname() does not always include '.local' - This is a sanitized comparison
     try:
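
# Usage sketch (illustrative, not part of the change above): check_for_updates() returns the
# newest release dict only when its tag is newer than the running version, so a caller can do
# roughly the following with the APP_* constants added to version.py below:
#     from version import APP_NAME, APP_VERSION, APP_REPO_NAME, APP_REPO_OWNER
#     release = check_for_updates(APP_REPO_NAME, APP_REPO_OWNER, APP_NAME, APP_VERSION)
#     if release:
#         print(f"Update available: {release['tag_name']} ({release['html_url']})")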
@@ -4,3 +4,5 @@ APP_AUTHOR = "Brett Williams"
 APP_DESCRIPTION = "Distributed Render Farm Tools"
 APP_COPYRIGHT_YEAR = "2024"
 APP_LICENSE = "MIT License"
+APP_REPO_NAME = APP_NAME
+APP_REPO_OWNER = "blw1138"