2 Commits

Author SHA1 Message Date
Brett Williams a464910426 Always add render engine to Blender args 2024-08-08 23:20:38 -05:00
Brett Williams a4ff36ac56 Add render engines to system_info 2024-08-08 23:13:31 -05:00
53 changed files with 1607 additions and 3453 deletions
-11
View File
@@ -1,11 +0,0 @@
[flake8]
exclude =
src/engines/aerender
.git
build
dist
*.egg
venv
.venv
max-complexity = 10
max-line-length = 127
-38
View File
@@ -1,38 +0,0 @@
name: Create Executables
on:
workflow_dispatch:
release:
- types: [created]
jobs:
pyinstaller-build-windows:
runs-on: windows-latest
steps:
- name: Create Executables (Windows)
uses: sayyid5416/pyinstaller@v1
with:
python_ver: '3.11'
spec: 'client.spec'
requirements: 'requirements.txt'
upload_exe_with_name: 'Zordon'
pyinstaller-build-linux:
runs-on: ubuntu-latest
steps:
- name: Create Executables (Linux)
uses: sayyid5416/pyinstaller@v1
with:
python_ver: '3.11'
spec: 'client.spec'
requirements: 'requirements.txt'
upload_exe_with_name: 'Zordon'
pyinstaller-build-macos:
runs-on: macos-latest
steps:
- name: Create Executables (macOS)
uses: sayyid5416/pyinstaller@v1
with:
python_ver: '3.11'
spec: 'client.spec'
requirements: 'requirements.txt'
upload_exe_with_name: 'Zordon'
+23
View File
@@ -0,0 +1,23 @@
name: Pylint
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pylint
- name: Analysing the code with pylint
run: |
pylint $(git ls-files '*.py')
+3 -4
View File
@@ -34,7 +34,6 @@ jobs:
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
continue-on-error: false - name: Test with pytest
# - name: Test with pytest run: |
# run: | pytest
# pytest
-8
View File
@@ -6,11 +6,3 @@
/dist/ /dist/
/build/ /build/
/.github/ /.github/
*.idea
.DS_Store
/venv/
.env
venv/
/.eggs/
/.ai/
/.github/
-1
View File
@@ -1,5 +1,4 @@
[MASTER] [MASTER]
max-line-length = 120 max-line-length = 120
ignore-paths=^src/engines/aerender/
[MESSAGES CONTROL] [MESSAGES CONTROL]
disable = missing-docstring, invalid-name, import-error, logging-fstring-interpolation disable = missing-docstring, invalid-name, import-error, logging-fstring-interpolation
+6 -38
View File
@@ -1,46 +1,14 @@
![Zordon Screenshot](docs/screenshot.png) # 🎬 Zordon - Render Management Tools
--- Welcome to Zordon! It's a local network render farm manager, aiming to streamline and simplify the rendering process across multiple home computers.
# Zordon ## 📦 Installation
A lightweight, zero-install, distributed rendering and management tool designed to streamline and optimize rendering workflows across multiple machines Install the necessary dependencies: `pip3 install -r requirements.txt`
## What is Zordon? ## 🎨 Supported Renderers
Zordon is tool designed for small render farms, such as those used in home studios or small businesses, to efficiently manage and run render jobs for Blender, FFMPEG, and other video renderers. It simplifies the process of distributing rendering tasks across multiple available machines, optimizing the rendering workflow for artists, animators, and video professionals. Zordon currently supports the following renderers:
Notice: This should be considered a beta and is meant for casual / hobbyist use. Do not use in mission-critical environments!
## Supported Renderers
Zordon supports or plans to support the following renderers:
- **Blender** - **Blender**
- **FFMPEG** - **FFMPEG**
- **Adobe After Effects** ([coming soon](https://github.com/blw1138/Zordon/issues/84))
- **Cinema 4D** ([planned](https://github.com/blw1138/Zordon/issues/105))
- **Autodesk Maya** ([planned](https://github.com/blw1138/Zordon/issues/106))
## System Requirements
- Windows 10 or later
- macOS Ventura (13.0) or later
- Linux (Supported versions TBD)
## Build using Pyinstaller
Zordon is regularly tested with Python 3.11 and later. It's packaged and distributed with pyinstaller. It is supported on Windows, macOS and Linux.
```
git clone https://github.com/blw1138/Zordon.git
pip3 install -r requirements.txt
pip3 install pyinstaller
pip3 install pyinstaller_versionfile
pyinstaller main.spec
```
## License
Zordon is licensed under the MIT License. See the [LICENSE](LICENSE.txt) file for more details.
-131
View File
@@ -1,131 +0,0 @@
#!/usr/bin/env python3
import argparse
import logging
import os
import socket
import sys
import time
from server import ZordonServer
from src.api.serverproxy_manager import ServerProxyManager
logger = logging.getLogger()
def main():
    """CLI entry point: parse arguments, validate them, and submit a render job.

    Builds a job description from the command line, prints a summary, and
    (unless --dry-run) uploads the scene file to a reachable Zordon server,
    starting an ad-hoc local server when none is found. Blocks until the
    render completes, printing live progress.

    Exits with status 1 on validation or submission failure.
    """
    parser = argparse.ArgumentParser(
        description="Zordon CLI tool for preparing/submitting a render job",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    # Required arguments
    parser.add_argument("scene_file", help="Path to the scene file (e.g., .blend, .max, .mp4)")
    parser.add_argument("engine", help="Desired render engine", choices=['blender', 'ffmpeg'])
    # Frame range
    parser.add_argument("--start", type=int, default=1, help="Start frame")
    parser.add_argument("--end", type=int, default=1, help="End frame")
    # Job metadata
    parser.add_argument("--name", default=None, help="Job name")
    # Output
    parser.add_argument("--output", default="", help="Output path/pattern (e.g., /renders/frame_####.exr)")
    # Target OS and Engine Version
    parser.add_argument(
        "--os",
        choices=["any", "windows", "linux", "macos"],
        default="any",
        help="Target operating system for render workers"
    )
    parser.add_argument(
        "--engine-version",
        default="latest",
        help="Required renderer/engine version number (e.g., '4.2', '5.0')"
    )
    # Optional flags
    parser.add_argument("--dry-run", action="store_true", help="Print job details without submitting")
    args = parser.parse_args()

    # Basic validation
    if not os.path.exists(args.scene_file):
        print(f"Error: Scene file '{args.scene_file}' not found!", file=sys.stderr)
        sys.exit(1)
    if args.start > args.end:
        print("Error: Start frame cannot be greater than end frame!", file=sys.stderr)
        sys.exit(1)

    # Inclusive frame count (both start and end frames are rendered)
    total_frames = args.end - args.start + 1
    job_name = args.name or os.path.basename(args.scene_file)
    file_path = os.path.abspath(args.scene_file)

    # Print job summary
    print("Render Job Summary:")
    print(f"  Job Name       : {job_name}")
    print(f"  Scene File     : {file_path}")
    print(f"  Engine         : {args.engine}")
    # bugfix: frame count was fused onto the range ("1-10 frames"); separate it
    print(f"  Frames         : {args.start}-{args.end} ({total_frames} frames)")
    print(f"  Output Path    : {args.output or '(default from scene)'}")
    print(f"  Target OS      : {args.os}")
    print(f"  Engine Version : {args.engine_version}")
    if args.dry_run:
        print("\nDry run complete (no submission performed).")
        return

    # Find a server on the local network; fall back to an ad-hoc local server
    local_hostname = socket.gethostname()
    if not local_hostname.endswith(".local"):
        local_hostname += ".local"
    found_proxy = ServerProxyManager.get_proxy_for_hostname(local_hostname)
    is_connected = found_proxy.check_connection()
    adhoc_server = None
    if not is_connected:
        adhoc_server = ZordonServer()
        adhoc_server.start_server()
        found_proxy = ServerProxyManager.get_proxy_for_hostname(adhoc_server.server_hostname)
    while not is_connected:
        # todo: add timeout
        is_connected = found_proxy.check_connection()
        time.sleep(1)

    new_job = {"name": job_name, "engine_name": args.engine}
    try:
        response = found_proxy.post_job_to_server(file_path, new_job)
    except Exception as e:
        print(f"Error creating job: {e}", file=sys.stderr)
        # bugfix: use sys.exit, not the interactive-only exit() builtin
        # (exit() may be missing in frozen/pyinstaller builds)
        sys.exit(1)

    if response and response.ok:
        print(f"Uploaded to {found_proxy.hostname} successfully!")
        running_job_data = response.json()
        job_id = running_job_data.get('id')
        print(f"Job {job_id} Summary:")
        print(f"  Status : {running_job_data.get('status')}")
        print(f"  Engine : {running_job_data.get('engine_name')}-{running_job_data.get('engine_version')}")
        print("\nWaiting for render to complete...")
        percent_complete = 0.0
        while percent_complete < 1.0:
            # add checks for errors
            time.sleep(1)
            running_job_data = found_proxy.get_job_info(job_id)
            percent_complete = running_job_data['percent_complete']
            sys.stdout.write("\x1b[1A")  # Move up 1
            sys.stdout.write("\x1b[0J")  # Clear from cursor to end of screen (optional)
            print(f"Percent Complete: {percent_complete:.2%}")
            sys.stdout.flush()
        print("Finished rendering successfully!")
    else:
        print(f"Failed to upload job. {response.text} !")
    if adhoc_server:
        adhoc_server.stop_server()
-77
View File
@@ -1,77 +0,0 @@
#!/usr/bin/env python3
import logging
import threading
from collections import deque
from server import ZordonServer
logger = logging.getLogger()
def __setup_buffer_handler():
    """Attach a bounded, Qt-signal-emitting log handler to the root logger.

    The handler keeps the most recent messages in a fixed-size ring buffer and
    emits each formatted record via a Qt signal so the GUI can display it live.
    Returns the installed handler.
    """
    # lazy load GUI frameworks
    from PyQt6.QtCore import QObject, pyqtSignal

    class BufferingHandler(logging.Handler, QObject):
        new_record = pyqtSignal(str)
        flushOnClose = True

        def __init__(self, capacity=100):
            logging.Handler.__init__(self)
            QObject.__init__(self)
            # ring buffer: oldest entries are discarded once capacity is hit
            self.buffer = deque(maxlen=capacity)

        def emit(self, record):
            try:
                formatted = self.format(record)
                self.buffer.append(formatted)
                self.new_record.emit(formatted)  # notify GUI listeners
            except RuntimeError:
                # Qt object may already be torn down during shutdown
                pass

        def get_buffer(self):
            # hand out a snapshot, not the live deque
            return list(self.buffer)

    root_logger = logging.getLogger()
    handler = BufferingHandler()
    # mirror the formatting of the first pre-existing root handler
    handler.setFormatter(root_logger.handlers[0].formatter)
    root_logger.addHandler(handler)
    return handler
def __show_gui(buffer_handler):
    """Run the Qt main window until it closes; return the app's exit code.

    Wires *buffer_handler* (the GUI log handler) into the main window, then
    detaches and closes it after the event loop ends so no Qt-backed handler
    survives into interpreter shutdown.
    """
    # lazy load GUI frameworks
    from PyQt6.QtWidgets import QApplication

    # load application; force the Fusion style everywhere except native macOS
    app = QApplication(sys.argv)
    if app.style().objectName() != 'macos':
        app.setStyle('Fusion')

    # configure main window
    from src.ui.main_window import MainWindow
    window = MainWindow()
    window.buffer_handler = buffer_handler
    window.show()
    exit_code = app.exec()

    # cleanup: remove and close the GUI logging handler before interpreter shutdown
    root_logger = logging.getLogger()
    if buffer_handler in root_logger.handlers:
        root_logger.removeHandler(buffer_handler)
    try:
        buffer_handler.close()
    except Exception:
        # never let logging cleanup throw during shutdown
        pass
    return exit_code
if __name__ == '__main__':
    import sys

    # Run an embedded render server for the lifetime of the GUI session.
    server = ZordonServer()
    server.start_server()
    # Install the buffering log handler first so the window can show logs,
    # then block inside the Qt event loop until the window closes.
    __show_gui(__setup_buffer_handler())
    server.stop_server()
    # NOTE(review): the GUI's exit code is discarded here — sys.exit() always
    # exits 0; confirm whether the Qt exit code should be propagated.
    sys.exit()
-121
View File
@@ -1,121 +0,0 @@
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller build spec for the Zordon client (GUI) executable.
# Executed by PyInstaller, which injects Analysis/PYZ/EXE/BUNDLE into scope.
from PyInstaller.utils.hooks import collect_all

# - get version from version file
import os
import sys
import platform

# make src/ importable so the version module can be loaded at build time
src_path = os.path.abspath("src")
sys.path.insert(0, src_path)
from version import APP_NAME, APP_VERSION, APP_AUTHOR
sys.path.insert(0, os.path.abspath('.'))

# bundle static resources and the Blender-side helper scripts
datas = [('resources', 'resources'), ('src/engines/blender/scripts/', 'src/engines/blender/scripts')]
binaries = []
hiddenimports = ['zeroconf']
# zeroconf loads submodules dynamically, so collect everything it ships with
tmp_ret = collect_all('zeroconf')
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]

a = Analysis(
    ['client.py'],
    pathex=[],
    binaries=binaries,
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
    optimize=1,  # fyi: optim level 2 breaks on windows
)
pyz = PYZ(a.pure)

if platform.system() == 'Darwin':  # macOS
    # macOS: windowed EXE (binaries excluded) wrapped into a versioned .app bundle
    exe = EXE(
        pyz,
        a.scripts,
        [],
        exclude_binaries=True,
        name=APP_NAME,
        debug=False,
        bootloader_ignore_signals=False,
        strip=True,
        upx=True,
        console=False,
        disable_windowed_traceback=False,
        argv_emulation=False,
        target_arch=None,
        codesign_identity=None,
        entitlements_file=None,
    )
    app = BUNDLE(
        exe,
        a.binaries,
        a.datas,
        strip=True,
        name=f'{APP_NAME}.app',
        icon='resources/Server.png',
        bundle_identifier=None,
        version=APP_VERSION
    )
elif platform.system() == 'Windows':
    import pyinstaller_versionfile
    import tempfile

    # Windows: generate a version resource so the EXE carries file metadata
    version_file_path = os.path.join(tempfile.gettempdir(), 'versionfile.txt')
    pyinstaller_versionfile.create_versionfile(
        output_file=version_file_path,
        version=APP_VERSION,
        company_name=APP_AUTHOR,
        file_description=APP_NAME,
        internal_name=APP_NAME,
        legal_copyright=f"© {APP_AUTHOR}",
        original_filename=f"{APP_NAME}.exe",
        product_name=APP_NAME
    )
    exe = EXE(
        pyz,
        a.scripts,
        a.binaries,
        a.datas,
        [],
        name=APP_NAME,
        debug=False,
        bootloader_ignore_signals=False,
        strip=True,
        upx=True,
        console=False,
        disable_windowed_traceback=False,
        argv_emulation=False,
        target_arch=None,
        codesign_identity=None,
        entitlements_file=None,
        version=version_file_path
    )
else:  # linux
    # Linux: single windowed onefile-style EXE, no extra metadata
    exe = EXE(
        pyz,
        a.scripts,
        a.binaries,
        a.datas,
        [],
        name=APP_NAME,
        debug=False,
        bootloader_ignore_signals=False,
        strip=True,
        upx=True,
        console=False,
        disable_windowed_traceback=False,
        argv_emulation=False,
        target_arch=None,
        codesign_identity=None,
        entitlements_file=None
    )
Binary file not shown.

Before

Width:  |  Height:  |  Size: 838 KiB

Executable
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env python3
from src import init
if __name__ == '__main__':
import sys
sys.exit(init.run())
-2
View File
@@ -1,2 +0,0 @@
[pytest]
norecursedirs = src/engines/aerender .git build dist *.egg venv .venv env .env __pycache__ .pytest_cache
+33 -16
View File
@@ -1,20 +1,37 @@
PyQt6>=6.7.0 PyQt6>=6.6.1
psutil>=5.9.8 psutil>=5.9.8
requests>=2.32.2 requests>=2.31.0
Pillow>=10.3.0 Pillow>=10.2.0
PyYAML>=6.0.1 PyYAML>=6.0.1
flask>=3.0.3 flask>=3.0.2
tqdm>=4.66.4 tqdm>=4.66.2
werkzeug>=3.0.3 werkzeug>=3.0.1
Pypubsub>=4.0.3 Pypubsub>=4.0.3
zeroconf>=0.132.2 zeroconf>=0.131.0
SQLAlchemy>=2.0.30 SQLAlchemy>=2.0.25
plyer>=2.1.0 plyer>=2.1.0
rich>=13.7.1 pytz>=2023.3.post1
setuptools>=70.0.0 future>=0.18.3
py-cpuinfo>=9.0.0 rich>=13.7.0
requests-toolbelt>=1.0.0 pytest>=8.0.0
PyQt6-sip>=13.6.0 numpy>=1.26.3
humanize>=4.12.1 setuptools>=69.0.3
macholib>=1.16.3 pandas>=2.2.0
altgraph>=0.17.4 matplotlib>=3.8.2
MarkupSafe>=2.1.4
dmglib>=0.9.5; sys_platform == 'darwin'
python-dateutil>=2.8.2
certifi>=2023.11.17
shiboken6>=6.6.1
Pygments>=2.17.2
cycler>=0.12.1
contourpy>=1.2.0
packaging>=23.2
fonttools>=4.47.2
Jinja2>=3.1.3
pyparsing>=3.1.1
kiwisolver>=1.4.5
attrs>=23.2.0
lxml>=5.1.0
click>=8.1.7
requests_toolbelt>=1.0.0
+3 -125
View File
@@ -1,127 +1,5 @@
import logging #!/usr/bin/env python3
import multiprocessing from init import run
import os
import socket
import threading
import psutil
from src.api.api_server import API_VERSION
from src.api.api_server import start_api_server
from src.api.preview_manager import PreviewManager
from src.api.serverproxy_manager import ServerProxyManager
from src.distributed_job_manager import DistributedJobManager
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue
from src.utilities.config import Config
from src.utilities.misc_helper import (get_gpu_info, system_safe_path, current_system_cpu, current_system_os,
current_system_os_version, current_system_cpu_brand, check_for_updates)
from src.utilities.zeroconf_server import ZeroconfServer
from src.version import APP_NAME, APP_VERSION
logger = logging.getLogger()
class ZordonServer:
    """Top-level render server: config, API thread, zeroconf advertising, queue.

    Construction configures logging, loads the YAML config, and prepares the
    engines/previews directories. `start_server()` launches the API server in
    a daemon thread and begins advertising over zeroconf.
    """

    def __init__(self):
        # setup logging
        logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S',
                            level=Config.server_log_level.upper())
        logging.getLogger("requests").setLevel(logging.WARNING)  # suppress noisy requests/urllib3 logging
        logging.getLogger("urllib3").setLevel(logging.WARNING)
        # Load Config YAML
        Config.setup_config_dir()
        Config.load_config(system_safe_path(os.path.join(Config.config_dir(), 'config.yaml')))
        # configure default paths (engines and preview storage live under the upload folder)
        EngineManager.engines_path = system_safe_path(
            os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
                                      'engines')))
        os.makedirs(EngineManager.engines_path, exist_ok=True)
        PreviewManager.storage_path = system_safe_path(
            os.path.join(os.path.expanduser(Config.upload_folder), 'previews'))
        # api_server: daemon thread running Flask; server_hostname: this machine's name
        self.api_server = None
        self.server_hostname = None

    def start_server(self):
        """Start the API server thread, zeroconf advertising, and the render queue.

        Raises:
            ProcessLookupError: if another unrelated process with this app's
                name is already running.
        """
        def existing_process(process_name):
            # Find an unrelated process with our name (ignoring parent/child).
            import psutil
            current_pid = os.getpid()
            current_process = psutil.Process(current_pid)
            for proc in psutil.process_iter(['pid', 'name', 'ppid']):
                # NOTE(review): rstrip('.exe') strips any trailing '.', 'e', 'x'
                # characters, not the literal ".exe" suffix — e.g. "EXE.exe"
                # collapses too far. Works for names like "Zordon.exe" but is
                # fragile; confirm intent.
                proc_name = proc.info['name'].lower().rstrip('.exe')
                if proc_name == process_name.lower() and proc.info['pid'] != current_pid:
                    if proc.info['pid'] == current_process.ppid():
                        continue  # parent process
                    elif proc.info['ppid'] == current_pid:
                        continue  # child process
                    else:
                        return proc  # unrelated process
            return None

        # check for existing instance
        existing_proc = existing_process(APP_NAME)
        if existing_proc:
            err_msg = f"Another instance of {APP_NAME} is already running (pid: {existing_proc.pid})"
            logger.fatal(err_msg)
            raise ProcessLookupError(err_msg)
        # main start
        logger.info(f"Starting {APP_NAME} Render Server ({APP_VERSION})")
        logger.debug(f"Upload directory: {os.path.expanduser(Config.upload_folder)}")
        logger.debug(f"Thumbs directory: {PreviewManager.storage_path}")
        logger.debug(f"Engines directory: {EngineManager.engines_path}")
        # Set up the RenderQueue object
        RenderQueue.load_state(database_directory=system_safe_path(os.path.expanduser(Config.upload_folder)))
        ServerProxyManager.subscribe_to_listener()
        DistributedJobManager.subscribe_to_listener()
        # get hostname
        self.server_hostname = socket.gethostname()
        # configure and start API server (daemon thread so it dies with the process)
        self.api_server = threading.Thread(target=start_api_server, args=(self.server_hostname,))
        self.api_server.daemon = True
        self.api_server.start()
        # start zeroconf server, advertising hardware/OS capabilities to peers
        ZeroconfServer.configure(f"_{APP_NAME.lower()}._tcp.local.", self.server_hostname, Config.port_number)
        ZeroconfServer.properties = {'system_cpu': current_system_cpu(),
                                     'system_cpu_brand': current_system_cpu_brand(),
                                     'system_cpu_cores': multiprocessing.cpu_count(),
                                     'system_os': current_system_os(),
                                     'system_os_version': current_system_os_version(),
                                     'system_memory': round(psutil.virtual_memory().total / (1024**3)),  # in GB
                                     'gpu_info': get_gpu_info(),
                                     'api_version': API_VERSION}
        ZeroconfServer.start()
        logger.info(f"{APP_NAME} Render Server started - Hostname: {self.server_hostname}")
        RenderQueue.start()  # Start evaluating the render queue

    def is_running(self):
        # True while the API server thread is alive
        return self.api_server and self.api_server.is_alive()

    def stop_server(self):
        """Stop zeroconf and drain the render queue; never raises."""
        logger.info(f"{APP_NAME} Render Server is preparing to stop")
        try:
            ZeroconfServer.stop()
            RenderQueue.prepare_for_shutdown()
        except Exception as e:
            logger.exception(f"Exception during prepare for shutdown: {e}")
        logger.info(f"{APP_NAME} Render Server has shut down")
if __name__ == '__main__': if __name__ == '__main__':
server = ZordonServer() run(server_only=True)
try:
server.start_server()
server.api_server.join()
except KeyboardInterrupt:
pass
except Exception as e:
logger.fatal(f"Unhandled exception: {e}")
finally:
server.stop_server()
-90
View File
@@ -1,90 +0,0 @@
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller build spec for the headless Zordon Server executable.
# Executed by PyInstaller, which injects Analysis/PYZ/EXE into scope.
from PyInstaller.utils.hooks import collect_all

# - get version from version file
import os
import sys
import platform

sys.path.insert(0, os.path.abspath('.'))
from version import APP_NAME, APP_VERSION, APP_AUTHOR
# distinguish this build from the GUI client build
APP_NAME = APP_NAME + " Server"

# bundle static resources and the Blender-side helper scripts
datas = [('resources', 'resources'), ('src/engines/blender/scripts/', 'src/engines/blender/scripts')]
binaries = []
hiddenimports = ['zeroconf']
# zeroconf loads submodules dynamically, so collect everything it ships with
tmp_ret = collect_all('zeroconf')
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]

a = Analysis(
    ['server.py'],
    pathex=[],
    binaries=binaries,
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
    optimize=1,  # fyi: optim level 2 breaks on windows
)
pyz = PYZ(a.pure)

if platform.system() == 'Windows':
    import pyinstaller_versionfile
    import tempfile

    # Windows: generate a version resource so the EXE carries file metadata;
    # console=True because the server build is a terminal application
    version_file_path = os.path.join(tempfile.gettempdir(), 'versionfile.txt')
    pyinstaller_versionfile.create_versionfile(
        output_file=version_file_path,
        version=APP_VERSION,
        company_name=APP_AUTHOR,
        file_description=APP_NAME,
        internal_name=APP_NAME,
        legal_copyright=f"© {APP_AUTHOR}",
        original_filename=f"{APP_NAME}.exe",
        product_name=APP_NAME
    )
    exe = EXE(
        pyz,
        a.scripts,
        a.binaries,
        a.datas,
        [],
        name=APP_NAME,
        debug=False,
        bootloader_ignore_signals=False,
        strip=True,
        upx=True,
        console=True,
        disable_windowed_traceback=False,
        argv_emulation=False,
        target_arch=None,
        codesign_identity=None,
        entitlements_file=None,
        version=version_file_path
    )
else:  # linux / macOS
    exe = EXE(
        pyz,
        a.scripts,
        a.binaries,
        a.datas,
        [],
        name=APP_NAME,
        debug=False,
        bootloader_ignore_signals=False,
        strip=True,
        upx=True,
        console=False,
        disable_windowed_traceback=False,
        argv_emulation=False,
        target_arch=None,
        codesign_identity=None,
        entitlements_file=None
    )
+22
View File
@@ -0,0 +1,22 @@
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
import glob
from setuptools import setup
APP = ['main.py']
DATA_FILES = [('config', glob.glob('config/*.*')),
('resources', glob.glob('resources/*.*'))]
OPTIONS = {}
setup(
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
name='Zordon'
)
+150
View File
@@ -0,0 +1,150 @@
#!/usr/bin/env python3
import logging
import os
import shutil
import tempfile
import zipfile
from datetime import datetime
import requests
from tqdm import tqdm
from werkzeug.utils import secure_filename
logger = logging.getLogger()
def handle_uploaded_project_files(request, jobs_list, upload_directory):
    """
    Handles the uploaded project files.

    This method takes a request with a file, a list of jobs, and an upload directory. It checks if the file was uploaded
    directly, if it needs to be downloaded from a URL, or if it's already present on the local file system. It then
    moves the file to the appropriate directory and returns the local path to the file and its name.

    Args:
        request (Request): The request object containing the file.
        jobs_list (list): A list of jobs. The first job in the list is used to get the file's URL and local path.
        upload_directory (str): The directory where the file should be uploaded.

    Raises:
        ValueError: If no valid project paths are found.

    Returns:
        tuple: A tuple containing the local path to the loaded project file and its name.
    """
    # Initialize default values
    loaded_project_local_path = None
    uploaded_project = request.files.get('file', None)
    project_url = jobs_list[0].get('url', None)
    local_path = jobs_list[0].get('local_path', None)
    renderer = jobs_list[0].get('renderer')
    downloaded_file_url = None

    # Resolve the project source in priority order: direct upload, URL, local path.
    if uploaded_project and uploaded_project.filename:
        referred_name = os.path.basename(uploaded_project.filename)
    elif project_url:
        # NOTE(review): despite the name, downloaded_file_url is a local temp
        # file *path* returned by download_project_from_url.
        referred_name, downloaded_file_url = download_project_from_url(project_url)
        if not referred_name:
            raise ValueError(f"Error downloading file from URL: {project_url}")
    elif local_path and os.path.exists(local_path):
        referred_name = os.path.basename(local_path)
    else:
        raise ValueError("Cannot find any valid project paths")

    # Prepare the local filepath: <upload_dir>/<timestamp>-<renderer>-<name>/source/
    # NOTE(review): if the job dict has no 'renderer' key, renderer is None and
    # the join below raises TypeError — confirm callers always supply it.
    cleaned_path_name = jobs_list[0].get('name', os.path.splitext(referred_name)[0]).replace(' ', '-')
    job_dir = os.path.join(upload_directory, '-'.join(
        [datetime.now().strftime("%Y.%m.%d_%H.%M.%S"), renderer, cleaned_path_name]))
    os.makedirs(job_dir, exist_ok=True)
    project_source_dir = os.path.join(job_dir, 'source')
    os.makedirs(project_source_dir, exist_ok=True)

    # Move projects to their work directories (same priority order as above)
    if uploaded_project and uploaded_project.filename:
        # sanitize the client-supplied filename before writing to disk
        loaded_project_local_path = os.path.join(project_source_dir, secure_filename(uploaded_project.filename))
        uploaded_project.save(loaded_project_local_path)
        logger.info(f"Transfer complete for {loaded_project_local_path.split(upload_directory)[-1]}")
    elif project_url:
        loaded_project_local_path = os.path.join(project_source_dir, referred_name)
        shutil.move(downloaded_file_url, loaded_project_local_path)
        logger.info(f"Download complete for {loaded_project_local_path.split(upload_directory)[-1]}")
    elif local_path:
        loaded_project_local_path = os.path.join(project_source_dir, referred_name)
        shutil.copy(local_path, loaded_project_local_path)
        logger.info(f"Import complete for {loaded_project_local_path.split(upload_directory)[-1]}")
    return loaded_project_local_path, referred_name
def download_project_from_url(project_url):
    """Download a project file from *project_url* into the system temp dir.

    Streams the response to disk in 1 KiB chunks with a tqdm progress bar.

    Args:
        project_url (str): Direct URL to the project file; the URL's basename
            becomes the local file name.

    Returns:
        tuple: (file name, local temp file path) on success, or (None, None)
        on any failure (network error or non-200 response).
    """
    logger.info(f"Downloading project from url: {project_url}")
    referred_name = os.path.basename(project_url)
    try:
        response = requests.get(project_url, stream=True)
        if response.status_code == 200:
            # Get the total file size from the "Content-Length" header (0 if unknown)
            file_size = int(response.headers.get("Content-Length", 0))
            # Create a progress bar using tqdm
            progress_bar = tqdm(total=file_size, unit="B", unit_scale=True)
            downloaded_file_path = os.path.join(tempfile.gettempdir(), referred_name)
            # Open a file for writing in binary mode and stream chunks to it
            with open(downloaded_file_path, "wb") as file:
                for chunk in response.iter_content(chunk_size=1024):
                    if chunk:
                        file.write(chunk)
                        progress_bar.update(len(chunk))
            progress_bar.close()
            return referred_name, downloaded_file_path
        # bugfix: a non-200 response previously fell through and implicitly
        # returned a bare None, breaking callers that unpack two values
        logger.error(f"Error downloading file: HTTP {response.status_code} from {project_url}")
    except Exception as e:
        logger.error(f"Error downloading file: {e}")
    return None, None
def process_zipped_project(zip_path):
    """
    Processes a zipped project.

    This method takes a path to a zip file, extracts its contents next to the
    archive, and returns the path to the single extracted project file. If the
    extraction yields more than one candidate project file or none, an error
    is raised.

    Args:
        zip_path (str): The path to the zip file.

    Raises:
        ValueError: If there's more than 1 project file or none in the zip
            file, or if the archive is corrupt/oversized.

    Returns:
        str: The path to the main project file.
    """
    work_path = os.path.dirname(zip_path)
    try:
        # NOTE(review): contents are extracted as-is; only pass archives from
        # trusted uploads.
        with zipfile.ZipFile(zip_path, 'r') as myzip:
            myzip.extractall(work_path)
        # Candidate project files = plain files in the work dir, minus zips
        project_files = [x for x in os.listdir(work_path) if os.path.isfile(os.path.join(work_path, x))]
        project_files = [x for x in project_files if '.zip' not in x]
        logger.debug(f"Extracted files: {project_files}")  # was misleadingly labeled "Zip files"
        # todo: filter by the renderer's supported extensions once the engine
        # is known here (see RenderWorkerFactory.supported_extensions)
        # If there's more than 1 project file or none, raise an error
        if len(project_files) != 1:
            raise ValueError(f'Cannot find a valid project file in {os.path.basename(zip_path)}')
        extracted_project_path = os.path.join(work_path, project_files[0])
        logger.info(f"Extracted zip file to {extracted_project_path}")
    except (zipfile.BadZipFile, zipfile.LargeZipFile) as e:
        logger.error(f"Error processing zip file: {e}")
        # bugfix: chain the original zipfile error for debuggability
        raise ValueError(f"Error processing zip file: {e}") from e
    return extracted_project_path
+243 -304
View File
@@ -10,90 +10,143 @@ import ssl
import tempfile import tempfile
import time import time
from datetime import datetime from datetime import datetime
from zipfile import ZipFile
import cpuinfo
import psutil import psutil
import yaml import yaml
from flask import Flask, request, send_file, after_this_request, Response, redirect, url_for from flask import Flask, request, send_file, after_this_request, Response, redirect, url_for, abort
from sqlalchemy.orm.exc import DetachedInstanceError from sqlalchemy.orm.exc import DetachedInstanceError
from src.api.job_import_handler import JobImportHandler from src.api.add_job_helpers import handle_uploaded_project_files, process_zipped_project
from src.api.preview_manager import PreviewManager from src.api.preview_manager import PreviewManager
from src.distributed_job_manager import DistributedJobManager from src.distributed_job_manager import DistributedJobManager
from src.engines.core.base_worker import string_to_status, RenderStatus
from src.engines.engine_manager import EngineManager from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue, JobNotFoundError from src.render_queue import RenderQueue, JobNotFoundError
from src.utilities.benchmark import cpu_benchmark, disk_io_benchmark
from src.utilities.config import Config from src.utilities.config import Config
from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \ from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \
current_system_os_version, num_to_alphanumeric, get_gpu_info current_system_os_version, num_to_alphanumeric
from src.utilities.status_utils import string_to_status from src.utilities.zeroconf_server import ZeroconfServer
from src.version import APP_VERSION
logger = logging.getLogger() logger = logging.getLogger()
server = Flask(__name__) server = Flask(__name__)
ssl._create_default_https_context = ssl._create_unverified_context # disable SSL for downloads ssl._create_default_https_context = ssl._create_unverified_context # disable SSL for downloads
API_VERSION = "0.1" categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
RenderStatus.COMPLETED, RenderStatus.CANCELLED]
def start_api_server(hostname=None):
    """Configure and run the Flask API server; blocks until the server stops.

    Args:
        hostname (str | None): Host to bind to. When None, the local hostname
            is used with a ".local" suffix appended if missing.
    """
    # get hostname
    if not hostname:
        local_hostname = socket.gethostname()
        hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")
    # load flask settings from the app Config
    server.config['HOSTNAME'] = hostname
    server.config['PORT'] = int(Config.port_number)
    server.config['UPLOAD_FOLDER'] = system_safe_path(os.path.expanduser(Config.upload_folder))
    server.config['MAX_CONTENT_PATH'] = Config.max_content_path
    server.config['enable_split_jobs'] = Config.enable_split_jobs
    # disable most Flask logging (werkzeug defaults are noisy)
    flask_log = logging.getLogger('werkzeug')
    flask_log.setLevel(Config.flask_log_level.upper())
    logger.debug('Starting API server')
    try:
        # threaded=True so long-poll endpoints don't block other requests;
        # reloader disabled because this runs inside a daemon thread
        server.run(host=hostname, port=server.config['PORT'], debug=Config.flask_debug_enable, use_reloader=False,
                   threaded=True)
    finally:
        logger.debug('Stopping API server')
# -------------------------------------------- # -- Error Handlers --
# Get All Jobs
# -- Error Handlers --

@server.errorhandler(JobNotFoundError)
def handle_job_not_found(job_error):
    # Unknown job id → 400 with the error text as the body
    return str(job_error), 400


@server.errorhandler(DetachedInstanceError)
def handle_detached_instance(error):
    # SQLAlchemy object touched outside its session — report as temporarily
    # unavailable so clients retry rather than fail hard
    # logger.debug(f"detached instance: {error}")
    return "Unavailable", 503


@server.errorhandler(Exception)
def handle_general_error(general_error):
    # Catch-all: log the failure and return a generic 500
    err_msg = f"Server error: {general_error}"
    logger.error(err_msg)
    return err_msg, 500
# -- Jobs --
def sorted_jobs(all_jobs, sort_by_date=True):
    """Order jobs newest-first, optionally grouped by status category.

    With sort_by_date=True (default) the whole list is sorted by creation
    date, newest first. Otherwise jobs are grouped in the order given by the
    module-level `categories` list, each group newest-first.
    """
    newest_first = lambda job: job.date_created

    if sort_by_date:
        return sorted(all_jobs, key=newest_first, reverse=True)

    grouped = []
    if all_jobs:
        for category in categories:
            matching = [job for job in all_jobs if job.status == category.value]
            if matching:
                grouped.extend(sorted(matching, key=newest_first, reverse=True))
    return grouped
@server.get('/api/jobs') @server.get('/api/jobs')
def jobs_json(): def jobs_json():
"""Retrieves all jobs from the render queue in JSON format. try:
all_jobs = [x.json() for x in RenderQueue.all_jobs()]
This endpoint fetches all jobs currently in the render queue, converts them to JSON format, job_cache_int = int(json.dumps(all_jobs).__hash__())
and returns them along with a cache token that represents the current state of the job list. job_cache_token = num_to_alphanumeric(job_cache_int)
return {'jobs': all_jobs, 'token': job_cache_token}
Returns: except DetachedInstanceError as e:
dict: A dictionary containing: raise e
- 'jobs' (list[dict]): A list of job dictionaries, each representing a job in the queue. except Exception as e:
- 'token' (str): A cache token generated from the hash of the job list. logger.error(f"Error fetching jobs_json: {e}")
""" raise e
all_jobs = [x.json() for x in RenderQueue.all_jobs()]
job_cache_int = int(json.dumps(all_jobs).__hash__())
job_cache_token = num_to_alphanumeric(job_cache_int)
return {'jobs': all_jobs, 'token': job_cache_token}
@server.get('/api/jobs_long_poll') @server.get('/api/jobs_long_poll')
def long_polling_jobs(): def long_polling_jobs():
hash_token = request.args.get('token', None) try:
start_time = time.time() hash_token = request.args.get('token', None)
while True: start_time = time.time()
all_jobs = jobs_json() while True:
if all_jobs['token'] != hash_token: all_jobs = jobs_json()
return all_jobs if all_jobs['token'] != hash_token:
# Break after 30 seconds to avoid gateway timeout return all_jobs
if time.time() - start_time > 30: # Break after 30 seconds to avoid gateway timeout
return {}, 204 if time.time() - start_time > 30:
time.sleep(1) return {}, 204
time.sleep(1)
except DetachedInstanceError as e:
raise e
except Exception as e:
logger.error(f"Error fetching long_polling_jobs: {e}")
raise e
@server.route('/api/job/<job_id>/thumbnail')
def job_thumbnail(job_id):
    """Return a representative preview (image or video) for a job.

    Query args:
        video_ok: when set to a truthy value, a video preview may be
            returned in preference to a still image.

    Returns:
        The preview file response, a 404 when no preview exists, or a
        500 message on unexpected errors.
    """
    try:
        # NOTE(review): previously any non-empty value (including the
        # literal string "false") was treated as truthy; parse the common
        # spellings explicitly. The unused 'size=big' flag was removed.
        video_ok = request.args.get('video_ok', '').lower() not in ('', '0', 'false', 'no')
        found_job = RenderQueue.job_with_id(job_id, none_ok=False)
        # trigger a thumbnail update - just in case the previews are stale
        PreviewManager.update_previews_for_job(found_job, wait_until_completion=True, timeout=60)
        previews = PreviewManager.get_previews_for_job(found_job)
        all_previews_list = previews.get('output', previews.get('input', []))
        video_previews = [x for x in all_previews_list if x['kind'] == 'video']
        image_previews = [x for x in all_previews_list if x['kind'] == 'image']
        filtered_list = video_previews if video_previews and video_ok else image_previews
        # todo - sort by size or other metrics here
        if filtered_list:
            preview_to_send = filtered_list[0]
            mime_types = {'image': 'image/jpeg', 'video': 'video/mp4'}
            file_mime_type = mime_types.get(preview_to_send['kind'], 'unknown')
            return send_file(preview_to_send['filename'], mimetype=file_mime_type)
    except Exception as e:
        logger.error(f'Error getting thumbnail: {e}')
        return f'Error getting thumbnail: {e}', 500
    return "No thumbnail available", 404
# Get job file routing
@server.route('/api/job/<job_id>/file/<filename>', methods=['GET'])
def get_job_file(job_id, filename):
    """Serve a single file belonging to a job.

    Matches *filename* as a substring of each path in the job's file
    list and streams the first hit; responds 404 when nothing matches or
    the file vanished between listing and sending.
    """
    found_job = RenderQueue.job_with_id(job_id)
    try:
        for full_path in found_job.file_list():
            if filename in full_path:
                return send_file(path_or_file=full_path)
    except FileNotFoundError:
        abort(404)
    # Previously fell through returning None (a Flask server error when no
    # path matched); report the missing file explicitly instead.
    abort(404)
@server.get('/api/jobs/<status_val>') @server.get('/api/jobs/<status_val>')
@@ -106,33 +159,24 @@ def filtered_jobs_json(status_val):
return f'Cannot find jobs with status {status_val}', 400 return f'Cannot find jobs with status {status_val}', 400
# -------------------------------------------- @server.post('/api/job/<job_id>/send_subjob_update_notification')
# Job Details / File Handling def subjob_update_notification(job_id):
# -------------------------------------------- try:
subjob_details = request.json
logger.info(f"Subjob to job id: {job_id} is now {subjob_details['status']}")
DistributedJobManager.handle_subjob_update_notification(RenderQueue.job_with_id(job_id), subjob_data=subjob_details)
return Response(status=200)
except JobNotFoundError:
return "Job not found", 404
@server.get('/api/job/<job_id>') @server.get('/api/job/<job_id>')
def get_job_details(job_id): def get_job_status(job_id):
"""Retrieves the details of a requested job in JSON format
Args:
job_id (str): The ID of the render job.
Returns:
dict: A JSON representation of the job's details.
"""
return RenderQueue.job_with_id(job_id).json() return RenderQueue.job_with_id(job_id).json()
@server.get('/api/job/<job_id>/logs') @server.get('/api/job/<job_id>/logs')
def get_job_logs(job_id): def get_job_logs(job_id):
"""Retrieves the log file for a specific render job.
Args:
job_id (str): The ID of the render job.
Returns:
Response: The log file's content as plain text, or an empty response if the log file is not found.
"""
found_job = RenderQueue.job_with_id(job_id) found_job = RenderQueue.job_with_id(job_id)
log_path = system_safe_path(found_job.log_path()) log_path = system_safe_path(found_job.log_path())
log_data = None log_data = None
@@ -148,7 +192,7 @@ def get_file_list(job_id):
@server.route('/api/job/<job_id>/download') @server.route('/api/job/<job_id>/download')
def download_requested_file(job_id): def download_file(job_id):
requested_filename = request.args.get('filename') requested_filename = request.args.get('filename')
if not requested_filename: if not requested_filename:
@@ -163,7 +207,7 @@ def download_requested_file(job_id):
@server.route('/api/job/<job_id>/download_all') @server.route('/api/job/<job_id>/download_all')
def download_all_files(job_id): def download_all(job_id):
zip_filename = None zip_filename = None
@after_this_request @after_this_request
@@ -178,7 +222,6 @@ def download_all_files(job_id):
found_job = RenderQueue.job_with_id(job_id) found_job = RenderQueue.job_with_id(job_id)
output_dir = os.path.dirname(found_job.output_path) output_dir = os.path.dirname(found_job.output_path)
if os.path.exists(output_dir): if os.path.exists(output_dir):
from zipfile import ZipFile
zip_filename = system_safe_path(os.path.join(tempfile.gettempdir(), zip_filename = system_safe_path(os.path.join(tempfile.gettempdir(),
pathlib.Path(found_job.input_path).stem + '.zip')) pathlib.Path(found_job.input_path).stem + '.zip'))
with ZipFile(zip_filename, 'w') as zipObj: with ZipFile(zip_filename, 'w') as zipObj:
@@ -190,10 +233,6 @@ def download_all_files(job_id):
return f'Cannot find project files for job {job_id}', 500 return f'Cannot find project files for job {job_id}', 500
# --------------------------------------------
# System Environment / Status
# --------------------------------------------
@server.get('/api/presets') @server.get('/api/presets')
def presets(): def presets():
presets_path = system_safe_path('config/presets.yaml') presets_path = system_safe_path('config/presets.yaml')
@@ -225,93 +264,41 @@ def snapshot():
return server_data return server_data
@server.route('/api/status') @server.get('/api/_detected_clients')
def status(): def detected_clients():
return {"timestamp": datetime.now().isoformat(), # todo: dev/debug only. Should not ship this - probably.
"system_os": current_system_os(), return ZeroconfServer.found_hostnames()
"system_os_version": current_system_os_version(),
"system_cpu": current_system_cpu(),
"system_cpu_brand": cpuinfo.get_cpu_info()['brand_raw'],
"cpu_percent": psutil.cpu_percent(percpu=False),
"cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
"cpu_count": psutil.cpu_count(logical=False),
"memory_total": psutil.virtual_memory().total,
"memory_available": psutil.virtual_memory().available,
"memory_percent": psutil.virtual_memory().percent,
"job_counts": RenderQueue.job_counts(),
"hostname": server.config['HOSTNAME'],
"port": server.config['PORT'],
"app_version": APP_VERSION,
"api_version": API_VERSION
}
# -------------------------------------------- # New version
# Job Lifecyle (Create, Cancel, Delete)
# --------------------------------------------
@server.post('/api/add_job') @server.post('/api/add_job')
def add_job_handler(): def add_job_handler():
""" # Process request data
POST /api/add_job
Add a render job to the queue.
**Request Formats**
- JSON body:
{
"name": "example.blend",
"engine": "blender",
"frame_start": 1,
"frame_end": 100,
"render_settings": {...}
"child_jobs"; [...]
}
**Responses**
200 Success
400 Invalid or missing input
500 Internal server error while parsing or creating jobs
"""
try: try:
if request.is_json: if request.is_json:
new_job_data = request.get_json() jobs_list = [request.json] if not isinstance(request.json, list) else request.json
elif request.form.get('json', None): elif request.form.get('json', None):
new_job_data = json.loads(request.form['json']) jobs_list = json.loads(request.form['json'])
else: else:
return "Cannot find valid job data", 400 return "Invalid data", 400
except Exception as e: except Exception as e:
err_msg = f"Error processing job data: {e}" err_msg = f"Error processing job data: {e}"
logger.error(err_msg) logger.error(err_msg)
return err_msg, 500 return err_msg, 500
# Validate Job Data - check for required values and download or unzip project files
try: try:
processed_job_data = JobImportHandler.validate_job_data(new_job_data, server.config['UPLOAD_FOLDER'], loaded_project_local_path, referred_name = handle_uploaded_project_files(request, jobs_list,
uploaded_file=request.files.get('file')) server.config['UPLOAD_FOLDER'])
except (KeyError, FileNotFoundError) as e: if loaded_project_local_path.lower().endswith('.zip'):
err_msg = f"Error processing job data: {e}" loaded_project_local_path = process_zipped_project(loaded_project_local_path)
return err_msg, 400
except Exception as e:
err_msg = f"Unknown error processing data: {e}"
return err_msg, 500
try: results = []
loaded_project_local_path = processed_job_data['__loaded_project_local_path'] for new_job_data in jobs_list:
created_jobs = [] new_job = DistributedJobManager.create_render_job(new_job_data, loaded_project_local_path)
if processed_job_data.get("child_jobs"): results.append(new_job.json())
for child_job_diffs in processed_job_data["child_jobs"]: return results, 200
processed_child_job_data = processed_job_data.copy()
processed_child_job_data.pop("child_jobs")
processed_child_job_data.update(child_job_diffs)
child_job = DistributedJobManager.create_render_job(processed_child_job_data, loaded_project_local_path)
created_jobs.append(child_job)
else:
new_job = DistributedJobManager.create_render_job(processed_job_data, loaded_project_local_path)
created_jobs.append(new_job)
return [x.json() for x in created_jobs]
except Exception as e: except Exception as e:
logger.exception(f"Error creating render job: {e}") logger.exception(f"Error adding job: {e}")
return 'unknown error', 500 return 'unknown error', 500
@@ -337,95 +324,111 @@ def delete_job(job_id):
# Check if we can remove the 'output' directory # Check if we can remove the 'output' directory
found_job = RenderQueue.job_with_id(job_id) found_job = RenderQueue.job_with_id(job_id)
project_dir = os.path.dirname(os.path.dirname(found_job.input_path))
output_dir = os.path.dirname(found_job.output_path) output_dir = os.path.dirname(found_job.output_path)
found_job.stop() if server.config['UPLOAD_FOLDER'] in output_dir and os.path.exists(output_dir):
shutil.rmtree(output_dir)
try: try:
PreviewManager.delete_previews_for_job(found_job) PreviewManager.delete_previews_for_job(found_job)
except Exception as e: except Exception as e:
logger.error(f"Error deleting previews for {found_job}: {e}") logger.error(f"Error deleting previews for {found_job}: {e}")
# finally delete the job # See if we own the project_dir (i.e. was it uploaded)
project_dir = os.path.dirname(os.path.dirname(found_job.input_path))
if server.config['UPLOAD_FOLDER'] in project_dir and os.path.exists(project_dir):
# check to see if any other projects are sharing the same project file
project_dir_files = [f for f in os.listdir(project_dir) if not f.startswith('.')]
if len(project_dir_files) == 0 or (len(project_dir_files) == 1 and 'source' in project_dir_files[0]):
logger.info(f"Removing project directory: {project_dir}")
shutil.rmtree(project_dir)
RenderQueue.delete_job(found_job) RenderQueue.delete_job(found_job)
if request.args.get('redirect', False):
return redirect(url_for('index'))
else:
return "Job deleted", 200
# delete the output_dir
if server.config['UPLOAD_FOLDER'] in output_dir and os.path.exists(output_dir):
shutil.rmtree(output_dir)
# See if we own the project_dir (i.e. was it uploaded) - if so delete the directory
try:
if server.config['UPLOAD_FOLDER'] in project_dir and os.path.exists(project_dir):
# check to see if any other projects are sharing the same project file
project_dir_files = [f for f in os.listdir(project_dir) if not f.startswith('.')]
if len(project_dir_files) == 0 or (len(project_dir_files) == 1 and 'source' in project_dir_files[0]):
logger.info(f"Removing project directory: {project_dir}")
shutil.rmtree(project_dir)
except Exception as e:
logger.error(f"Error removing project files: {e}")
return "Job deleted", 200
except Exception as e: except Exception as e:
logger.error(f"Error deleting job: {e}") logger.error(f"Error deleting job: {e}")
return f"Error deleting job: {e}", 500 return f"Error deleting job: {e}", 500
# -------------------------------------------- @server.get('/api/clear_history')
# Engine Info and Management: def clear_history():
# -------------------------------------------- RenderQueue.clear_history()
return 'success'
@server.route('/api/status')
def status():
# Get system info
return {"timestamp": datetime.now().isoformat(),
"system_os": current_system_os(),
"system_os_version": current_system_os_version(),
"system_cpu": current_system_cpu(),
"cpu_percent": psutil.cpu_percent(percpu=False),
"cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
"cpu_count": psutil.cpu_count(logical=False),
"memory_total": psutil.virtual_memory().total,
"memory_available": psutil.virtual_memory().available,
"memory_percent": psutil.virtual_memory().percent,
"job_counts": RenderQueue.job_counts(),
"hostname": server.config['HOSTNAME'],
"port": server.config['PORT']
}
@server.get('/api/renderer_info')
def renderer_info():
@server.get('/api/engine_info')
def engine_info():
response_type = request.args.get('response_type', 'standard') response_type = request.args.get('response_type', 'standard')
if response_type not in ['full', 'standard']:
raise ValueError(f"Invalid response_type: {response_type}")
def process_engine(engine): def process_engine(engine):
try: try:
# Get all installed versions of the engine # Get all installed versions of the engine
installed_versions = EngineManager.all_versions_for_engine(engine.name()) installed_versions = EngineManager.all_versions_for_engine(engine.name())
if not installed_versions: if installed_versions:
return None # Use system-installed versions to avoid permission issues
system_installed_versions = [x for x in installed_versions if x['type'] == 'system']
install_path = system_installed_versions[0]['path'] if system_installed_versions else \
installed_versions[0]['path']
system_installed_versions = [v for v in installed_versions if v['type'] == 'system'] en = engine(install_path)
install_path = system_installed_versions[0]['path'] if system_installed_versions else installed_versions[0]['path']
en = engine(install_path) if response_type == 'full': # Full dataset - Can be slow
engine_name = en.name() return {
result = { en.name(): {
engine_name: { 'is_available': RenderQueue.is_available_for_job(en.name()),
'is_available': RenderQueue.is_available_for_job(engine_name), 'versions': installed_versions,
'versions': installed_versions 'supported_extensions': engine.supported_extensions(),
} 'supported_export_formats': en.get_output_formats(),
} 'system_info': en.system_info()
}
if response_type == 'full':
with concurrent.futures.ThreadPoolExecutor() as executor:
future_results = {
'supported_extensions': executor.submit(en.supported_extensions),
'supported_export_formats': executor.submit(en.get_output_formats),
'system_info': executor.submit(en.system_info)
} }
elif response_type == 'standard': # Simpler dataset to reduce response times
for key, future in future_results.items(): return {
result[engine_name][key] = future.result() en.name(): {
'is_available': RenderQueue.is_available_for_job(en.name()),
return result 'versions': installed_versions,
}
}
else:
raise AttributeError(f"Invalid response_type: {response_type}")
except Exception as e: except Exception as e:
logger.error(f"Error fetching details for engine '{engine.name()}': {e}") logger.error(f'Error fetching details for {engine.name()} renderer: {e}')
raise e return {}
engine_data = {} renderer_data = {}
with concurrent.futures.ThreadPoolExecutor() as executor: with concurrent.futures.ThreadPoolExecutor() as executor:
futures = {executor.submit(process_engine, engine): engine.name() for engine in EngineManager.supported_engines()} futures = {executor.submit(process_engine, engine): engine.name() for engine in EngineManager.supported_engines()}
for future in concurrent.futures.as_completed(futures): for future in concurrent.futures.as_completed(futures):
result = future.result() result = future.result()
if result: if result:
engine_data.update(result) renderer_data.update(result)
return engine_data return renderer_data
@server.get('/api/<engine_name>/is_available') @server.get('/api/<engine_name>/is_available')
@@ -476,117 +479,53 @@ def delete_engine_download():
(f"Error deleting {json_data.get('engine')} {json_data.get('version')}", 500) (f"Error deleting {json_data.get('engine')} {json_data.get('version')}", 500)
@server.get('/api/engine/<engine_name>/args') @server.get('/api/renderer/<renderer>/args')
def get_engine_args(engine_name): def get_renderer_args(renderer):
try: try:
engine_class = EngineManager.engine_with_name(engine_name) renderer_engine_class = EngineManager.engine_with_name(renderer)
return engine_class().get_arguments() return renderer_engine_class().get_arguments()
except LookupError: except LookupError:
return f"Cannot find engine '{engine_name}'", 400 return f"Cannot find renderer '{renderer}'", 400
@server.get('/api/engine/<engine_name>/help') @server.get('/api/renderer/<renderer>/help')
def get_engine_help(engine_name): def get_renderer_help(renderer):
try: try:
engine_class = EngineManager.engine_with_name(engine_name) renderer_engine_class = EngineManager.engine_with_name(renderer)
return engine_class().get_help() return renderer_engine_class().get_help()
except LookupError: except LookupError:
return f"Cannot find engine '{engine_name}'", 400 return f"Cannot find renderer '{renderer}'", 400
# --------------------------------------------
# Miscellaneous:
# --------------------------------------------
@server.get('/api/heartbeat')
def heartbeat():
    """Liveness probe: respond 200 with the server's current local time."""
    now = datetime.now()
    return now.isoformat(), 200
@server.post('/api/job/<job_id>/send_subjob_update_notification')
def subjob_update_notification(job_id):
    """Receive a status update for one of this job's subjobs and forward it
    to the DistributedJobManager."""
    payload = request.json
    parent_job = RenderQueue.job_with_id(job_id)
    DistributedJobManager.handle_subjob_update_notification(parent_job, subjob_data=payload)
    return Response(status=200)
@server.route('/api/job/<job_id>/thumbnail')
def job_thumbnail(job_id):
    """Return a representative preview (image or video) for a job.

    Query args:
        video_ok: when set to a truthy value, a video preview may be
            returned in preference to a still image.

    Returns:
        The preview file response, a 404 when no preview exists, or a
        500 message on unexpected errors.
    """
    try:
        # NOTE(review): previously any non-empty value (including the
        # literal string "false") was treated as truthy; parse the common
        # spellings explicitly. The unused 'size=big' flag was removed.
        video_ok = request.args.get('video_ok', '').lower() not in ('', '0', 'false', 'no')
        found_job = RenderQueue.job_with_id(job_id, none_ok=False)
        # trigger a thumbnail update - just in case the previews are stale
        PreviewManager.update_previews_for_job(found_job, wait_until_completion=True, timeout=60)
        previews = PreviewManager.get_previews_for_job(found_job)
        all_previews_list = previews.get('output', previews.get('input', []))
        video_previews = [x for x in all_previews_list if x['kind'] == 'video']
        image_previews = [x for x in all_previews_list if x['kind'] == 'image']
        filtered_list = video_previews if video_previews and video_ok else image_previews
        # todo - sort by size or other metrics here
        if filtered_list:
            preview_to_send = filtered_list[0]
            mime_types = {'image': 'image/jpeg', 'video': 'video/mp4'}
            file_mime_type = mime_types.get(preview_to_send['kind'], 'unknown')
            return send_file(preview_to_send['filename'], mimetype=file_mime_type)
    except Exception as e:
        logger.error(f'Error getting thumbnail: {e}')
        return f'Error getting thumbnail: {e}', 500
    return "No thumbnail available", 404
# --------------------------------------------
# System Benchmarks:
# --------------------------------------------
@server.get('/api/cpu_benchmark') @server.get('/api/cpu_benchmark')
def get_cpu_benchmark_score(): def get_cpu_benchmark_score():
from src.utilities.benchmark import cpu_benchmark
return str(cpu_benchmark(10)) return str(cpu_benchmark(10))
@server.get('/api/disk_benchmark') @server.get('/api/disk_benchmark')
def get_disk_benchmark(): def get_disk_benchmark():
from src.utilities.benchmark import disk_io_benchmark
results = disk_io_benchmark() results = disk_io_benchmark()
return {'write_speed': results[0], 'read_speed': results[-1]} return {'write_speed': results[0], 'read_speed': results[-1]}
# -------------------------------------------- def start_server(hostname=None):
# Error Handlers:
# --------------------------------------------
@server.errorhandler(JobNotFoundError) # get hostname
def handle_job_not_found(job_error): if not hostname:
return str(job_error), 400 local_hostname = socket.gethostname()
hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")
# load flask settings
server.config['HOSTNAME'] = hostname
server.config['PORT'] = int(Config.port_number)
server.config['UPLOAD_FOLDER'] = system_safe_path(os.path.expanduser(Config.upload_folder))
server.config['MAX_CONTENT_PATH'] = Config.max_content_path
server.config['enable_split_jobs'] = Config.enable_split_jobs
@server.errorhandler(DetachedInstanceError) # disable most Flask logging
def handle_detached_instance(_): flask_log = logging.getLogger('werkzeug')
return "Unavailable", 503 flask_log.setLevel(Config.flask_log_level.upper())
logger.debug('Starting API server')
@server.errorhandler(Exception) server.run(host='0.0.0.0', port=server.config['PORT'], debug=Config.flask_debug_enable, use_reloader=False,
def handle_general_error(general_error): threaded=True)
err_msg = f"Server error: {general_error}"
logger.error(err_msg)
return err_msg, 500
# --------------------------------------------
# Debug / Development Only:
# --------------------------------------------
@server.get('/api/_debug/detected_clients')
def detected_clients():
    """Debug-only endpoint: list peer hostnames discovered via zeroconf."""
    # todo: dev/debug only. Should not ship this - probably.
    from src.utilities.zeroconf_server import ZeroconfServer
    return ZeroconfServer.found_hostnames()
@server.get('/api/_debug/clear_history')
def clear_history():
    """Debug-only endpoint: wipe the render queue's job history."""
    RenderQueue.clear_history()
    return 'success'
-145
View File
@@ -1,145 +0,0 @@
#!/usr/bin/env python3
import logging
import os
import shutil
import tempfile
import zipfile
from datetime import datetime
import requests
from tqdm import tqdm
from werkzeug.utils import secure_filename
logger = logging.getLogger()


class JobImportHandler:
    """Validates incoming job data and stages project files for rendering.

    Supports three project sources: a direct file upload, a remote URL, or
    a path on the local filesystem. The chosen source is copied into a
    timestamped per-job work directory and, if zipped, extracted.
    """

    @classmethod
    def validate_job_data(cls, new_job_data, upload_directory, uploaded_file=None):
        """Validate required job fields and stage the project file locally.

        Args:
            new_job_data (dict): Incoming job description; must contain
                'name' and 'engine_name', plus one of 'url' / 'local_path'
                (or *uploaded_file*).
            upload_directory (str): Root directory for staged job files.
            uploaded_file: Optional uploaded file object (werkzeug
                FileStorage) from the HTTP request.

        Raises:
            KeyError: A required field is missing.
            FileNotFoundError: No usable project source was provided, or a
                URL download failed.

        Returns:
            dict: *new_job_data* with '__loaded_project_local_path' added.
        """
        # check for required keys
        job_name = new_job_data.get('name')
        engine_name = new_job_data.get('engine_name')
        if not job_name:
            raise KeyError("Missing job name")
        if not engine_name:
            raise KeyError("Missing engine name")

        project_url = new_job_data.get('url', None)
        local_path = new_job_data.get('local_path', None)
        downloaded_file_path = None
        if uploaded_file and uploaded_file.filename:
            referred_name = os.path.basename(uploaded_file.filename)
        elif project_url:
            referred_name, downloaded_file_path = cls.download_project_from_url(project_url)
            if not referred_name:
                raise FileNotFoundError(f"Error downloading file from URL: {project_url}")
        elif local_path and os.path.exists(local_path):
            referred_name = os.path.basename(local_path)
        else:
            raise FileNotFoundError("Cannot find any valid project paths")

        # Prepare the local filepath (timestamped so repeated names don't clash)
        cleaned_path_name = os.path.splitext(referred_name)[0].replace(' ', '-')
        job_dir = os.path.join(upload_directory, '-'.join(
            [datetime.now().strftime("%Y.%m.%d_%H.%M.%S"), engine_name, cleaned_path_name]))
        os.makedirs(job_dir, exist_ok=True)
        project_source_dir = os.path.join(job_dir, 'source')
        os.makedirs(project_source_dir, exist_ok=True)

        # Move projects to their work directories
        if uploaded_file and uploaded_file.filename:
            loaded_project_local_path = os.path.join(project_source_dir, secure_filename(uploaded_file.filename))
            uploaded_file.save(loaded_project_local_path)
            logger.info(f"Transfer complete for {loaded_project_local_path.split(upload_directory)[-1]}")
        elif project_url:
            loaded_project_local_path = os.path.join(project_source_dir, referred_name)
            shutil.move(downloaded_file_path, loaded_project_local_path)
            logger.info(f"Download complete for {loaded_project_local_path.split(upload_directory)[-1]}")
        else:  # local_path — existence already verified above
            loaded_project_local_path = os.path.join(project_source_dir, referred_name)
            shutil.copy(local_path, loaded_project_local_path)
            logger.info(f"Import complete for {loaded_project_local_path.split(upload_directory)[-1]}")

        if loaded_project_local_path.lower().endswith('.zip'):
            loaded_project_local_path = cls.process_zipped_project(loaded_project_local_path)

        new_job_data["__loaded_project_local_path"] = loaded_project_local_path
        return new_job_data

    @staticmethod
    def download_project_from_url(project_url):
        """Download a project file from *project_url* into the temp dir.

        Returns:
            tuple: ``(basename, local_temp_path)`` on success, or
            ``(None, None)`` on any failure. Non-200 responses previously
            fell through and returned a bare ``None``, which broke the
            two-value unpacking in ``validate_job_data``.
        """
        logger.info(f"Downloading project from url: {project_url}")
        referred_name = os.path.basename(project_url)
        try:
            # Close the connection deterministically, even on error paths.
            with requests.get(project_url, stream=True) as response:
                if response.status_code != 200:
                    logger.error(f"Error downloading file: HTTP {response.status_code}")
                    return None, None
                # Get the total file size from the "Content-Length" header (0 if unknown)
                file_size = int(response.headers.get("Content-Length", 0))
                progress_bar = tqdm(total=file_size, unit="B", unit_scale=True)
                downloaded_file_path = os.path.join(tempfile.gettempdir(), referred_name)
                with open(downloaded_file_path, "wb") as file:
                    for chunk in response.iter_content(chunk_size=1024):
                        if chunk:
                            file.write(chunk)
                            progress_bar.update(len(chunk))
                progress_bar.close()
                return referred_name, downloaded_file_path
        except Exception as e:
            logger.error(f"Error downloading file: {e}")
        return None, None

    @staticmethod
    def process_zipped_project(zip_path):
        """Extract a zipped project and return the path to its project file.

        Args:
            zip_path (str): The path to the zip file.

        Raises:
            ValueError: If the archive is invalid, or it does not contain
                exactly one project file.

        Returns:
            str: The path to the main project file.
        """
        work_path = os.path.dirname(zip_path)
        try:
            with zipfile.ZipFile(zip_path, 'r') as myzip:
                myzip.extractall(work_path)
            project_files = [x for x in os.listdir(work_path) if os.path.isfile(os.path.join(work_path, x))]
            project_files = [x for x in project_files if '.zip' not in x]
            logger.debug(f"Zip files: {project_files}")
            # If there's more than 1 project file or none, raise an error
            if len(project_files) != 1:
                raise ValueError(f'Cannot find a valid project file in {os.path.basename(zip_path)}')
            extracted_project_path = os.path.join(work_path, project_files[0])
            logger.info(f"Extracted zip file to {extracted_project_path}")
        except (zipfile.BadZipFile, zipfile.LargeZipFile) as e:
            logger.error(f"Error processing zip file: {e}")
            raise ValueError(f"Error processing zip file: {e}") from e
        return extracted_project_path
+3 -3
View File
@@ -7,8 +7,8 @@ from pathlib import Path
from src.utilities.ffmpeg_helper import generate_thumbnail, save_first_frame from src.utilities.ffmpeg_helper import generate_thumbnail, save_first_frame
logger = logging.getLogger() logger = logging.getLogger()
supported_video_formats = ['.mp4', '.mov', '.avi', '.mpg', '.mpeg', '.mxf', '.m4v', '.mkv', '.webm'] supported_video_formats = ['.mp4', '.mov', '.avi', '.mpg', '.mpeg', '.mxf', '.m4v', 'mkv']
supported_image_formats = ['.jpg', '.png', '.exr', '.tif', '.tga', '.bmp', '.webp'] supported_image_formats = ['.jpg', '.png', '.exr', '.tif']
class PreviewManager: class PreviewManager:
@@ -17,7 +17,7 @@ class PreviewManager:
_running_jobs = {} _running_jobs = {}
@classmethod @classmethod
def __generate_job_preview_worker(cls, job, replace_existing=False, max_width=480): def __generate_job_preview_worker(cls, job, replace_existing=False, max_width=320):
# Determine best source file to use for thumbs # Determine best source file to use for thumbs
job_file_list = job.file_list() job_file_list = job.file_list()
+116 -134
View File
@@ -10,6 +10,7 @@ from urllib.parse import urljoin
from src.utilities.misc_helper import is_localhost from src.utilities.misc_helper import is_localhost
from src.utilities.status_utils import RenderStatus from src.utilities.status_utils import RenderStatus
from src.utilities.zeroconf_server import ZeroconfServer
status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green', status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green',
RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple', RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple',
@@ -21,11 +22,19 @@ categories = [RenderStatus.RUNNING, RenderStatus.WAITING_FOR_SUBJOBS, RenderStat
logger = logging.getLogger() logger = logging.getLogger()
OFFLINE_MAX = 4 OFFLINE_MAX = 4
LOOPBACK = '127.0.0.1'
class RenderServerProxy: class RenderServerProxy:
"""The ServerProxy class is responsible for interacting with a remote server. """
It provides convenience methods to request data from the server and store the status of the server. The ServerProxy class is responsible for interacting with a remote server.
It provides methods to request data from the server and store the status of the server.
Attributes:
system_cpu (str): The CPU type of the system.
system_cpu_count (int): The number of CPUs in the system.
system_os (str): The operating system of the system.
system_os_version (str): The version of the operating system.
""" """
def __init__(self, hostname, server_port="8080"): def __init__(self, hostname, server_port="8080"):
@@ -45,27 +54,18 @@ class RenderServerProxy:
self.system_cpu_count = None self.system_cpu_count = None
self.system_os = None self.system_os = None
self.system_os_version = None self.system_os_version = None
self.system_api_version = None
# --------------------------------------------
# Basics / Connection:
# --------------------------------------------
def __repr__(self): def __repr__(self):
return f"<RenderServerProxy - {self.hostname}>" return f"<RenderServerProxy - {self.hostname}>"
def check_connection(self): def connect(self):
try: return self.status()
return self.request("heartbeat").ok
except Exception:
pass
return False
def is_online(self): def is_online(self):
if self.__update_in_background: if self.__update_in_background:
return self.__offline_flags < OFFLINE_MAX return self.__offline_flags < OFFLINE_MAX
else: else:
return self.check_connection() return self.get_status() is not None
def status(self): def status(self):
if not self.is_online(): if not self.is_online():
@@ -73,10 +73,6 @@ class RenderServerProxy:
running_jobs = [x for x in self.__jobs_cache if x['status'] == 'running'] if self.__jobs_cache else [] running_jobs = [x for x in self.__jobs_cache if x['status'] == 'running'] if self.__jobs_cache else []
return f"{len(running_jobs)} running" if running_jobs else "Ready" return f"{len(running_jobs)} running" if running_jobs else "Ready"
# --------------------------------------------
# Requests:
# --------------------------------------------
def request_data(self, payload, timeout=5): def request_data(self, payload, timeout=5):
try: try:
req = self.request(payload, timeout) req = self.request(payload, timeout)
@@ -104,13 +100,8 @@ class RenderServerProxy:
return None return None
def request(self, payload, timeout=5): def request(self, payload, timeout=5):
from src.api.api_server import API_VERSION hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.get(f'http://{self.hostname}:{self.port}/api/{payload}', timeout=timeout, return requests.get(f'http://{hostname}:{self.port}/api/{payload}', timeout=timeout)
headers={"X-API-Version": str(API_VERSION)})
# --------------------------------------------
# Background Updates:
# --------------------------------------------
def start_background_update(self): def start_background_update(self):
if self.__update_in_background: if self.__update_in_background:
@@ -128,6 +119,17 @@ class RenderServerProxy:
self.__background_thread.daemon = True self.__background_thread.daemon = True
self.__background_thread.start() self.__background_thread.start()
def stop_background_update(self):
self.__update_in_background = False
def get_job_info(self, job_id, timeout=5):
return self.request_data(f'job/{job_id}', timeout=timeout)
def get_all_jobs(self, timeout=5, ignore_token=False):
if not self.__update_in_background or ignore_token:
self.__update_job_cache(timeout, ignore_token)
return self.__jobs_cache.copy() if self.__jobs_cache else None
def __update_job_cache(self, timeout=40, ignore_token=False): def __update_job_cache(self, timeout=40, ignore_token=False):
if self.__offline_flags: # if we're offline, don't bother with the long poll if self.__offline_flags: # if we're offline, don't bother with the long poll
@@ -145,21 +147,15 @@ class RenderServerProxy:
self.__jobs_cache = sorted_jobs self.__jobs_cache = sorted_jobs
self.__jobs_cache_token = status_result['token'] self.__jobs_cache_token = status_result['token']
def stop_background_update(self):
self.__update_in_background = False
# --------------------------------------------
# Get System Info:
# --------------------------------------------
def get_all_jobs(self, timeout=5, ignore_token=False):
if not self.__update_in_background or ignore_token:
self.__update_job_cache(timeout, ignore_token)
return self.__jobs_cache.copy() if self.__jobs_cache else None
def get_data(self, timeout=5): def get_data(self, timeout=5):
return self.request_data('full_status', timeout=timeout) return self.request_data('full_status', timeout=timeout)
def cancel_job(self, job_id, confirm=False):
return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')
def delete_job(self, job_id, confirm=False):
return self.request_data(f'job/{job_id}/delete?confirm={confirm}')
def get_status(self): def get_status(self):
status = self.request_data('status') status = self.request_data('status')
if status and not self.system_cpu: if status and not self.system_cpu:
@@ -167,67 +163,13 @@ class RenderServerProxy:
self.system_cpu_count = status['cpu_count'] self.system_cpu_count = status['cpu_count']
self.system_os = status['system_os'] self.system_os = status['system_os']
self.system_os_version = status['system_os_version'] self.system_os_version = status['system_os_version']
self.system_api_version = status['api_version']
return status return status
# -------------------------------------------- def is_engine_available(self, engine_name):
# Get Job Info: return self.request_data(f'{engine_name}/is_available')
# --------------------------------------------
def get_job_info(self, job_id, timeout=5): def get_all_engines(self):
return self.request_data(f'job/{job_id}', timeout=timeout) return self.request_data('all_engines')
def get_job_files_list(self, job_id):
return self.request_data(f"job/{job_id}/file_list")
# --------------------------------------------
# Job Lifecycle:
# --------------------------------------------
def post_job_to_server(self, file_path, job_data, callback=None):
"""
Posts a job to the server.
Args:
file_path (str): The path to the file to upload.
job_data (dict): A dict of jobs data.
callback (function, optional): A callback function to call during the upload. Defaults to None.
Returns:
Response: The response from the server.
"""
# Check if file exists
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")
# Bypass uploading file if posting to localhost
if self.is_localhost:
job_data['local_path'] = file_path
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job')
headers = {'Content-Type': 'application/json'}
return requests.post(url, data=json.dumps(job_data), headers=headers)
# Prepare the form data for remote host
with open(file_path, 'rb') as file:
encoder = MultipartEncoder({
'file': (os.path.basename(file_path), file, 'application/octet-stream'),
'json': (None, json.dumps(job_data), 'application/json'),
})
# Create a monitor that will track the upload progress
monitor = MultipartEncoderMonitor(encoder, callback) if callback else MultipartEncoderMonitor(encoder)
headers = {'Content-Type': monitor.content_type}
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job')
# Send the request with proper resource management
with requests.post(url, data=monitor, headers=headers) as response:
return response
def cancel_job(self, job_id, confirm=False):
return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')
def delete_job(self, job_id, confirm=False):
return self.request_data(f'job/{job_id}/delete?confirm={confirm}')
def send_subjob_update_notification(self, parent_id, subjob): def send_subjob_update_notification(self, parent_id, subjob):
""" """
@@ -240,32 +182,92 @@ class RenderServerProxy:
Returns: Returns:
Response: The response from the server. Response: The response from the server.
""" """
return requests.post(f'http://{self.hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification', hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.post(f'http://{hostname}:{self.port}/api/job/{parent_id}/send_subjob_update_notification',
json=subjob.json()) json=subjob.json())
# -------------------------------------------- def post_job_to_server(self, file_path, job_list, callback=None):
# Engines:
# --------------------------------------------
def is_engine_available(self, engine_name):
return self.request_data(f'{engine_name}/is_available')
def get_all_engines(self):
# todo: this doesnt work
return self.request_data('all_engines')
def get_engine_info(self, response_type='standard', timeout=5):
""" """
Fetches engine information from the server. Posts a job to the server.
Args: Args:
response_type (str, optional): Returns standard or full version of engine info file_path (str): The path to the file to upload.
job_list (list): A list of jobs to post.
callback (function, optional): A callback function to call during the upload. Defaults to None.
Returns:
Response: The response from the server.
"""
try:
# Check if file exists
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")
# Bypass uploading file if posting to localhost
if self.is_localhost:
jobs_with_path = [{'local_path': file_path, **item} for item in job_list]
job_data = json.dumps(jobs_with_path)
url = urljoin(f'http://{LOOPBACK}:{self.port}', '/api/add_job')
headers = {'Content-Type': 'application/json'}
return requests.post(url, data=job_data, headers=headers)
# Prepare the form data for remote host
with open(file_path, 'rb') as file:
encoder = MultipartEncoder({
'file': (os.path.basename(file_path), file, 'application/octet-stream'),
'json': (None, json.dumps(job_list), 'application/json'),
})
# Create a monitor that will track the upload progress
monitor = MultipartEncoderMonitor(encoder, callback) if callback else MultipartEncoderMonitor(encoder)
headers = {'Content-Type': monitor.content_type}
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job')
# Send the request with proper resource management
with requests.post(url, data=monitor, headers=headers) as response:
return response
except requests.ConnectionError as e:
logger.error(f"Connection error: {e}")
except Exception as e:
logger.error(f"An error occurred: {e}")
def get_job_files_list(self, job_id):
return self.request_data(f"job/{job_id}/file_list")
def download_all_job_files(self, job_id, save_path):
hostname = LOOPBACK if self.is_localhost else self.hostname
url = f"http://{hostname}:{self.port}/api/job/{job_id}/download_all"
return self.__download_file_from_url(url, output_filepath=save_path)
def download_job_file(self, job_id, job_filename, save_path):
hostname = LOOPBACK if self.is_localhost else self.hostname
url = f"http://{hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
return self.__download_file_from_url(url, output_filepath=save_path)
@staticmethod
def __download_file_from_url(url, output_filepath):
with requests.get(url, stream=True) as r:
r.raise_for_status()
with open(output_filepath, 'wb') as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
return output_filepath
# --- Renderer --- #
def get_renderer_info(self, response_type='standard', timeout=5):
"""
Fetches renderer information from the server.
Args:
response_type (str, optional): Returns standard or full version of renderer info
timeout (int, optional): The number of seconds to wait for a response from the server. Defaults to 5. timeout (int, optional): The number of seconds to wait for a response from the server. Defaults to 5.
Returns: Returns:
dict: A dictionary containing the engine information. dict: A dictionary containing the renderer information.
""" """
all_data = self.request_data(f"engine_info?response_type={response_type}", timeout=timeout) all_data = self.request_data(f"renderer_info?response_type={response_type}", timeout=timeout)
return all_data return all_data
def delete_engine(self, engine, version, system_cpu=None): def delete_engine(self, engine, version, system_cpu=None):
@@ -281,25 +283,5 @@ class RenderServerProxy:
Response: The response from the server. Response: The response from the server.
""" """
form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu} form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu}
return requests.post(f'http://{self.hostname}:{self.port}/api/delete_engine', json=form_data) hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.post(f'http://{hostname}:{self.port}/api/delete_engine', json=form_data)
# --------------------------------------------
# Download Files:
# --------------------------------------------
def download_all_job_files(self, job_id, save_path):
url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
return self.__download_file_from_url(url, output_filepath=save_path)
def download_job_file(self, job_id, job_filename, save_path):
url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download?filename={job_filename}"
return self.__download_file_from_url(url, output_filepath=save_path)
@staticmethod
def __download_file_from_url(url, output_filepath):
with requests.get(url, stream=True) as r:
r.raise_for_status()
with open(output_filepath, 'wb') as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
return output_filepath
+292 -119
View File
@@ -3,7 +3,10 @@ import os
import socket import socket
import threading import threading
import time import time
import zipfile
from concurrent.futures import ThreadPoolExecutor
import requests
from plyer import notification from plyer import notification
from pubsub import pub from pubsub import pub
@@ -12,7 +15,7 @@ from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue from src.render_queue import RenderQueue
from src.utilities.config import Config from src.utilities.config import Config
from src.utilities.server_helper import download_missing_frames_from_subjob, distribute_server_work from src.utilities.misc_helper import get_file_size_human
from src.utilities.status_utils import RenderStatus, string_to_status from src.utilities.status_utils import RenderStatus, string_to_status
from src.utilities.zeroconf_server import ZeroconfServer from src.utilities.zeroconf_server import ZeroconfServer
@@ -39,10 +42,10 @@ class DistributedJobManager:
""" """
Responds to the 'frame_complete' pubsub message for local jobs. Responds to the 'frame_complete' pubsub message for local jobs.
Args: Parameters:
job_id (str): The ID of the job that has changed status. job_id (str): The ID of the job that has changed status.
old_status (str): The previous status of the job. old_status (str): The previous status of the job.
new_status (str): The new (current) status of the job. new_status (str): The new (current) status of the job.
Note: Do not call directly. Instead, call via the 'frame_complete' pubsub message. Note: Do not call directly. Instead, call via the 'frame_complete' pubsub message.
""" """
@@ -75,10 +78,10 @@ class DistributedJobManager:
Responds to the 'status_change' pubsub message for local jobs. Responds to the 'status_change' pubsub message for local jobs.
If it's a child job, it notifies the parent job about the status change. If it's a child job, it notifies the parent job about the status change.
Args: Parameters:
job_id (str): The ID of the job that has changed status. job_id (str): The ID of the job that has changed status.
old_status (str): The previous status of the job. old_status (str): The previous status of the job.
new_status (str): The new (current) status of the job. new_status (str): The new (current) status of the job.
Note: Do not call directly. Instead, call via the 'status_change' pubsub message. Note: Do not call directly. Instead, call via the 'status_change' pubsub message.
""" """
@@ -129,12 +132,14 @@ class DistributedJobManager:
# -------------------------------------------- # --------------------------------------------
@classmethod @classmethod
def create_render_job(cls, new_job_attributes, loaded_project_local_path): def create_render_job(cls, job_data, loaded_project_local_path):
"""Creates render jobs. Pass in dict of job_data and the local path to the project. It creates and returns a new """
render job. Creates render jobs.
This method job data and a local path to a loaded project. It creates and returns new a render job.
Args: Args:
new_job_attributes (dict): Dict of desired attributes for new job (frame count, renderer, output path, etc) job_data (dict): Job data.
loaded_project_local_path (str): The local path to the loaded project. loaded_project_local_path (str): The local path to the loaded project.
Returns: Returns:
@@ -142,7 +147,7 @@ class DistributedJobManager:
""" """
# get new output path in output_dir # get new output path in output_dir
output_path = new_job_attributes.get('output_path') output_path = job_data.get('output_path')
if not output_path: if not output_path:
loaded_project_filename = os.path.basename(loaded_project_local_path) loaded_project_filename = os.path.basename(loaded_project_local_path)
output_filename = os.path.splitext(loaded_project_filename)[0] output_filename = os.path.splitext(loaded_project_filename)[0]
@@ -156,27 +161,27 @@ class DistributedJobManager:
logger.debug(f"New job output path: {output_path}") logger.debug(f"New job output path: {output_path}")
# create & configure jobs # create & configure jobs
worker = EngineManager.create_worker(engine_name=new_job_attributes['engine_name'], worker = EngineManager.create_worker(renderer=job_data['renderer'],
input_path=loaded_project_local_path, input_path=loaded_project_local_path,
output_path=output_path, output_path=output_path,
engine_version=new_job_attributes.get('engine_version'), engine_version=job_data.get('engine_version'),
args=new_job_attributes.get('args', {}), args=job_data.get('args', {}),
parent=new_job_attributes.get('parent'), parent=job_data.get('parent'),
name=new_job_attributes.get('name')) name=job_data.get('name'))
worker.status = new_job_attributes.get("initial_status", worker.status) # todo: is this necessary? worker.status = job_data.get("initial_status", worker.status) # todo: is this necessary?
worker.priority = int(new_job_attributes.get('priority', worker.priority)) worker.priority = int(job_data.get('priority', worker.priority))
worker.start_frame = int(new_job_attributes.get("start_frame", worker.start_frame)) worker.start_frame = int(job_data.get("start_frame", worker.start_frame))
worker.end_frame = int(new_job_attributes.get("end_frame", worker.end_frame)) worker.end_frame = int(job_data.get("end_frame", worker.end_frame))
worker.watchdog_timeout = Config.worker_process_timeout worker.watchdog_timeout = Config.worker_process_timeout
worker.hostname = socket.gethostname() worker.hostname = socket.gethostname()
# determine if we can / should split the job # determine if we can / should split the job
if new_job_attributes.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent: if job_data.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent:
cls.split_into_subjobs_async(worker, new_job_attributes, loaded_project_local_path) cls.split_into_subjobs_async(worker, job_data, loaded_project_local_path)
else: else:
worker.status = RenderStatus.NOT_STARTED logger.debug("Not splitting into subjobs")
RenderQueue.add_to_render_queue(worker, force_start=new_job_attributes.get('force_start', False)) RenderQueue.add_to_render_queue(worker, force_start=job_data.get('force_start', False))
PreviewManager.update_previews_for_job(worker) PreviewManager.update_previews_for_job(worker)
return worker return worker
@@ -187,7 +192,8 @@ class DistributedJobManager:
@classmethod @classmethod
def handle_subjob_update_notification(cls, local_job, subjob_data): def handle_subjob_update_notification(cls, local_job, subjob_data):
"""Responds to a notification from a remote subjob and the host requests any subsequent updates from the subjob. """
Responds to a notification from a remote subjob and the host requests any subsequent updates from the subjob.
Args: Args:
local_job (BaseRenderWorker): The local parent job worker. local_job (BaseRenderWorker): The local parent job worker.
@@ -205,95 +211,145 @@ class DistributedJobManager:
if old_status != subjob_status.value: if old_status != subjob_status.value:
logger.debug(f"Subjob status changed: {logname} -> {subjob_status.value}") logger.debug(f"Subjob status changed: {logname} -> {subjob_status.value}")
download_success = download_missing_frames_from_subjob(local_job, subjob_id, subjob_hostname) cls.download_missing_frames_from_subjob(local_job, subjob_id, subjob_hostname)
if subjob_data['status'] == 'completed' and download_success:
local_job.children[subjob_key]['download_status'] = 'completed'
@classmethod @staticmethod
def wait_for_subjobs(cls, parent_job): def download_missing_frames_from_subjob(local_job, subjob_id, subjob_hostname):
"""Check the status of subjobs and waits until they are all finished. Download rendered frames from subjobs
when they are completed.
Args: try:
parent_job: Worker object that has child jobs local_files = [os.path.basename(x) for x in local_job.file_list()]
subjob_proxy = RenderServerProxy(subjob_hostname)
subjob_files = subjob_proxy.get_job_files_list(job_id=subjob_id) or []
for subjob_filename in subjob_files:
if subjob_filename not in local_files:
try:
logger.debug(f"Downloading new file '{subjob_filename}' from {subjob_hostname}")
local_save_path = os.path.join(os.path.dirname(local_job.output_path), subjob_filename)
subjob_proxy.download_job_file(job_id=subjob_id, job_filename=subjob_filename,
save_path=local_save_path)
logger.debug(f'Downloaded successfully - {local_save_path}')
except Exception as e:
logger.error(f"Error downloading file '{subjob_filename}' from {subjob_hostname}: {e}")
except Exception as e:
logger.exception(f'Uncaught exception while trying to download from subjob: {e}')
@staticmethod
def download_all_from_subjob(local_job, subjob_id, subjob_hostname):
"""
Downloads and extracts files from a completed subjob on a remote server.
Parameters:
local_job (BaseRenderWorker): The local parent job worker.
subjob_id (str or int): The ID of the subjob.
subjob_hostname (str): The hostname of the remote server where the subjob is located.
Returns: Returns:
bool: True if the files have been downloaded and extracted successfully, False otherwise.
""" """
logger.debug(f"Waiting for subjobs for job {parent_job}")
parent_job.status = RenderStatus.WAITING_FOR_SUBJOBS child_key = f'{subjob_id}@{subjob_hostname}'
logname = f"{local_job.id}:{child_key}"
zip_file_path = local_job.output_path + f'_{subjob_hostname}_{subjob_id}.zip'
# download zip file from server
try:
local_job.children[child_key]['download_status'] = 'working'
logger.info(f"Downloading completed subjob files from {subjob_hostname} to localhost")
RenderServerProxy(subjob_hostname).download_all_job_files(subjob_id, zip_file_path)
logger.info(f"File transfer complete for {logname} - Transferred {get_file_size_human(zip_file_path)}")
except Exception as e:
logger.error(f"Error downloading files from remote server: {e}")
local_job.children[child_key]['download_status'] = 'failed'
return False
# extract zip
try:
logger.debug(f"Extracting zip file: {zip_file_path}")
extract_path = os.path.dirname(zip_file_path)
with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
zip_ref.extractall(extract_path)
logger.info(f"Successfully extracted zip to: {extract_path}")
os.remove(zip_file_path)
local_job.children[child_key]['download_status'] = 'complete'
except Exception as e:
logger.exception(f"Exception extracting zip file: {e}")
local_job.children[child_key]['download_status'] = 'failed'
return local_job.children[child_key].get('download_status', None) == 'complete'
@classmethod
def wait_for_subjobs(cls, local_job):
# todo: rewrite this method
logger.debug(f"Waiting for subjobs for job {local_job}")
local_job.status = RenderStatus.WAITING_FOR_SUBJOBS
statuses_to_download = [RenderStatus.CANCELLED, RenderStatus.ERROR, RenderStatus.COMPLETED] statuses_to_download = [RenderStatus.CANCELLED, RenderStatus.ERROR, RenderStatus.COMPLETED]
def subjobs_not_downloaded(): def subjobs_not_downloaded():
return {k: v for k, v in parent_job.children.items() if 'download_status' not in v or return {k: v for k, v in local_job.children.items() if 'download_status' not in v or
v['download_status'] == 'working' or v['download_status'] is None} v['download_status'] == 'working' or v['download_status'] is None}
logger.info(f'Waiting on {len(subjobs_not_downloaded())} subjobs for {parent_job.id}') logger.info(f'Waiting on {len(subjobs_not_downloaded())} subjobs for {local_job.id}')
server_delay = 10 while len(subjobs_not_downloaded()):
sleep_counter = 0 for child_key, subjob_cached_data in subjobs_not_downloaded().items():
while parent_job.status == RenderStatus.WAITING_FOR_SUBJOBS:
if sleep_counter % server_delay == 0: # only ping servers every x seconds subjob_id = child_key.split('@')[0]
for child_key, subjob_cached_data in subjobs_not_downloaded().items(): subjob_hostname = child_key.split('@')[-1]
subjob_id = child_key.split('@')[0] # Fetch info from server and handle failing case
subjob_hostname = child_key.split('@')[-1] subjob_data = RenderServerProxy(subjob_hostname).get_job_info(subjob_id)
if not subjob_data:
logger.warning(f"No response from: {subjob_hostname}")
# todo: handle timeout / missing server situations
continue
# Fetch info from server and handle failing case # Update parent job cache but keep the download status
subjob_data = RenderServerProxy(subjob_hostname).get_job_info(subjob_id) download_status = local_job.children[child_key].get('download_status', None)
if not subjob_data: local_job.children[child_key] = subjob_data
logger.warning(f"No response from {subjob_hostname}") local_job.children[child_key]['download_status'] = download_status
# timeout / missing server situations
parent_job.children[child_key]['download_status'] = f'error: No response from {subjob_hostname}'
continue
# Update parent job cache but keep the download status status = string_to_status(subjob_data.get('status', ''))
download_status = parent_job.children[child_key].get('download_status', None) status_msg = f"Subjob {child_key} | {status} | " \
parent_job.children[child_key] = subjob_data f"{float(subjob_data.get('percent_complete')) * 100.0}%"
parent_job.children[child_key]['download_status'] = download_status logger.debug(status_msg)
status = string_to_status(subjob_data.get('status', '')) # Still working in another thread - keep waiting
status_msg = f"Subjob {child_key} | {status} | " \ if download_status == 'working':
f"{float(subjob_data.get('percent_complete')) * 100.0}%" continue
logger.debug(status_msg)
# Check if job is finished, but has not had files copied yet over yet # Check if job is finished, but has not had files copied yet over yet
if download_status is None and subjob_data['file_count'] and status in statuses_to_download: if download_status is None and subjob_data['file_count'] and status in statuses_to_download:
try: try:
download_missing_frames_from_subjob(parent_job, subjob_id, subjob_hostname) cls.download_missing_frames_from_subjob(local_job, subjob_id, subjob_hostname)
parent_job.children[child_key]['download_status'] = 'complete' except Exception as e:
except Exception as e: logger.error(f"Error downloading missing frames from subjob: {e}")
logger.error(f"Error downloading missing frames from subjob: {e}")
parent_job.children[child_key]['download_status'] = 'error: {}'
# Any finished jobs not successfully downloaded at this point are skipped # Any finished jobs not successfully downloaded at this point are skipped
if parent_job.children[child_key].get('download_status', None) is None and \ if local_job.children[child_key].get('download_status', None) is None and \
status in statuses_to_download: status in statuses_to_download:
logger.warning(f"Skipping waiting on downloading from subjob: {child_key}") logger.warning(f"Skipping waiting on downloading from subjob: {child_key}")
parent_job.children[child_key]['download_status'] = 'skipped' local_job.children[child_key]['download_status'] = 'skipped'
if subjobs_not_downloaded(): if subjobs_not_downloaded():
logger.debug(f"Waiting on {len(subjobs_not_downloaded())} subjobs on " logger.debug(f"Waiting on {len(subjobs_not_downloaded())} subjobs on "
f"{', '.join(list(subjobs_not_downloaded().keys()))}") f"{', '.join(list(subjobs_not_downloaded().keys()))}")
time.sleep(1) time.sleep(5)
sleep_counter += 1
else: # exit the loop
parent_job.status = RenderStatus.RUNNING
# -------------------------------------------- # --------------------------------------------
# Creating Subjobs # Creating Subjobs
# -------------------------------------------- # --------------------------------------------
@classmethod @classmethod
def split_into_subjobs_async(cls, parent_worker, new_job_attributes, project_path, system_os=None): def split_into_subjobs_async(cls, parent_worker, job_data, project_path, system_os=None):
# todo: I don't love this # todo: I don't love this
parent_worker.status = RenderStatus.CONFIGURING parent_worker.status = RenderStatus.CONFIGURING
cls.background_worker = threading.Thread(target=cls.split_into_subjobs, args=(parent_worker, new_job_attributes, cls.background_worker = threading.Thread(target=cls.split_into_subjobs, args=(parent_worker, job_data,
project_path, system_os)) project_path, system_os))
cls.background_worker.start() cls.background_worker.start()
@classmethod @classmethod
def split_into_subjobs(cls, parent_worker, new_job_attributes, project_path, system_os=None, specific_servers=None): def split_into_subjobs(cls, parent_worker, job_data, project_path, system_os=None, specific_servers=None):
""" """
Splits a job into subjobs and distributes them among available servers. Splits a job into subjobs and distributes them among available servers.
@@ -302,43 +358,43 @@ class DistributedJobManager:
subjob. subjob.
Args: Args:
parent_worker (Worker): The parent job what we're creating the subjobs for. parent_worker (Worker): The worker that is handling the job.
new_job_attributes (dict): Dict of desired attributes for new job (frame count, engine, output path, etc) job_data (dict): The data for the job to be split.
project_path (str): The path to the project. project_path (str): The path to the project associated with the job.
system_os (str, optional): Required OS. Default is any. system_os (str, optional): The operating system of the servers. Default is any OS.
specific_servers (list, optional): List of specific servers to split work between. Defaults to all found. specific_servers (list, optional): List of specific servers to split work between. Defaults to all found.
""" """
# Check availability # Check availability
available_servers = specific_servers if specific_servers else cls.find_available_servers(parent_worker.engine_name, parent_worker.status = RenderStatus.CONFIGURING
system_os) available_servers = specific_servers if specific_servers else cls.find_available_servers(parent_worker.renderer, system_os)
# skip if theres no external servers found logger.debug(f"Splitting into subjobs - Available servers: {available_servers}")
external_servers = [x for x in available_servers if x['hostname'] != parent_worker.hostname] all_subjob_server_data = cls.distribute_server_work(parent_worker.start_frame, parent_worker.end_frame, available_servers)
if not external_servers:
parent_worker.status = RenderStatus.NOT_STARTED
return
logger.debug(f"Splitting into subjobs - Available servers: {[x['hostname'] for x in available_servers]}")
all_subjob_server_data = distribute_server_work(parent_worker.start_frame, parent_worker.end_frame, available_servers)
# Prep and submit these sub-jobs # Prep and submit these sub-jobs
logger.info(f"Job {parent_worker.id} split plan: {all_subjob_server_data}") logger.info(f"Job {parent_worker.id} split plan: {all_subjob_server_data}")
try: try:
for subjob_data in all_subjob_server_data: for subjob_data in all_subjob_server_data:
subjob_hostname = subjob_data['hostname'] subjob_hostname = subjob_data['hostname']
post_results = cls.__create_subjob(new_job_attributes, project_path, subjob_data, subjob_hostname, if subjob_hostname != parent_worker.hostname:
parent_worker) post_results = cls.__create_subjob(job_data, project_path, subjob_data, subjob_hostname,
if not post_results.ok: parent_worker)
ValueError(f"Failed to create subjob on {subjob_hostname}") if not post_results.ok:
ValueError(f"Failed to create subjob on {subjob_hostname}")
# save child info # save child info
submission_results = post_results.json()[0] submission_results = post_results.json()[0]
child_key = f"{submission_results['id']}@{subjob_hostname}" child_key = f"{submission_results['id']}@{subjob_hostname}"
parent_worker.children[child_key] = submission_results parent_worker.children[child_key] = submission_results
else:
# truncate parent render_job
parent_worker.start_frame = max(subjob_data['frame_range'][0], parent_worker.start_frame)
parent_worker.end_frame = min(subjob_data['frame_range'][-1], parent_worker.end_frame)
logger.info(f"Local job now rendering from {parent_worker.start_frame} to {parent_worker.end_frame}")
# start subjobs # start subjobs
logger.debug(f"Created {len(all_subjob_server_data)} subjobs successfully") logger.debug(f"Created {len(all_subjob_server_data) - 1} subjobs successfully")
parent_worker.name = f"{parent_worker.name} (Parent)" parent_worker.name = f"{parent_worker.name}[{parent_worker.start_frame}-{parent_worker.end_frame}]"
parent_worker.status = RenderStatus.NOT_STARTED # todo: this won't work with scheduled starts parent_worker.status = RenderStatus.NOT_STARTED # todo: this won't work with scheduled starts
except Exception as e: except Exception as e:
# cancel all the subjobs # cancel all the subjobs
@@ -347,24 +403,143 @@ class DistributedJobManager:
RenderServerProxy(parent_worker.hostname).cancel_job(parent_worker.id, confirm=True) RenderServerProxy(parent_worker.hostname).cancel_job(parent_worker.id, confirm=True)
@staticmethod @staticmethod
def __create_subjob(new_job_attributes, project_path, server_data, server_hostname, parent_worker): def __create_subjob(job_data, project_path, server_data, server_hostname, parent_worker):
"""Convenience method to create subjobs for a parent worker""" subjob = job_data.copy()
subjob = new_job_attributes.copy()
subjob['name'] = f"{parent_worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]" subjob['name'] = f"{parent_worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]"
subjob['parent'] = f"{parent_worker.id}@{parent_worker.hostname}" subjob['parent'] = f"{parent_worker.id}@{parent_worker.hostname}"
subjob['start_frame'] = server_data['frame_range'][0] subjob['start_frame'] = server_data['frame_range'][0]
subjob['end_frame'] = server_data['frame_range'][-1] subjob['end_frame'] = server_data['frame_range'][-1]
subjob['engine_version'] = parent_worker.engine_version subjob['engine_version'] = parent_worker.renderer_version
logger.debug(f"Posting subjob with frames {subjob['start_frame']}-" logger.debug(f"Posting subjob with frames {subjob['start_frame']}-"
f"{subjob['end_frame']} to {server_hostname}") f"{subjob['end_frame']} to {server_hostname}")
post_results = RenderServerProxy(server_hostname).post_job_to_server( post_results = RenderServerProxy(server_hostname).post_job_to_server(
file_path=project_path, job_data=subjob) file_path=project_path, job_list=[subjob])
return post_results return post_results
# -------------------------------------------- # --------------------------------------------
# Server Handling # Server Handling
# -------------------------------------------- # --------------------------------------------
@staticmethod
def distribute_server_work(start_frame, end_frame, available_servers, method='cpu_benchmark'):
"""
Splits the frame range among available servers proportionally based on their performance (CPU count).
Args:
start_frame (int): The start frame number of the animation to be rendered.
end_frame (int): The end frame number of the animation to be rendered.
available_servers (list): A list of available server dictionaries. Each server dictionary should include
'hostname' and 'cpu_count' keys (see find_available_servers).
method (str, optional): Specifies the distribution method. Possible values are 'cpu_benchmark', 'cpu_count'
and 'evenly'.
Defaults to 'cpu_benchmark'.
Returns:
list: A list of server dictionaries where each dictionary includes the frame range and total number of
frames to be rendered by the server.
"""
# Calculate respective frames for each server
def divide_frames_by_cpu_count(frame_start, frame_end, servers):
total_frames = frame_end - frame_start + 1
total_cpus = sum(server['cpu_count'] for server in servers)
frame_ranges = {}
current_frame = frame_start
allocated_frames = 0
for i, server in enumerate(servers):
if i == len(servers) - 1: # if it's the last server
# Give all remaining frames to the last server
num_frames = total_frames - allocated_frames
else:
num_frames = round((server['cpu_count'] / total_cpus) * total_frames)
allocated_frames += num_frames
frame_end_for_server = current_frame + num_frames - 1
if current_frame <= frame_end_for_server:
frame_ranges[server['hostname']] = (current_frame, frame_end_for_server)
current_frame = frame_end_for_server + 1
return frame_ranges
def divide_frames_by_benchmark(frame_start, frame_end, servers):
def fetch_benchmark(server):
try:
benchmark = requests.get(f'http://{server["hostname"]}:{ZeroconfServer.server_port}'
f'/api/cpu_benchmark').text
server['cpu_benchmark'] = benchmark
logger.debug(f'Benchmark for {server["hostname"]}: {benchmark}')
except requests.exceptions.RequestException as e:
logger.error(f'Error fetching benchmark for {server["hostname"]}: {e}')
# Number of threads to use (can adjust based on your needs or number of servers)
threads = len(servers)
with ThreadPoolExecutor(max_workers=threads) as executor:
executor.map(fetch_benchmark, servers)
total_frames = frame_end - frame_start + 1
total_performance = sum(int(server['cpu_benchmark']) for server in servers)
frame_ranges = {}
current_frame = frame_start
allocated_frames = 0
for i, server in enumerate(servers):
if i == len(servers) - 1: # if it's the last server
# Give all remaining frames to the last server
num_frames = total_frames - allocated_frames
else:
num_frames = round((int(server['cpu_benchmark']) / total_performance) * total_frames)
allocated_frames += num_frames
frame_end_for_server = current_frame + num_frames - 1
if current_frame <= frame_end_for_server:
frame_ranges[server['hostname']] = (current_frame, frame_end_for_server)
current_frame = frame_end_for_server + 1
return frame_ranges
def divide_frames_equally(frame_start, frame_end, servers):
frame_range = frame_end - frame_start + 1
frames_per_server = frame_range // len(servers)
leftover_frames = frame_range % len(servers)
frame_ranges = {}
current_start = frame_start
for i, server in enumerate(servers):
current_end = current_start + frames_per_server - 1
if leftover_frames > 0:
current_end += 1
leftover_frames -= 1
if current_start <= current_end:
frame_ranges[server['hostname']] = (current_start, current_end)
current_start = current_end + 1
return frame_ranges
if len(available_servers) == 1:
breakdown = {available_servers[0]['hostname']: (start_frame, end_frame)}
else:
logger.debug(f'Splitting between {len(available_servers)} servers by {method} method')
if method == 'evenly':
breakdown = divide_frames_equally(start_frame, end_frame, available_servers)
elif method == 'cpu_benchmark':
breakdown = divide_frames_by_benchmark(start_frame, end_frame, available_servers)
elif method == 'cpu_count':
breakdown = divide_frames_by_cpu_count(start_frame, end_frame, available_servers)
else:
raise ValueError(f"Invalid distribution method: {method}")
server_breakdown = [server for server in available_servers if breakdown.get(server['hostname']) is not None]
for server in server_breakdown:
server['frame_range'] = breakdown[server['hostname']]
server['total_frames'] = breakdown[server['hostname']][-1] - breakdown[server['hostname']][0] + 1
return server_breakdown
@staticmethod @staticmethod
def find_available_servers(engine_name, system_os=None): def find_available_servers(engine_name, system_os=None):
""" """
@@ -374,15 +549,13 @@ class DistributedJobManager:
:param system_os: str, Restrict results to servers running a specific OS :param system_os: str, Restrict results to servers running a specific OS
:return: A list of dictionaries with each dict containing hostname and cpu_count of available servers :return: A list of dictionaries with each dict containing hostname and cpu_count of available servers
""" """
from api.api_server import API_VERSION
available_servers = [] available_servers = []
for hostname in ZeroconfServer.found_hostnames(): for hostname in ZeroconfServer.found_hostnames():
host_properties = ZeroconfServer.get_hostname_properties(hostname) host_properties = ZeroconfServer.get_hostname_properties(hostname)
if host_properties.get('api_version') == API_VERSION: if not system_os or (system_os and system_os == host_properties.get('system_os')):
if not system_os or (system_os and system_os == host_properties.get('system_os')): response = RenderServerProxy(hostname).is_engine_available(engine_name)
response = RenderServerProxy(hostname).is_engine_available(engine_name) if response and response.get('available', False):
if response and response.get('available', False): available_servers.append(response)
available_servers.append(response)
return available_servers return available_servers
@@ -396,6 +569,6 @@ if __name__ == '__main__':
time.sleep(2) time.sleep(2)
available_servers = DistributedJobManager.find_available_servers('blender') available_servers = DistributedJobManager.find_available_servers('blender')
print(f"AVAILABLE SERVERS ({len(available_servers)}): {available_servers}") print(f"AVAILABLE SERVERS ({len(available_servers)}): {available_servers}")
# results = distribute_server_work(1, 100, available_servers) # results = DistributedJobManager.distribute_server_work(1, 100, available_servers)
# print(f"RESULTS: {results}") # print(f"RESULTS: {results}")
ZeroconfServer.stop() ZeroconfServer.stop()
+1 -1
View File
@@ -8,7 +8,7 @@ class AERender(BaseRenderEngine):
def version(self): def version(self):
version = None version = None
try: try:
render_path = self.engine_path() render_path = self.renderer_path()
if render_path: if render_path:
ver_out = subprocess.check_output([render_path, '-version'], timeout=SUBPROCESS_TIMEOUT) ver_out = subprocess.check_output([render_path, '-version'], timeout=SUBPROCESS_TIMEOUT)
version = ver_out.decode('utf-8').split(" ")[-1].strip() version = ver_out.decode('utf-8').split(" ")[-1].strip()
+6 -5
View File
@@ -8,7 +8,8 @@ from src.engines.blender.blender_engine import Blender
from src.engines.core.base_downloader import EngineDownloader from src.engines.core.base_downloader import EngineDownloader
from src.utilities.misc_helper import current_system_os, current_system_cpu from src.utilities.misc_helper import current_system_os, current_system_cpu
url = "https://download.blender.org/release/" # url = "https://download.blender.org/release/"
url = "https://ftp.nluug.nl/pub/graphics/blender/release/" # much faster mirror for testing
logger = logging.getLogger() logger = logging.getLogger()
supported_formats = ['.zip', '.tar.xz', '.dmg'] supported_formats = ['.zip', '.tar.xz', '.dmg']
@@ -87,8 +88,8 @@ class BlenderDownloader(EngineDownloader):
threads = [] threads = []
results = [[] for _ in majors] results = [[] for _ in majors]
def thread_function(major_version, index, system_os_t, cpu_t): def thread_function(major_version, index, system_os, cpu):
results[index] = cls.__get_minor_versions(major_version, system_os_t, cpu_t) results[index] = cls.__get_minor_versions(major_version, system_os, cpu)
for i, m in enumerate(majors): for i, m in enumerate(majors):
thread = threading.Thread(target=thread_function, args=(m, i, system_os, cpu)) thread = threading.Thread(target=thread_function, args=(m, i, system_os, cpu))
@@ -125,7 +126,7 @@ class BlenderDownloader(EngineDownloader):
return None return None
@classmethod @classmethod
def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120, progress_callback=None): def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120):
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
@@ -135,7 +136,7 @@ class BlenderDownloader(EngineDownloader):
minor_versions = [x for x in cls.__get_minor_versions(major_version, system_os, cpu) if minor_versions = [x for x in cls.__get_minor_versions(major_version, system_os, cpu) if
x['version'] == version] x['version'] == version]
cls.download_and_extract_app(remote_url=minor_versions[0]['url'], download_location=download_location, cls.download_and_extract_app(remote_url=minor_versions[0]['url'], download_location=download_location,
timeout=timeout, progress_callback=progress_callback) timeout=timeout)
except IndexError: except IndexError:
logger.error("Cannot find requested engine") logger.error("Cannot find requested engine")
+27 -36
View File
@@ -1,13 +1,12 @@
import json import json
import re import re
from concurrent.futures import ThreadPoolExecutor
from src.engines.core.base_engine import * from src.engines.core.base_engine import *
from src.utilities.misc_helper import system_safe_path from src.utilities.misc_helper import system_safe_path
logger = logging.getLogger() logger = logging.getLogger()
_creationflags = subprocess.CREATE_NO_WINDOW if platform.system() == 'Windows' else 0
class Blender(BaseRenderEngine): class Blender(BaseRenderEngine):
@@ -24,21 +23,20 @@ class Blender(BaseRenderEngine):
from src.engines.blender.blender_worker import BlenderRenderWorker from src.engines.blender.blender_worker import BlenderRenderWorker
return BlenderRenderWorker return BlenderRenderWorker
@staticmethod def ui_options(self):
def ui_options(system_info):
from src.engines.blender.blender_ui import BlenderUI from src.engines.blender.blender_ui import BlenderUI
return BlenderUI.get_options(system_info) return BlenderUI.get_options(self)
def supported_extensions(self): @staticmethod
def supported_extensions():
return ['blend'] return ['blend']
def version(self): def version(self):
version = None version = None
try: try:
render_path = self.engine_path() render_path = self.renderer_path()
if render_path: if render_path:
ver_out = subprocess.check_output([render_path, '-v'], timeout=SUBPROCESS_TIMEOUT, ver_out = subprocess.check_output([render_path, '-v'], timeout=SUBPROCESS_TIMEOUT)
creationflags=_creationflags)
version = ver_out.decode('utf-8').splitlines()[0].replace('Blender', '').strip() version = ver_out.decode('utf-8').splitlines()[0].replace('Blender', '').strip()
except Exception as e: except Exception as e:
logger.error(f'Failed to get Blender version: {e}') logger.error(f'Failed to get Blender version: {e}')
@@ -52,12 +50,10 @@ class Blender(BaseRenderEngine):
def run_python_expression(self, project_path, python_expression, timeout=None): def run_python_expression(self, project_path, python_expression, timeout=None):
if os.path.exists(project_path): if os.path.exists(project_path):
try: try:
return subprocess.run([self.engine_path(), '-b', project_path, '--python-expr', python_expression], return subprocess.run([self.renderer_path(), '-b', project_path, '--python-expr', python_expression],
capture_output=True, timeout=timeout, creationflags=_creationflags) capture_output=True, timeout=timeout)
except Exception as e: except Exception as e:
err_msg = f"Error running python expression in blender: {e}" logger.error(f"Error running python expression in blender: {e}")
logger.error(err_msg)
raise ChildProcessError(err_msg)
else: else:
raise FileNotFoundError(f'Project file not found: {project_path}') raise FileNotFoundError(f'Project file not found: {project_path}')
@@ -69,19 +65,12 @@ class Blender(BaseRenderEngine):
raise FileNotFoundError(f'Python script not found: {script_path}') raise FileNotFoundError(f'Python script not found: {script_path}')
try: try:
command = [self.engine_path(), '-b', '--python', script_path] command = [self.renderer_path(), '-b', '--python', script_path]
if project_path: if project_path:
command.insert(2, project_path) command.insert(2, project_path)
result = subprocess.run(command, capture_output=True, timeout=timeout, creationflags=_creationflags) return subprocess.run(command, capture_output=True, timeout=timeout)
return result
except subprocess.TimeoutExpired:
err_msg = f"Timed out after {timeout}s while running python script in blender: {script_path}"
logger.error(err_msg)
raise TimeoutError(err_msg)
except Exception as e: except Exception as e:
err_msg = f"Error running python script in blender: {e}" logger.exception(f"Error running python script in blender: {e}")
logger.error(err_msg)
raise ChildProcessError(err_msg)
def get_project_info(self, project_path, timeout=10): def get_project_info(self, project_path, timeout=10):
scene_info = {} scene_info = {}
@@ -98,12 +87,10 @@ class Blender(BaseRenderEngine):
elif line.startswith('Error'): elif line.startswith('Error'):
logger.error(f"get_scene_info error: {line.strip()}") logger.error(f"get_scene_info error: {line.strip()}")
except Exception as e: except Exception as e:
msg = f'Error getting file details for .blend file: {e}' logger.error(f'Error getting file details for .blend file: {e}')
logger.error(msg)
raise ChildProcessError(msg)
return scene_info return scene_info
def pack_project_file(self, project_path, timeout=None): def pack_project_file(self, project_path, timeout=30):
# Credit to L0Lock for pack script - https://blender.stackexchange.com/a/243935 # Credit to L0Lock for pack script - https://blender.stackexchange.com/a/243935
try: try:
logger.info(f"Starting to pack Blender file: {project_path}") logger.info(f"Starting to pack Blender file: {project_path}")
@@ -126,13 +113,11 @@ class Blender(BaseRenderEngine):
logger.info(f'Blender file packed successfully to {new_path}') logger.info(f'Blender file packed successfully to {new_path}')
return new_path return new_path
except Exception as e: except Exception as e:
msg = f'Error packing .blend file: {e}' logger.error(f'Error packing .blend file: {e}')
logger.error(msg)
raise ChildProcessError(msg)
return None return None
def get_arguments(self): def get_arguments(self):
help_text = subprocess.check_output([self.engine_path(), '-h'], creationflags=_creationflags).decode('utf-8') help_text = subprocess.check_output([self.renderer_path(), '-h']).decode('utf-8')
lines = help_text.splitlines() lines = help_text.splitlines()
options = {} options = {}
@@ -164,7 +149,13 @@ class Blender(BaseRenderEngine):
return options return options
def system_info(self): def system_info(self):
return {'render_devices': self.get_render_devices()} with ThreadPoolExecutor() as executor:
future_render_devices = executor.submit(self.get_render_devices)
future_engines = executor.submit(self.supported_render_engines)
render_devices = future_render_devices.result()
engines = future_engines.result()
return {'render_devices': render_devices, 'engines': engines}
def get_render_devices(self): def get_render_devices(self):
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_system_info.py') script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_system_info.py')
@@ -179,13 +170,13 @@ class Blender(BaseRenderEngine):
logger.error("GPU data not found in the output.") logger.error("GPU data not found in the output.")
def supported_render_engines(self): def supported_render_engines(self):
engine_output = subprocess.run([self.engine_path(), '-E', 'help'], timeout=SUBPROCESS_TIMEOUT, engine_output = subprocess.run([self.renderer_path(), '-E', 'help'], timeout=SUBPROCESS_TIMEOUT,
capture_output=True, creationflags=_creationflags).stdout.decode('utf-8').strip() capture_output=True).stdout.decode('utf-8').strip()
render_engines = [x.strip() for x in engine_output.split('Blender Engine Listing:')[-1].strip().splitlines()] render_engines = [x.strip() for x in engine_output.split('Blender Engine Listing:')[-1].strip().splitlines()]
return render_engines return render_engines
def perform_presubmission_tasks(self, project_path): def perform_presubmission_tasks(self, project_path):
packed_path = self.pack_project_file(project_path, timeout=120) packed_path = self.pack_project_file(project_path, timeout=30)
return packed_path return packed_path
+2 -2
View File
@@ -1,9 +1,9 @@
class BlenderUI: class BlenderUI:
@staticmethod @staticmethod
def get_options(system_info): def get_options(instance):
options = [ options = [
{'name': 'engine', 'options': system_info.get('engines', [])}, {'name': 'engine', 'options': instance.supported_render_engines()},
{'name': 'render_device', 'options': ['Any', 'GPU', 'CPU']}, {'name': 'render_device', 'options': ['Any', 'GPU', 'CPU']},
] ]
return options return options
+8 -11
View File
@@ -26,17 +26,11 @@ class BlenderRenderWorker(BaseRenderWorker):
def generate_worker_subprocess(self): def generate_worker_subprocess(self):
cmd = [self.engine_path] cmd = [self.renderer_path]
if self.args.get('background', True): # optionally run render not in background if self.args.get('background', True): # optionally run render not in background
cmd.append('-b') cmd.append('-b')
cmd.append(self.input_path) cmd.append(self.input_path)
# Set Render Engine
blender_engine = self.args.get('engine')
if blender_engine:
blender_engine = blender_engine.upper()
cmd.extend(['-E', blender_engine])
# Start Python expressions - # todo: investigate splitting into separate 'setup' script # Start Python expressions - # todo: investigate splitting into separate 'setup' script
cmd.append('--python-expr') cmd.append('--python-expr')
python_exp = 'import bpy; bpy.context.scene.render.use_overwrite = False;' python_exp = 'import bpy; bpy.context.scene.render.use_overwrite = False;'
@@ -46,10 +40,13 @@ class BlenderRenderWorker(BaseRenderWorker):
if custom_camera: if custom_camera:
python_exp = python_exp + f"bpy.context.scene.camera = bpy.data.objects['{custom_camera}'];" python_exp = python_exp + f"bpy.context.scene.camera = bpy.data.objects['{custom_camera}'];"
# Set Render Device for Cycles (gpu/cpu/any) # Setup Render Engines
if blender_engine == 'CYCLES': self.args['engine'] = self.args.get('engine', 'CYCLES').upper() # set default render engine
# Configure Cycles
if self.args['engine'] == 'CYCLES':
# Set Render Device (gpu/cpu/any)
render_device = self.args.get('render_device', 'any').lower() render_device = self.args.get('render_device', 'any').lower()
if render_device not in {'any', 'gpu', 'cpu'}: if render_device not in ['any', 'gpu', 'cpu']:
raise AttributeError(f"Invalid Cycles render device: {render_device}") raise AttributeError(f"Invalid Cycles render device: {render_device}")
use_gpu = render_device in {'any', 'gpu'} use_gpu = render_device in {'any', 'gpu'}
@@ -71,7 +68,7 @@ class BlenderRenderWorker(BaseRenderWorker):
# Remove the extension only if it is not composed entirely of digits # Remove the extension only if it is not composed entirely of digits
path_without_ext = main_part if not ext[1:].isdigit() else self.output_path path_without_ext = main_part if not ext[1:].isdigit() else self.output_path
path_without_ext += "_" path_without_ext += "_"
cmd.extend(['-o', path_without_ext, '-F', export_format]) cmd.extend(['-E', blender_engine, '-o', path_without_ext, '-F', export_format])
# set frame range # set frame range
cmd.extend(['-s', self.start_frame, '-e', self.end_frame, '-a']) cmd.extend(['-s', self.start_frame, '-e', self.end_frame, '-a'])
+9 -143
View File
@@ -1,7 +1,9 @@
import logging import logging
import os import os
import shutil import shutil
import tarfile
import tempfile import tempfile
import zipfile
import requests import requests
from tqdm import tqdm from tqdm import tqdm
@@ -10,154 +12,26 @@ logger = logging.getLogger()
class EngineDownloader: class EngineDownloader:
"""A class responsible for downloading and extracting rendering engines from publicly available URLs.
Attributes:
supported_formats (list[str]): A list of file formats supported by the downloader.
"""
supported_formats = ['.zip', '.tar.xz', '.dmg'] supported_formats = ['.zip', '.tar.xz', '.dmg']
def __init__(self): def __init__(self):
pass pass
# --------------------------------------------
# Required Overrides for Subclasses:
# --------------------------------------------
@classmethod @classmethod
def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False): def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False):
""" raise NotImplementedError # implement this method in your engine subclass
Finds the most recent version of the rendering engine available for download.
This method should be overridden in a subclass to implement the logic for determining
the most recent version of the rendering engine, optionally filtering by long-term
support (LTS) versions, the operating system, and CPU architecture.
Args:
system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
lts_only (bool, optional): Limit the search to LTS (long-term support) versions only. Default is False.
Returns:
dict: A dict with the following keys:
- 'cpu' (str): The CPU architecture.
- 'system_os' (str): The operating system.
- 'file' (str): The filename of the version's download file.
- 'url' (str): The remote URL for downloading the version.
- 'version' (str): The version number.
Raises:
NotImplementedError: If the method is not overridden in a subclass.
"""
raise NotImplementedError(f"find_most_recent_version not implemented for {cls.__class__.__name__}")
@classmethod @classmethod
def version_is_available_to_download(cls, version, system_os=None, cpu=None): def version_is_available_to_download(cls, version, system_os=None, cpu=None):
"""Checks if a requested version of the rendering engine is available for download. raise NotImplementedError # implement this method in your engine subclass
This method should be overridden in a subclass to implement the logic for determining
whether a given version of the rendering engine is available for download, based on the
operating system and CPU architecture.
Args:
version (str): The requested renderer version to download.
system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
Returns:
bool: True if the version is available for download, False otherwise.
Raises:
NotImplementedError: If the method is not overridden in a subclass.
"""
raise NotImplementedError(f"version_is_available_to_download not implemented for {cls.__class__.__name__}")
@classmethod @classmethod
def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120, progress_callback=None): def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120):
"""Downloads the requested version of the rendering engine to the given download location. raise NotImplementedError # implement this method in your engine subclass
This method should be overridden in a subclass to implement the logic for downloading
a specific version of the rendering engine. The method is intended to handle the
downloading process based on the version, operating system, CPU architecture, and
timeout parameters.
Args:
version (str): The requested renderer version to download.
download_location (str): The directory where the engine should be downloaded.
system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
timeout (int, optional): The maximum time in seconds to wait for the download. Default is 120 seconds.
progress_callback (callable, optional): A callback function that is called periodically with the current download progress.
Raises:
NotImplementedError: If the method is not overridden in a subclass.
"""
raise NotImplementedError(f"download_engine not implemented for {cls.__class__.__name__}")
# --------------------------------------------
# Optional Overrides for Subclasses:
# --------------------------------------------
@classmethod @classmethod
def all_versions(cls, system_os=None, cpu=None): def download_and_extract_app(cls, remote_url, download_location, timeout=120):
"""Retrieves a list of available versions of the software for a specific operating system and CPU architecture.
This method fetches all available versions for the given operating system and CPU type, constructing
a list of dictionaries containing details such as the version, CPU architecture, system OS, and the
remote URL for downloading each version.
Args:
system_os (str, optional): Desired OS ('linux', 'macos', 'windows'). Defaults to system os.
cpu (str, optional): The CPU architecture for which to download the engine. Default is system cpu.
Returns:
list[dict]: A list of dictionaries, each containing:
- 'cpu' (str): The CPU architecture.
- 'file' (str): The filename of the version's download file.
- 'system_os' (str): The operating system.
- 'url' (str): The remote URL for downloading the version.
- 'version' (str): The version number.
"""
return []
# --------------------------------------------
# Do Not Override These Methods:
# --------------------------------------------
@classmethod
def download_and_extract_app(cls, remote_url, download_location, timeout=120, progress_callback=None):
"""Downloads an application from the given remote URL and extracts it to the specified location.
This method handles the downloading of the application, supports multiple archive formats,
and extracts the contents to the specified `download_location`. It also manages temporary
files and logs progress throughout the process.
Args:
remote_url (str): The URL of the application to download.
download_location (str): The directory where the application should be extracted.
timeout (int, optional): The maximum time in seconds to wait for the download. Default is 120 seconds.
progress_callback (callable, optional): A callback function that is called periodically with the current download progress.
Returns:
str: The path to the directory where the application was extracted.
Raises:
Exception: Catches and logs any exceptions that occur during the download or extraction process.
Supported Formats:
- `.tar.xz`: Extracted using the `tarfile` module.
- `.zip`: Extracted using the `zipfile` module.
- `.dmg`: macOS disk image files, handled using the `dmglib` library.
- Other formats will result in an error being logged.
Notes:
- If the application already exists in the `download_location`, the method will log an error
and return without downloading or extracting.
- Temporary files created during the download process are cleaned up after completion.
"""
if progress_callback:
progress_callback(0)
# Create a temp download directory # Create a temp download directory
temp_download_dir = tempfile.mkdtemp() temp_download_dir = tempfile.mkdtemp()
@@ -170,7 +44,7 @@ class EngineDownloader:
if os.path.exists(os.path.join(download_location, output_dir_name)): if os.path.exists(os.path.join(download_location, output_dir_name)):
logger.error(f"Engine download for {output_dir_name} already exists") logger.error(f"Engine download for {output_dir_name} already exists")
return None return
if not os.path.exists(temp_downloaded_file_path): if not os.path.exists(temp_downloaded_file_path):
# Make a GET request to the URL with stream=True to enable streaming # Make a GET request to the URL with stream=True to enable streaming
@@ -186,33 +60,26 @@ class EngineDownloader:
progress_bar = tqdm(total=file_size, unit="B", unit_scale=True) progress_bar = tqdm(total=file_size, unit="B", unit_scale=True)
# Open a file for writing in binary mode # Open a file for writing in binary mode
total_saved = 0
with open(temp_downloaded_file_path, "wb") as file: with open(temp_downloaded_file_path, "wb") as file:
for chunk in response.iter_content(chunk_size=1024): for chunk in response.iter_content(chunk_size=1024):
if chunk: if chunk:
# Write the chunk to the file # Write the chunk to the file
file.write(chunk) file.write(chunk)
total_saved += len(chunk)
# Update the progress bar # Update the progress bar
progress_bar.update(len(chunk)) progress_bar.update(len(chunk))
if progress_callback:
percent = float(total_saved) / float(file_size)
progress_callback(percent)
# Close the progress bar # Close the progress bar
progress_callback(1.0)
progress_bar.close() progress_bar.close()
logger.info(f"Successfully downloaded {os.path.basename(temp_downloaded_file_path)}") logger.info(f"Successfully downloaded {os.path.basename(temp_downloaded_file_path)}")
else: else:
logger.error(f"Failed to download the file. Status code: {response.status_code}") logger.error(f"Failed to download the file. Status code: {response.status_code}")
return None return
os.makedirs(download_location, exist_ok=True) os.makedirs(download_location, exist_ok=True)
# Extract the downloaded file # Extract the downloaded file
# Process .tar.xz files # Process .tar.xz files
if temp_downloaded_file_path.lower().endswith('.tar.xz'): if temp_downloaded_file_path.lower().endswith('.tar.xz'):
import tarfile
try: try:
with tarfile.open(temp_downloaded_file_path, 'r:xz') as tar: with tarfile.open(temp_downloaded_file_path, 'r:xz') as tar:
tar.extractall(path=download_location) tar.extractall(path=download_location)
@@ -226,7 +93,6 @@ class EngineDownloader:
# Process .zip files # Process .zip files
elif temp_downloaded_file_path.lower().endswith('.zip'): elif temp_downloaded_file_path.lower().endswith('.zip'):
import zipfile
try: try:
with zipfile.ZipFile(temp_downloaded_file_path, 'r') as zip_ref: with zipfile.ZipFile(temp_downloaded_file_path, 'r') as zip_ref:
zip_ref.extractall(download_location) zip_ref.extractall(download_location)
+48 -131
View File
@@ -1,6 +1,5 @@
import logging import logging
import os import os
import platform
import subprocess import subprocess
logger = logging.getLogger() logger = logging.getLogger()
@@ -8,148 +7,28 @@ SUBPROCESS_TIMEOUT = 5
class BaseRenderEngine(object): class BaseRenderEngine(object):
"""Base class for render engines. This class provides common functionality and structure for various rendering
engines. Create subclasses and override the methods marked below to add additional engines
Attributes:
install_paths (list): A list of default installation paths where the render engine
might be found. This list can be populated with common paths to help locate the
executable on different operating systems or environments.
"""
install_paths = [] install_paths = []
supported_extensions = []
# --------------------------------------------
# Required Overrides for Subclasses:
# --------------------------------------------
def __init__(self, custom_path=None): def __init__(self, custom_path=None):
self.custom_engine_path = custom_path self.custom_renderer_path = custom_path
if not self.engine_path() or not os.path.exists(self.engine_path()): if not self.renderer_path() or not os.path.exists(self.renderer_path()):
raise FileNotFoundError(f"Cannot find path to engine for {self.name()} instance: {self.engine_path()}") raise FileNotFoundError(f"Cannot find path to renderer for {self.name()} instance")
if not os.access(self.engine_path(), os.X_OK): if not os.access(self.renderer_path(), os.X_OK):
logger.warning(f"Path is not executable. Setting permissions to 755 for {self.engine_path()}") logger.warning(f"Path is not executable. Setting permissions to 755 for {self.renderer_path()}")
os.chmod(self.engine_path(), 0o755) os.chmod(self.renderer_path(), 0o755)
def version(self): def renderer_path(self):
"""Return the version number as a string. return self.custom_renderer_path or self.default_renderer_path()
Returns:
str: Version number.
Raises:
NotImplementedError: If not overridden.
"""
raise NotImplementedError(f"version not implemented for {self.__class__.__name__}")
def get_project_info(self, project_path, timeout=10):
"""Extracts detailed project information from the given project path.
Args:
project_path (str): The path to the project file.
timeout (int, optional): The maximum time (in seconds) to wait for the operation. Default is 10 seconds.
Returns:
dict: A dictionary containing project information (subclasses should define the structure).
Raises:
NotImplementedError: If the method is not overridden in a subclass.
"""
raise NotImplementedError(f"get_project_info not implemented for {self.__class__.__name__}")
@classmethod
def get_output_formats(cls):
"""Returns a list of available output formats supported by the engine.
Returns:
list[str]: A list of strings representing the available output formats.
"""
raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
@staticmethod
def worker_class(): # override when subclassing to link worker class
raise NotImplementedError("Worker class not implemented")
# --------------------------------------------
# Optional Overrides for Subclasses:
# --------------------------------------------
def supported_extensions(self):
"""
Returns:
list[str]: list of supported extensions
"""
return []
def get_help(self):
"""Retrieves the help documentation for the engine.
This method runs the engine's help command (default: '-h') and captures the output.
Override this method if the engine uses a different help flag.
Returns:
str: The help documentation as a string.
Raises:
FileNotFoundError: If the engine path is not found.
"""
path = self.engine_path()
if not path:
raise FileNotFoundError(f"Engine path not found: {path}")
creationflags = subprocess.CREATE_NO_WINDOW if platform.system() == 'Windows' else 0
help_doc = subprocess.check_output([path, '-h'], stderr=subprocess.STDOUT,
timeout=SUBPROCESS_TIMEOUT, creationflags=creationflags).decode('utf-8')
return help_doc
def system_info(self):
"""Return additional information about the system specfic to the engine (configured GPUs, render engines, etc)
Returns:
dict: A dictionary with engine-specific system information
"""
return {}
def perform_presubmission_tasks(self, project_path):
"""Perform any pre-submission tasks on a project file before uploading it to a server (pack textures, etc.)
Override this method to:
1. Copy the project file to a temporary location (DO NOT MODIFY ORIGINAL PATH).
2. Perform additional modifications or tasks.
3. Return the path to the modified project file.
Args:
project_path (str): The original project file path.
Returns:
str: The path to the modified project file.
"""
return project_path
def get_arguments(self):
pass
@staticmethod
def downloader(): # override when subclassing if using a downloader class
return None
@staticmethod
def ui_options(system_info): # override to return options for ui
return {}
# --------------------------------------------
# Do Not Override These Methods:
# --------------------------------------------
def engine_path(self):
return self.custom_engine_path or self.default_engine_path()
@classmethod @classmethod
def name(cls): def name(cls):
return str(cls.__name__).lower() return str(cls.__name__).lower()
@classmethod @classmethod
def default_engine_path(cls): def default_renderer_path(cls):
path = None path = None
try: # Linux and macOS try: # Linux and macOS
path = subprocess.check_output(['which', cls.name()], timeout=SUBPROCESS_TIMEOUT).decode('utf-8').strip() path = subprocess.check_output(['which', cls.name()], timeout=SUBPROCESS_TIMEOUT).decode('utf-8').strip()
@@ -160,3 +39,41 @@ class BaseRenderEngine(object):
except Exception as e: except Exception as e:
logger.exception(e) logger.exception(e)
return path return path
def version(self):
raise NotImplementedError("version not implemented")
@staticmethod
def downloader(): # override when subclassing if using a downloader class
return None
@staticmethod
def worker_class(): # override when subclassing to link worker class
raise NotImplementedError("Worker class not implemented")
def ui_options(self): # override to return options for ui
return {}
def get_help(self): # override if renderer uses different help flag
path = self.renderer_path()
if not path:
raise FileNotFoundError("renderer path not found")
help_doc = subprocess.check_output([path, '-h'], stderr=subprocess.STDOUT,
timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
return help_doc
def get_project_info(self, project_path, timeout=10):
raise NotImplementedError(f"get_project_info not implemented for {self.__name__}")
@classmethod
def get_output_formats(cls):
raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
def get_arguments(self):
pass
def system_info(self):
pass
def perform_presubmission_tasks(self, project_path):
return project_path
+112 -215
View File
@@ -3,7 +3,6 @@ import io
import json import json
import logging import logging
import os import os
import signal
import subprocess import subprocess
import threading import threading
import time import time
@@ -32,9 +31,9 @@ class BaseRenderWorker(Base):
date_created = Column(DateTime) date_created = Column(DateTime)
start_time = Column(DateTime, nullable=True) start_time = Column(DateTime, nullable=True)
end_time = Column(DateTime, nullable=True) end_time = Column(DateTime, nullable=True)
engine_name = Column(String) renderer = Column(String)
engine_version = Column(String) renderer_version = Column(String)
engine_path = Column(String) renderer_path = Column(String)
priority = Column(Integer) priority = Column(Integer)
project_length = Column(Integer) project_length = Column(Integer)
start_frame = Column(Integer) start_frame = Column(Integer)
@@ -46,20 +45,18 @@ class BaseRenderWorker(Base):
file_hash = Column(String) file_hash = Column(String)
_status = Column(String) _status = Column(String)
# -------------------------------------------- engine = None
# Required Overrides for Subclasses:
# --------------------------------------------
def __init__(self, input_path, output_path, engine_path, priority=2, args=None, ignore_extensions=True, parent=None, def __init__(self, input_path, output_path, engine_path, priority=2, args=None, ignore_extensions=True, parent=None,
name=None): name=None):
if not ignore_extensions: if not ignore_extensions:
if not any(ext in input_path for ext in self.engine.supported_extensions()): if not any(ext in input_path for ext in self.engine.supported_extensions()):
err_meg = f"Cannot find valid project with supported file extension for '{self.engine.name()}'" err_meg = f'Cannot find valid project with supported file extension for {self.engine.name()} renderer'
logger.error(err_meg) logger.error(err_meg)
raise ValueError(err_meg) raise ValueError(err_meg)
if not self.engine: if not self.engine:
raise NotImplementedError(f"Engine not defined for {self.__class__.__name__}") raise NotImplementedError("Engine not defined")
def generate_id(): def generate_id():
import uuid import uuid
@@ -72,10 +69,10 @@ class BaseRenderWorker(Base):
self.output_path = output_path self.output_path = output_path
self.args = args or {} self.args = args or {}
self.date_created = datetime.now() self.date_created = datetime.now()
self.engine_name = self.engine.name() self.renderer = self.engine.name()
self.engine_path = engine_path self.renderer_path = engine_path
self.engine_version = self.engine(engine_path).version() self.renderer_version = self.engine(engine_path).version()
self.custom_engine_path = None self.custom_renderer_path = None
self.priority = priority self.priority = priority
self.parent = parent self.parent = parent
self.children = {} self.children = {}
@@ -93,7 +90,7 @@ class BaseRenderWorker(Base):
self.end_time = None self.end_time = None
# History # History
self.status = RenderStatus.CONFIGURING self.status = RenderStatus.NOT_STARTED
self.warnings = [] self.warnings = []
self.errors = [] self.errors = []
@@ -105,52 +102,8 @@ class BaseRenderWorker(Base):
self.__last_output_time = None self.__last_output_time = None
self.watchdog_timeout = 120 self.watchdog_timeout = 120
def generate_worker_subprocess(self):
"""Generate a return a list of the command line arguments necessary to perform requested job
Returns:
list[str]: list of command line arguments
"""
raise NotImplementedError("generate_worker_subprocess not implemented")
def _parse_stdout(self, line):
"""Parses a line of standard output from the engine.
This method should be overridden in a subclass to implement the logic for processing
and interpreting a single line of output from the engine's standard output stream.
On frame completion, the subclass should:
1. Update value of self.current_frame
2. Call self._send_frame_complete_notification()
Args:
line (str): A line of text from the engine's standard output.
Raises:
NotImplementedError: If the method is not overridden in a subclass.
"""
raise NotImplementedError(f"_parse_stdout not implemented for {self.__class__.__name__}")
# --------------------------------------------
# Optional Overrides for Subclasses:
# --------------------------------------------
def percent_complete(self):
# todo: fix this
if self.status == RenderStatus.COMPLETED:
return 1.0
return 0
def post_processing(self):
"""Override to perform any engine-specific postprocessing"""
pass
# --------------------------------------------
# Do Not Override These Methods:
# --------------------------------------------
def __repr__(self): def __repr__(self):
return f"<Job id:{self.id} p{self.priority} {self.engine_name}-{self.engine_version} '{self.name}' status:{self.status.value}>" return f"<Job id:{self.id} p{self.priority} {self.renderer}-{self.renderer_version} '{self.name}' status:{self.status.value}>"
@property @property
def total_frames(self): def total_frames(self):
@@ -188,13 +141,16 @@ class BaseRenderWorker(Base):
return generated_args return generated_args
def get_raw_args(self): def get_raw_args(self):
raw_args_string = self.args.get('raw', '') raw_args_string = self.args.get('raw', None)
raw_args = None raw_args = None
if raw_args_string: if raw_args_string:
import shlex import shlex
raw_args = shlex.split(raw_args_string) raw_args = shlex.split(raw_args_string)
return raw_args return raw_args
def generate_worker_subprocess(self):
raise NotImplementedError("generate_worker_subprocess not implemented")
def log_path(self): def log_path(self):
filename = (self.name or os.path.basename(self.input_path)) + '_' + \ filename = (self.name or os.path.basename(self.input_path)) + '_' + \
self.date_created.strftime("%Y.%m.%d_%H.%M.%S") + '.log' self.date_created.strftime("%Y.%m.%d_%H.%M.%S") + '.log'
@@ -202,7 +158,7 @@ class BaseRenderWorker(Base):
def start(self): def start(self):
if self.status not in [RenderStatus.SCHEDULED, RenderStatus.NOT_STARTED, RenderStatus.CONFIGURING]: if self.status not in [RenderStatus.SCHEDULED, RenderStatus.NOT_STARTED]:
logger.error(f"Trying to start job with status: {self.status}") logger.error(f"Trying to start job with status: {self.status}")
return return
@@ -213,136 +169,97 @@ class BaseRenderWorker(Base):
self.errors.append(msg) self.errors.append(msg)
return return
if not os.path.exists(self.engine_path): if not os.path.exists(self.renderer_path):
self.status = RenderStatus.ERROR self.status = RenderStatus.ERROR
msg = f'Cannot find render engine path for {self.engine.name()}' msg = f'Cannot find render engine path for {self.engine.name()}'
logger.error(msg) logger.error(msg)
self.errors.append(msg) self.errors.append(msg)
return return
self.status = RenderStatus.RUNNING if not self.children else RenderStatus.WAITING_FOR_SUBJOBS self.status = RenderStatus.RUNNING
self.start_time = datetime.now() self.start_time = datetime.now()
self.__thread.start() self.__thread.start()
# handle multiple attempts at running subprocess
def __run__subprocess_cycle(self, log_file):
subprocess_cmds = self.generate_subprocess()
initial_file_count = len(self.file_list())
failed_attempts = 0
log_file.write(f"Running command: {subprocess_cmds}\n")
log_file.write('=' * 80 + '\n\n')
while True:
# Log attempt #
if failed_attempts:
if failed_attempts >= self.maximum_attempts:
err_msg = f"Maximum attempts exceeded ({self.maximum_attempts})"
logger.error(err_msg)
self.status = RenderStatus.ERROR
self.errors.append(err_msg)
return
else:
log_file.write(f'\n{"=" * 20} Attempt #{failed_attempts + 1} {"=" * 20}\n\n')
logger.warning(f"Restarting render - Attempt #{failed_attempts + 1}")
self.status = RenderStatus.RUNNING
return_code = self.__setup_and_run_process(log_file, subprocess_cmds)
message = f"{'=' * 50}\n\n{self.engine.name()} render ended with code {return_code} " \
f"after {self.time_elapsed()}\n\n"
log_file.write(message)
# don't try again if we've been cancelled
if self.status in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
return
# if file output hasn't increased, return as error, otherwise restart process.
file_count_has_increased = len(self.file_list()) > initial_file_count
if (self.status == RenderStatus.RUNNING) and file_count_has_increased and not return_code:
break
if return_code:
err_msg = f"{self.engine.name()} render failed with code {return_code}"
logger.error(err_msg)
self.errors.append(err_msg)
# handle instances where engine exits ok but doesnt generate files
if not return_code and not file_count_has_increased:
err_msg = (f"{self.engine.name()} render exited ok, but file count has not increased. "
f"Count is still {len(self.file_list())}")
log_file.write(f'Error: {err_msg}\n\n')
self.errors.append(err_msg)
# only count the attempt as failed if engine creates no output - reset counter on successful output
failed_attempts = 0 if file_count_has_increased else failed_attempts + 1
def __run__wait_for_subjobs(self, logfile):
from src.distributed_job_manager import DistributedJobManager
DistributedJobManager.wait_for_subjobs(parent_job=self)
@staticmethod
def log_and_print(message, log_file, level='info'):
if level == 'debug':
logger.debug(message)
elif level == 'error':
logger.error(message)
else:
logger.info(message)
log_file.write(f"{message}\n")
def __run(self): def __run(self):
logger.info(f'Starting {self.engine.name()} {self.renderer_version} Render for {self.input_path} | '
f'Frame Count: {self.total_frames}')
# Setup logging # Setup logging
log_dir = os.path.dirname(self.log_path()) log_dir = os.path.dirname(self.log_path())
os.makedirs(log_dir, exist_ok=True) os.makedirs(log_dir, exist_ok=True)
with open(self.log_path(), "a") as log_file: subprocess_cmds = self.generate_subprocess()
initial_file_count = len(self.file_list())
failed_attempts = 0
self.log_and_print(f"{self.start_time.isoformat()} - Starting " with open(self.log_path(), "a") as f:
f"{self.engine.name()} {self.engine_version} render job for {self.name} "
f"({self.input_path})", log_file)
log_file.write(f"\n")
if not self.children:
self.__run__subprocess_cycle(log_file)
else:
self.__run__wait_for_subjobs(log_file)
# Validate Output - End if missing frames f.write(f"{self.start_time.isoformat()} - Starting {self.engine.name()} {self.renderer_version} "
if self.status == RenderStatus.RUNNING: f"render for {self.input_path}\n\n")
file_list_length = len(self.file_list()) f.write(f"Running command: {subprocess_cmds}\n")
expected_list_length = (self.end_frame - self.start_frame + 1) if self.end_frame else 1 f.write('=' * 80 + '\n\n')
msg = f"Frames: Expected ({expected_list_length}) vs actual ({file_list_length}) for {self}" while True:
self.log_and_print(msg, log_file, 'debug') # Log attempt #
if failed_attempts:
if failed_attempts >= self.maximum_attempts:
err_msg = f"Maximum attempts exceeded ({self.maximum_attempts})"
logger.error(err_msg)
self.status = RenderStatus.ERROR
self.errors.append(err_msg)
return
else:
f.write(f'\n{"=" * 20} Attempt #{failed_attempts + 1} {"=" * 20}\n\n')
logger.warning(f"Restarting render - Attempt #{failed_attempts + 1}")
self.status = RenderStatus.RUNNING
if file_list_length not in (expected_list_length, 1): return_code = self.__setup_and_run_process(f, subprocess_cmds)
msg = f"Missing frames: Expected ({expected_list_length}) vs actual ({file_list_length})"
self.log_and_print(msg, log_file, 'error')
self.errors.append(msg)
self.status = RenderStatus.ERROR
# todo: create new subjob to generate missing frames
# cleanup and close if cancelled / error
if self.status in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
self.end_time = datetime.now() self.end_time = datetime.now()
message = f"{self.engine.name()} render ended with status '{self.status.value}' " \
f"after {self.time_elapsed()}"
self.log_and_print(message, log_file)
log_file.close()
return
# Post Render Work message = f"{'=' * 50}\n\n{self.engine.name()} render ended with code {return_code} " \
if not self.parent: f"after {self.time_elapsed()}\n\n"
logger.debug(f"Starting post-processing work for {self}") f.write(message)
self.log_and_print(f"Starting post-processing work for {self}", log_file, 'debug')
self.post_processing()
self.log_and_print(f"Completed post-processing work for {self}", log_file, 'debug')
self.status = RenderStatus.COMPLETED # Teardown
self.end_time = datetime.now() if self.status in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
message = f"Render {self.name} completed successfully after {self.time_elapsed()}" message = f"{self.engine.name()} render ended with status '{self.status}' " \
self.log_and_print(message, log_file) f"after {self.time_elapsed()}"
f.write(message)
return
# if file output hasn't increased, return as error, otherwise restart process.
file_count_has_increased = len(self.file_list()) > initial_file_count
if (self.status == RenderStatus.RUNNING) and file_count_has_increased and not return_code:
message = (f"{'=' * 50}\n\n{self.engine.name()} render completed successfully in "
f"{self.time_elapsed()}\n")
f.write(message)
break
if return_code:
err_msg = f"{self.engine.name()} render failed with code {return_code}"
logger.error(err_msg)
self.errors.append(err_msg)
# handle instances where renderer exits ok but doesnt generate files
if not return_code and not file_count_has_increased:
err_msg = (f"{self.engine.name()} render exited ok, but file count has not increased. "
f"Count is still {len(self.file_list())}")
f.write(f'Error: {err_msg}\n\n')
self.errors.append(err_msg)
# only count the attempt as failed if renderer creates no output - ignore error codes for now
if not file_count_has_increased:
failed_attempts += 1
if self.children:
from src.distributed_job_manager import DistributedJobManager
DistributedJobManager.wait_for_subjobs(local_job=self)
# Post Render Work
logger.debug("Starting post-processing work")
self.post_processing()
self.status = RenderStatus.COMPLETED
logger.info(f"Render {self.id}-{self.name} completed successfully after {self.time_elapsed()}")
def __setup_and_run_process(self, f, subprocess_cmds): def __setup_and_run_process(self, f, subprocess_cmds):
@@ -352,7 +269,7 @@ class BaseRenderWorker(Base):
time_since_last_update = time.time() - self.__last_output_time time_since_last_update = time.time() - self.__last_output_time
if time_since_last_update > self.watchdog_timeout: if time_since_last_update > self.watchdog_timeout:
logger.error(f"Process for {self} terminated due to exceeding timeout ({self.watchdog_timeout}s)") logger.error(f"Process for {self} terminated due to exceeding timeout ({self.watchdog_timeout}s)")
self.__kill_process() self.__process.kill()
break break
# logger.debug(f'Watchdog for {self} - Time since last update: {time_since_last_update}') # logger.debug(f'Watchdog for {self} - Time since last update: {time_since_last_update}')
time.sleep(1) time.sleep(1)
@@ -365,14 +282,8 @@ class BaseRenderWorker(Base):
try: try:
# Start process and get updates # Start process and get updates
if os.name == 'posix': # linux / mac self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=False)
universal_newlines=False, preexec_fn=os.setsid)
else: # windows
creationflags = subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.CREATE_NO_WINDOW
self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
universal_newlines=False,
creationflags=creationflags)
# Start watchdog # Start watchdog
self.__last_output_time = time.time() self.__last_output_time = time.time()
@@ -401,7 +312,7 @@ class BaseRenderWorker(Base):
message = f'Uncaught error running render process: {e}' message = f'Uncaught error running render process: {e}'
f.write(message) f.write(message)
logger.exception(message) logger.exception(message)
self.__kill_process() self.__process.kill()
# let watchdog end before continuing - prevents multiple watchdogs running when process restarts # let watchdog end before continuing - prevents multiple watchdogs running when process restarts
if watchdog_thread.is_alive(): if watchdog_thread.is_alive():
@@ -409,29 +320,11 @@ class BaseRenderWorker(Base):
return return_code return return_code
def __kill_process(self): def post_processing(self):
try: pass
if self.__process.poll():
return
logger.debug(f"Trying to kill process {self.__process}")
self.__process.terminate()
self.__process.kill()
if os.name == 'posix': # linux / macos
os.killpg(os.getpgid(self.__process.pid), signal.SIGTERM)
os.killpg(os.getpgid(self.__process.pid), signal.SIGKILL)
else: # windows
parent = psutil.Process(self.__process.pid)
for child in parent.children(recursive=True):
child.kill()
self.__process.wait(timeout=5)
logger.debug(f"Process ended with status {self.__process.poll()}")
except (ProcessLookupError, AttributeError, psutil.NoSuchProcess):
pass
except Exception as e:
logger.error(f"Error stopping the process: {e}")
def is_running(self): def is_running(self):
if hasattr(self, '__thread'): if self.__thread:
return self.__thread.is_alive() return self.__thread.is_alive()
return False return False
@@ -442,11 +335,15 @@ class BaseRenderWorker(Base):
self.stop(is_error=True) self.stop(is_error=True)
def stop(self, is_error=False): def stop(self, is_error=False):
logger.debug(f"Stopping {self}") if hasattr(self, '__process'):
try:
# cleanup status process = psutil.Process(self.__process.pid)
if self.status in [RenderStatus.RUNNING, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED, for proc in process.children(recursive=True):
RenderStatus.CONFIGURING]: proc.kill()
process.kill()
except Exception as e:
logger.debug(f"Error stopping the process: {e}")
if self.status in [RenderStatus.RUNNING, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED]:
if is_error: if is_error:
err_message = self.errors[-1] if self.errors else 'Unknown error' err_message = self.errors[-1] if self.errors else 'Unknown error'
logger.error(f"Halting render due to error: {err_message}") logger.error(f"Halting render due to error: {err_message}")
@@ -454,9 +351,13 @@ class BaseRenderWorker(Base):
else: else:
self.status = RenderStatus.CANCELLED self.status = RenderStatus.CANCELLED
self.__kill_process() def percent_complete(self):
if self.is_running(): # allow the log files to close if self.status == RenderStatus.COMPLETED:
self.__thread.join(timeout=5) return 1.0
return 0
def _parse_stdout(self, line):
raise NotImplementedError("_parse_stdout not implemented")
def time_elapsed(self): def time_elapsed(self):
return get_time_elapsed(self.start_time, self.end_time) return get_time_elapsed(self.start_time, self.end_time)
@@ -464,11 +365,7 @@ class BaseRenderWorker(Base):
def file_list(self): def file_list(self):
try: try:
job_dir = os.path.dirname(self.output_path) job_dir = os.path.dirname(self.output_path)
file_list = [ file_list = [os.path.join(job_dir, file) for file in os.listdir(job_dir)]
os.path.join(job_dir, file)
for file in os.listdir(job_dir)
if not file.startswith('.') # Ignore hidden files
]
file_list.sort() file_list.sort()
return file_list return file_list
except FileNotFoundError: except FileNotFoundError:
@@ -491,8 +388,8 @@ class BaseRenderWorker(Base):
'file_hash': self.file_hash, 'file_hash': self.file_hash,
'percent_complete': self.percent_complete(), 'percent_complete': self.percent_complete(),
'file_count': len(self.file_list()), 'file_count': len(self.file_list()),
'engine': self.engine_name, 'renderer': self.renderer,
'engine_version': self.engine_version, 'renderer_version': self.renderer_version,
'errors': getattr(self, 'errors', None), 'errors': getattr(self, 'errors', None),
'start_frame': self.start_frame, 'start_frame': self.start_frame,
'end_frame': self.end_frame, 'end_frame': self.end_frame,
+71 -110
View File
@@ -12,9 +12,6 @@ logger = logging.getLogger()
class EngineManager: class EngineManager:
"""Class that manages different versions of installed render engines and handles fetching and downloading new versions,
if possible.
"""
engines_path = None engines_path = None
download_tasks = [] download_tasks = []
@@ -23,14 +20,6 @@ class EngineManager:
def supported_engines(): def supported_engines():
return [Blender, FFMPEG] return [Blender, FFMPEG]
@classmethod
def downloadable_engines(cls):
return [engine for engine in cls.supported_engines() if hasattr(engine, "downloader") and engine.downloader()]
@classmethod
def active_downloads(cls) -> list:
return [x for x in cls.download_tasks if x.is_alive()]
@classmethod @classmethod
def engine_with_name(cls, engine_name): def engine_with_name(cls, engine_name):
for obj in cls.supported_engines(): for obj in cls.supported_engines():
@@ -38,15 +27,7 @@ class EngineManager:
return obj return obj
@classmethod @classmethod
def update_all_engines(cls): def get_engines(cls, filter_name=None):
for engine in cls.downloadable_engines():
update_available = cls.is_engine_update_available(engine)
if update_available:
update_available['name'] = engine.name()
cls.download_engine(engine.name(), update_available['version'], background=True)
@classmethod
def get_engines(cls, filter_name=None, include_corrupt=False, ignore_system=False):
if not cls.engines_path: if not cls.engines_path:
raise FileNotFoundError("Engine path is not set") raise FileNotFoundError("Engine path is not set")
@@ -68,8 +49,10 @@ class EngineManager:
# Initialize binary_name with engine name # Initialize binary_name with engine name
binary_name = result_dict['engine'].lower() binary_name = result_dict['engine'].lower()
# Determine the correct binary name based on the engine and system_os # Determine the correct binary name based on the engine and system_os
eng = cls.engine_with_name(result_dict['engine']) for eng in cls.supported_engines():
binary_name = eng.binary_names.get(result_dict['system_os'], binary_name) if eng.name().lower() == result_dict['engine']:
binary_name = eng.binary_names.get(result_dict['system_os'], binary_name)
break
# Find the path to the binary file # Find the path to the binary file
path = next( path = next(
@@ -77,16 +60,8 @@ class EngineManager:
os.walk(system_safe_path(os.path.join(cls.engines_path, directory))) if binary_name in files), os.walk(system_safe_path(os.path.join(cls.engines_path, directory))) if binary_name in files),
None None
) )
result_dict['path'] = path result_dict['path'] = path
# fetch version number from binary - helps detect corrupted downloads - disabled due to perf issues
# binary_version = eng(path).version()
# if not binary_version:
# logger.warning(f"Possible corrupt {eng.name()} {result_dict['version']} install detected: {path}")
# if not include_corrupt:
# continue
# result_dict['version'] = binary_version or 'error'
# Add the result dictionary to results if it matches the filter_name or if no filter is applied # Add the result dictionary to results if it matches the filter_name or if no filter is applied
if not filter_name or filter_name == result_dict['engine']: if not filter_name or filter_name == result_dict['engine']:
results.append(result_dict) results.append(result_dict)
@@ -94,61 +69,56 @@ class EngineManager:
logger.warning(f"Cannot find local engines download directory: {e}") logger.warning(f"Cannot find local engines download directory: {e}")
# add system installs to this list - use bg thread because it can be slow # add system installs to this list - use bg thread because it can be slow
def fetch_engine_details(eng, include_corrupt=False): def fetch_engine_details(eng):
version = eng().version()
if not version and not include_corrupt:
return
return { return {
'engine': eng.name(), 'engine': eng.name(),
'version': version or 'error', 'version': eng().version(),
'system_os': current_system_os(), 'system_os': current_system_os(),
'cpu': current_system_cpu(), 'cpu': current_system_cpu(),
'path': eng.default_engine_path(), 'path': eng.default_renderer_path(),
'type': 'system' 'type': 'system'
} }
if not ignore_system: with concurrent.futures.ThreadPoolExecutor() as executor:
with concurrent.futures.ThreadPoolExecutor() as executor: futures = {
futures = { executor.submit(fetch_engine_details, eng): eng.name()
executor.submit(fetch_engine_details, eng, include_corrupt): eng.name() for eng in cls.supported_engines()
for eng in cls.supported_engines() if eng.default_renderer_path() and (not filter_name or filter_name == eng.name())
if eng.default_engine_path() and (not filter_name or filter_name == eng.name()) }
}
for future in concurrent.futures.as_completed(futures): for future in concurrent.futures.as_completed(futures):
result = future.result() result = future.result()
if result: if result:
results.append(result) results.append(result)
return results return results
@classmethod @classmethod
def all_versions_for_engine(cls, engine_name, include_corrupt=False, ignore_system=False): def all_versions_for_engine(cls, engine_name):
versions = cls.get_engines(filter_name=engine_name, include_corrupt=include_corrupt, ignore_system=ignore_system) versions = cls.get_engines(filter_name=engine_name)
sorted_versions = sorted(versions, key=lambda x: x['version'], reverse=True) sorted_versions = sorted(versions, key=lambda x: x['version'], reverse=True)
return sorted_versions return sorted_versions
@classmethod @classmethod
def newest_engine_version(cls, engine, system_os=None, cpu=None, ignore_system=None): def newest_engine_version(cls, engine, system_os=None, cpu=None):
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
try: try:
filtered = [x for x in cls.all_versions_for_engine(engine, ignore_system=ignore_system) filtered = [x for x in cls.all_versions_for_engine(engine) if x['system_os'] == system_os and
if x['system_os'] == system_os and x['cpu'] == cpu] x['cpu'] == cpu]
return filtered[0] return filtered[0]
except IndexError: except IndexError:
logger.error(f"Cannot find newest engine version for {engine}-{system_os}-{cpu}") logger.error(f"Cannot find newest engine version for {engine}-{system_os}-{cpu}")
return None return None
@classmethod @classmethod
def is_version_downloaded(cls, engine, version, system_os=None, cpu=None, ignore_system=False): def is_version_downloaded(cls, engine, version, system_os=None, cpu=None):
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
filtered = [x for x in cls.get_engines(filter_name=engine, ignore_system=ignore_system) if filtered = [x for x in cls.get_engines(filter_name=engine) if x['system_os'] == system_os and
x['system_os'] == system_os and x['cpu'] == cpu and x['version'] == version] x['cpu'] == cpu and x['version'] == version]
return filtered[0] if filtered else False return filtered[0] if filtered else False
@classmethod @classmethod
@@ -181,7 +151,7 @@ class EngineManager:
return None return None
@classmethod @classmethod
def download_engine(cls, engine, version, system_os=None, cpu=None, background=False, ignore_system=False): def download_engine(cls, engine, version, system_os=None, cpu=None, background=False):
engine_to_download = cls.engine_with_name(engine) engine_to_download = cls.engine_with_name(engine)
existing_task = cls.get_existing_download_task(engine, version, system_os, cpu) existing_task = cls.get_existing_download_task(engine, version, system_os, cpu)
@@ -189,10 +159,10 @@ class EngineManager:
logger.debug(f"Already downloading {engine} {version}") logger.debug(f"Already downloading {engine} {version}")
if not background: if not background:
existing_task.join() # If download task exists, wait until it's done downloading existing_task.join() # If download task exists, wait until it's done downloading
return None return
elif not engine_to_download.downloader(): elif not engine_to_download.downloader():
logger.warning("No valid downloader for this engine. Please update this software manually.") logger.warning("No valid downloader for this engine. Please update this software manually.")
return None return
elif not cls.engines_path: elif not cls.engines_path:
raise FileNotFoundError("Engines path must be set before requesting downloads") raise FileNotFoundError("Engines path must be set before requesting downloads")
@@ -204,7 +174,7 @@ class EngineManager:
return thread return thread
thread.join() thread.join()
found_engine = cls.is_version_downloaded(engine, version, system_os, cpu, ignore_system) # Check that engine downloaded found_engine = cls.is_version_downloaded(engine, version, system_os, cpu) # Check that engine downloaded
if not found_engine: if not found_engine:
logger.error(f"Error downloading {engine}") logger.error(f"Error downloading {engine}")
return found_engine return found_engine
@@ -230,31 +200,41 @@ class EngineManager:
return False return False
@classmethod @classmethod
def is_engine_update_available(cls, engine_class, ignore_system_installs=False): def update_all_engines(cls):
logger.debug(f"Checking for updates to {engine_class.name()}") def engine_update_task(engine_class):
latest_version = engine_class.downloader().find_most_recent_version() logger.debug(f"Checking for updates to {engine_class.name()}")
latest_version = engine_class.downloader().find_most_recent_version()
if not latest_version: if not latest_version:
logger.warning(f"Could not find most recent version of {engine_class.name()} to download") logger.warning(f"Could not find most recent version of {engine.name()} to download")
return None return
version_num = latest_version.get('version') version_num = latest_version.get('version')
if cls.is_version_downloaded(engine_class.name(), version_num, ignore_system=ignore_system_installs): if cls.is_version_downloaded(engine_class.name(), version_num):
logger.debug(f"Latest version of {engine_class.name()} ({version_num}) already downloaded") logger.debug(f"Latest version of {engine_class.name()} ({version_num}) already downloaded")
return None return
return latest_version # download the engine
logger.info(f"Downloading latest version of {engine_class.name()} ({version_num})...")
cls.download_engine(engine=engine_class.name(), version=version_num, background=True)
logger.info(f"Checking for updates for render engines...")
threads = []
for engine in cls.supported_engines():
if engine.downloader():
thread = threading.Thread(target=engine_update_task, args=(engine,))
threads.append(thread)
thread.start()
@classmethod @classmethod
def create_worker(cls, engine_name, input_path, output_path, engine_version=None, args=None, parent=None, name=None): def create_worker(cls, renderer, input_path, output_path, engine_version=None, args=None, parent=None, name=None):
worker_class = cls.engine_with_name(engine_name).worker_class() worker_class = cls.engine_with_name(renderer).worker_class()
# check to make sure we have versions installed # check to make sure we have versions installed
all_versions = cls.all_versions_for_engine(engine_name) all_versions = cls.all_versions_for_engine(renderer)
if not all_versions: if not all_versions:
raise FileNotFoundError(f"Cannot find any installed '{engine_name}' engines") raise FileNotFoundError(f"Cannot find any installed {renderer} engines")
# Find the path to the requested engine version or use default # Find the path to the requested engine version or use default
engine_path = None engine_path = None
@@ -266,9 +246,9 @@ class EngineManager:
# Download the required engine if not found locally # Download the required engine if not found locally
if not engine_path: if not engine_path:
download_result = cls.download_engine(engine_name, engine_version) download_result = cls.download_engine(renderer, engine_version)
if not download_result: if not download_result:
raise FileNotFoundError(f"Cannot download requested version: {engine_name} {engine_version}") raise FileNotFoundError(f"Cannot download requested version: {renderer} {engine_version}")
engine_path = download_result['path'] engine_path = download_result['path']
logger.info("Engine downloaded. Creating worker.") logger.info("Engine downloaded. Creating worker.")
else: else:
@@ -286,52 +266,33 @@ class EngineManager:
_, extension = os.path.splitext(path) _, extension = os.path.splitext(path)
extension = extension.lower().strip('.') extension = extension.lower().strip('.')
for engine in cls.supported_engines(): for engine in cls.supported_engines():
if extension in engine().supported_extensions(): if extension in engine.supported_extensions():
return engine return engine
undefined_renderer_support = [x for x in cls.supported_engines() if not x().supported_extensions()] undefined_renderer_support = [x for x in cls.supported_engines() if not x.supported_extensions()]
return undefined_renderer_support[0] return undefined_renderer_support[0]
class EngineDownloadWorker(threading.Thread): class EngineDownloadWorker(threading.Thread):
"""A thread worker for downloading a specific version of a rendering engine.
This class handles the process of downloading a rendering engine in a separate thread,
ensuring that the download process does not block the main application.
Attributes:
engine (str): The name of the rendering engine to download.
version (str): The version of the rendering engine to download.
system_os (str, optional): The operating system for which to download the engine. Defaults to current OS type.
cpu (str, optional): Requested CPU architecture. Defaults to system CPU type.
"""
def __init__(self, engine, version, system_os=None, cpu=None): def __init__(self, engine, version, system_os=None, cpu=None):
super().__init__() super().__init__()
self.engine = engine self.engine = engine
self.version = version self.version = version
self.system_os = system_os self.system_os = system_os
self.cpu = cpu self.cpu = cpu
self.percent_complete = 0
def _update_progress(self, current_progress):
self.percent_complete = current_progress
def run(self): def run(self):
try: existing_download = EngineManager.is_version_downloaded(self.engine, self.version, self.system_os, self.cpu)
existing_download = EngineManager.is_version_downloaded(self.engine, self.version, self.system_os, self.cpu, if existing_download:
ignore_system=True) logger.info(f"Requested download of {self.engine} {self.version}, but local copy already exists")
if existing_download: return existing_download
logger.info(f"Requested download of {self.engine} {self.version}, but local copy already exists")
return existing_download
# Get the appropriate downloader class based on the engine type # Get the appropriate downloader class based on the engine type
downloader = EngineManager.engine_with_name(self.engine).downloader() EngineManager.engine_with_name(self.engine).downloader().download_engine(
downloader.download_engine( self.version, download_location=EngineManager.engines_path, self.version, download_location=EngineManager.engines_path, system_os=self.system_os, cpu=self.cpu,
system_os=self.system_os, cpu=self.cpu, timeout=300, progress_callback=self._update_progress) timeout=300)
except Exception as e:
logger.error(f"Error in download worker: {e}") # remove itself from the downloader list
finally: EngineManager.download_tasks.remove(self)
# remove itself from the downloader list
EngineManager.download_tasks.remove(self)
if __name__ == '__main__': if __name__ == '__main__':
+4 -5
View File
@@ -97,7 +97,7 @@ class FFMPEGDownloader(EngineDownloader):
'windows': cls.__get_windows_versions} 'windows': cls.__get_windows_versions}
if not versions_per_os.get(system_os): if not versions_per_os.get(system_os):
logger.error(f"Cannot find version list for {system_os}") logger.error(f"Cannot find version list for {system_os}")
return None return
results = [] results = []
all_versions = versions_per_os[system_os]() all_versions = versions_per_os[system_os]()
@@ -144,7 +144,7 @@ class FFMPEGDownloader(EngineDownloader):
return None return None
@classmethod @classmethod
def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120, progress_callback=None): def download_engine(cls, version, download_location, system_os=None, cpu=None, timeout=120):
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
@@ -152,7 +152,7 @@ class FFMPEGDownloader(EngineDownloader):
found_version = [item for item in cls.all_versions(system_os, cpu) if item['version'] == version] found_version = [item for item in cls.all_versions(system_os, cpu) if item['version'] == version]
if not found_version: if not found_version:
logger.error(f"Cannot find FFMPEG version {version} for {system_os} and {cpu}") logger.error(f"Cannot find FFMPEG version {version} for {system_os} and {cpu}")
return None return
# Platform specific naming cleanup # Platform specific naming cleanup
remote_url = cls.__get_remote_url_for_version(version=version, system_os=system_os, cpu=cpu) remote_url = cls.__get_remote_url_for_version(version=version, system_os=system_os, cpu=cpu)
@@ -162,8 +162,7 @@ class FFMPEGDownloader(EngineDownloader):
# Download and extract # Download and extract
try: try:
logger.info(f"Requesting download of ffmpeg-{version}-{system_os}-{cpu}") logger.info(f"Requesting download of ffmpeg-{version}-{system_os}-{cpu}")
cls.download_and_extract_app(remote_url=remote_url, download_location=download_location, timeout=timeout, cls.download_and_extract_app(remote_url=remote_url, download_location=download_location, timeout=timeout)
progress_callback=progress_callback)
# naming cleanup to match existing naming convention # naming cleanup to match existing naming convention
output_path = os.path.join(download_location, f'ffmpeg-{version}-{system_os}-{cpu}') output_path = os.path.join(download_location, f'ffmpeg-{version}-{system_os}-{cpu}')
+16 -20
View File
@@ -3,8 +3,6 @@ import re
from src.engines.core.base_engine import * from src.engines.core.base_engine import *
_creationflags = subprocess.CREATE_NO_WINDOW if platform.system() == 'Windows' else 0
class FFMPEG(BaseRenderEngine): class FFMPEG(BaseRenderEngine):
binary_names = {'linux': 'ffmpeg', 'windows': 'ffmpeg.exe', 'macos': 'ffmpeg'} binary_names = {'linux': 'ffmpeg', 'windows': 'ffmpeg.exe', 'macos': 'ffmpeg'}
@@ -23,9 +21,10 @@ class FFMPEG(BaseRenderEngine):
from src.engines.ffmpeg.ffmpeg_ui import FFMPEGUI from src.engines.ffmpeg.ffmpeg_ui import FFMPEGUI
return FFMPEGUI.get_options(self) return FFMPEGUI.get_options(self)
def supported_extensions(self): @classmethod
help_text = (subprocess.check_output([self.engine_path(), '-h', 'full'], stderr=subprocess.STDOUT, def supported_extensions(cls):
creationflags=_creationflags).decode('utf-8')) help_text = (subprocess.check_output([cls().renderer_path(), '-h', 'full'], stderr=subprocess.STDOUT)
.decode('utf-8'))
found = re.findall(r'extensions that .* is allowed to access \(default "(.*)"', help_text) found = re.findall(r'extensions that .* is allowed to access \(default "(.*)"', help_text)
found_extensions = set() found_extensions = set()
for match in found: for match in found:
@@ -35,8 +34,8 @@ class FFMPEG(BaseRenderEngine):
def version(self): def version(self):
version = None version = None
try: try:
ver_out = subprocess.check_output([self.engine_path(), '-version'], timeout=SUBPROCESS_TIMEOUT, ver_out = subprocess.check_output([self.renderer_path(), '-version'],
creationflags=_creationflags).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
match = re.match(r".*version\s*([\w.*]+)\W*", ver_out) match = re.match(r".*version\s*([\w.*]+)\W*", ver_out)
if match: if match:
version = match.groups()[0] version = match.groups()[0]
@@ -51,8 +50,7 @@ class FFMPEG(BaseRenderEngine):
'ffprobe', '-v', 'quiet', '-print_format', 'json', 'ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_streams', '-select_streams', 'v', project_path '-show_streams', '-select_streams', 'v', project_path
] ]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True, output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True)
creationflags=_creationflags)
video_info = json.loads(output) video_info = json.loads(output)
# Extract the necessary information # Extract the necessary information
@@ -82,8 +80,8 @@ class FFMPEG(BaseRenderEngine):
return None return None
def get_encoders(self): def get_encoders(self):
raw_stdout = subprocess.check_output([self.engine_path(), '-encoders'], stderr=subprocess.DEVNULL, raw_stdout = subprocess.check_output([self.renderer_path(), '-encoders'], stderr=subprocess.DEVNULL,
timeout=SUBPROCESS_TIMEOUT, creationflags=_creationflags).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
pattern = r'(?P<type>[VASFXBD.]{6})\s+(?P<name>\S{2,})\s+(?P<description>.*)' pattern = r'(?P<type>[VASFXBD.]{6})\s+(?P<name>\S{2,})\s+(?P<description>.*)'
encoders = [m.groupdict() for m in re.finditer(pattern, raw_stdout)] encoders = [m.groupdict() for m in re.finditer(pattern, raw_stdout)]
return encoders return encoders
@@ -94,9 +92,8 @@ class FFMPEG(BaseRenderEngine):
def get_all_formats(self): def get_all_formats(self):
try: try:
formats_raw = subprocess.check_output([self.engine_path(), '-formats'], stderr=subprocess.DEVNULL, formats_raw = subprocess.check_output([self.renderer_path(), '-formats'], stderr=subprocess.DEVNULL,
timeout=SUBPROCESS_TIMEOUT, timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
creationflags=_creationflags).decode('utf-8')
pattern = r'(?P<type>[DE]{1,2})\s+(?P<id>\S{2,})\s+(?P<name>.*)' pattern = r'(?P<type>[DE]{1,2})\s+(?P<id>\S{2,})\s+(?P<name>.*)'
all_formats = [m.groupdict() for m in re.finditer(pattern, formats_raw)] all_formats = [m.groupdict() for m in re.finditer(pattern, formats_raw)]
return all_formats return all_formats
@@ -108,8 +105,7 @@ class FFMPEG(BaseRenderEngine):
# Extract the common extension using regex # Extract the common extension using regex
muxer_flag = 'muxer' if 'E' in ffmpeg_format['type'] else 'demuxer' muxer_flag = 'muxer' if 'E' in ffmpeg_format['type'] else 'demuxer'
format_detail_raw = subprocess.check_output( format_detail_raw = subprocess.check_output(
[self.engine_path(), '-hide_banner', '-h', f"{muxer_flag}={ffmpeg_format['id']}"], [self.renderer_path(), '-hide_banner', '-h', f"{muxer_flag}={ffmpeg_format['id']}"]).decode('utf-8')
creationflags=_creationflags).decode('utf-8')
pattern = r"Common extensions: (\w+)" pattern = r"Common extensions: (\w+)"
common_extensions = re.findall(pattern, format_detail_raw) common_extensions = re.findall(pattern, format_detail_raw)
found_extensions = [] found_extensions = []
@@ -121,9 +117,9 @@ class FFMPEG(BaseRenderEngine):
return [x['id'] for x in self.get_all_formats() if 'E' in x['type'].upper()] return [x['id'] for x in self.get_all_formats() if 'E' in x['type'].upper()]
def get_frame_count(self, path_to_file): def get_frame_count(self, path_to_file):
raw_stdout = subprocess.check_output([self.engine_path(), '-i', path_to_file, '-map', '0:v:0', '-c', 'copy', raw_stdout = subprocess.check_output([self.renderer_path(), '-i', path_to_file, '-map', '0:v:0', '-c', 'copy',
'-f', 'null', '-'], stderr=subprocess.STDOUT, '-f', 'null', '-'], stderr=subprocess.STDOUT,
timeout=SUBPROCESS_TIMEOUT, creationflags=_creationflags).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
match = re.findall(r'frame=\s*(\d+)', raw_stdout) match = re.findall(r'frame=\s*(\d+)', raw_stdout)
if match: if match:
frame_number = int(match[-1]) frame_number = int(match[-1])
@@ -131,8 +127,8 @@ class FFMPEG(BaseRenderEngine):
return -1 return -1
def get_arguments(self): def get_arguments(self):
help_text = (subprocess.check_output([self.engine_path(), '-h', 'long'], stderr=subprocess.STDOUT, help_text = (subprocess.check_output([self.renderer_path(), '-h', 'long'], stderr=subprocess.STDOUT)
creationflags=_creationflags).decode('utf-8')) .decode('utf-8'))
lines = help_text.splitlines() lines = help_text.splitlines()
options = {} options = {}
+1 -1
View File
@@ -1,5 +1,5 @@
class FFMPEGUI: class FFMPEGUI:
@staticmethod @staticmethod
def get_options(system_info): def get_options(instance):
options = [] options = []
return options return options
+1 -1
View File
@@ -16,7 +16,7 @@ class FFMPEGRenderWorker(BaseRenderWorker):
def generate_worker_subprocess(self): def generate_worker_subprocess(self):
cmd = [self.engine_path, '-y', '-stats', '-i', self.input_path] cmd = [self.renderer_path, '-y', '-stats', '-i', self.input_path]
# Resize frame # Resize frame
if self.args.get('x_resolution', None) and self.args.get('y_resolution', None): if self.args.get('x_resolution', None) and self.args.get('y_resolution', None):
+158
View File
@@ -0,0 +1,158 @@
''' app/init.py '''
import logging
import multiprocessing
import os
import socket
import sys
import threading
import time
from collections import deque
from src.api.api_server import start_server
from src.api.preview_manager import PreviewManager
from src.api.serverproxy_manager import ServerProxyManager
from src.distributed_job_manager import DistributedJobManager
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue
from src.utilities.config import Config
from src.utilities.misc_helper import system_safe_path, current_system_cpu, current_system_os, current_system_os_version
from src.utilities.zeroconf_server import ZeroconfServer
logger = logging.getLogger()
def run(server_only=False) -> int:
"""
Initializes the application and runs it.
Returns:
int: The exit status code.
"""
# setup logging
logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S',
level=Config.server_log_level.upper())
logging.getLogger("requests").setLevel(logging.WARNING) # suppress noisy requests/urllib3 logging
logging.getLogger("urllib3").setLevel(logging.WARNING)
# Setup logging for console ui
buffer_handler = __setup_buffer_handler() if not server_only else None
logger.info(f"Starting Zordon Render Server")
return_code = 0
try:
# Load Config YAML
Config.setup_config_dir()
Config.load_config(system_safe_path(os.path.join(Config.config_dir(), 'config.yaml')))
# configure default paths
EngineManager.engines_path = system_safe_path(
os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
'engines')))
os.makedirs(EngineManager.engines_path, exist_ok=True)
PreviewManager.storage_path = system_safe_path(
os.path.join(os.path.expanduser(Config.upload_folder), 'previews'))
# Debug info
logger.debug(f"Upload directory: {os.path.expanduser(Config.upload_folder)}")
logger.debug(f"Thumbs directory: {PreviewManager.storage_path}")
logger.debug(f"Engines directory: {EngineManager.engines_path}")
# Set up the RenderQueue object
RenderQueue.load_state(database_directory=system_safe_path(os.path.expanduser(Config.upload_folder)))
ServerProxyManager.subscribe_to_listener()
DistributedJobManager.subscribe_to_listener()
# check for updates for render engines if configured or on first launch
if Config.update_engines_on_launch or not EngineManager.get_engines():
EngineManager.update_all_engines()
# get hostname
local_hostname = socket.gethostname()
local_hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")
# configure and start API server
api_server = threading.Thread(target=start_server, args=(local_hostname,))
api_server.daemon = True
api_server.start()
# start zeroconf server
ZeroconfServer.configure("_zordon._tcp.local.", local_hostname, Config.port_number)
ZeroconfServer.properties = {'system_cpu': current_system_cpu(),
'system_cpu_cores': multiprocessing.cpu_count(),
'system_os': current_system_os(),
'system_os_version': current_system_os_version()}
ZeroconfServer.start()
logger.info(f"Zordon Render Server started - Hostname: {local_hostname}")
RenderQueue.evaluation_inverval = Config.queue_eval_seconds
RenderQueue.start()
# start in gui or server only (cli) mode
logger.debug(f"Launching in {'server only' if server_only else 'GUI'} mode")
if server_only: # CLI only
api_server.join()
else: # GUI
return_code = __show_gui(buffer_handler)
except KeyboardInterrupt:
pass
except Exception as e:
logging.error(f"Unhandled exception: {e}")
return_code = 1
finally:
# shut down gracefully
logger.info(f"Zordon Render Server is preparing to shut down")
try:
RenderQueue.prepare_for_shutdown()
except Exception as e:
logger.exception(f"Exception during prepare for shutdown: {e}")
ZeroconfServer.stop()
logger.info(f"Zordon Render Server has shut down")
return sys.exit(return_code)
def __setup_buffer_handler():
# lazy load GUI frameworks
from PyQt6.QtCore import QObject, pyqtSignal
class BufferingHandler(logging.Handler, QObject):
new_record = pyqtSignal(str)
def __init__(self, capacity=100):
logging.Handler.__init__(self)
QObject.__init__(self)
self.buffer = deque(maxlen=capacity) # Define a buffer with a fixed capacity
def emit(self, record):
try:
msg = self.format(record)
self.buffer.append(msg) # Add message to the buffer
self.new_record.emit(msg) # Emit signal
except RuntimeError:
pass
def get_buffer(self):
return list(self.buffer) # Return a copy of the buffer
buffer_handler = BufferingHandler()
buffer_handler.setFormatter(logging.getLogger().handlers[0].formatter)
logger = logging.getLogger()
logger.addHandler(buffer_handler)
return buffer_handler
def __show_gui(buffer_handler):
# lazy load GUI frameworks
from PyQt6.QtWidgets import QApplication
# load application
app: QApplication = QApplication(sys.argv)
# configure main window
from src.ui.main_window import MainWindow
window: MainWindow = MainWindow()
window.buffer_handler = buffer_handler
window.show()
return app.exec()
+48 -63
View File
@@ -29,37 +29,19 @@ class RenderQueue:
maximum_renderer_instances = {'blender': 1, 'aerender': 1, 'ffmpeg': 4} maximum_renderer_instances = {'blender': 1, 'aerender': 1, 'ffmpeg': 4}
last_saved_counts = {} last_saved_counts = {}
is_running = False is_running = False
__eval_thread = None
evaluation_inverval = 1
# -------------------------------------------- # --------------------------------------------
# Render Queue Evaluation: # Start / Stop Background Updates
# -------------------------------------------- # --------------------------------------------
@classmethod @classmethod
def start(cls): def start(cls):
"""Start evaluating the render queue"""
logger.debug("Starting render queue updates") logger.debug("Starting render queue updates")
cls.is_running = True cls.is_running = True
cls.evaluate_queue() cls.evaluate_queue()
@classmethod
def evaluate_queue(cls):
try:
not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
for job in not_started:
if cls.is_available_for_job(job.engine_name, job.priority):
cls.start_job(job)
scheduled = cls.jobs_with_status(RenderStatus.SCHEDULED, priority_sorted=True)
for job in scheduled:
if job.scheduled_start <= datetime.now():
logger.debug(f"Starting scheduled job: {job}")
cls.start_job(job)
if cls.last_saved_counts != cls.job_counts():
cls.save_state()
except DetachedInstanceError:
pass
@classmethod @classmethod
def __local_job_status_changed(cls, job_id, old_status, new_status): def __local_job_status_changed(cls, job_id, old_status, new_status):
render_job = RenderQueue.job_with_id(job_id, none_ok=True) render_job = RenderQueue.job_with_id(job_id, none_ok=True)
@@ -73,9 +55,20 @@ class RenderQueue:
cls.is_running = False cls.is_running = False
# -------------------------------------------- # --------------------------------------------
# Fetch Jobs: # Queue Management
# -------------------------------------------- # --------------------------------------------
@classmethod
def add_to_render_queue(cls, render_job, force_start=False):
logger.info(f"Adding job to render queue: {render_job}")
cls.job_queue.append(render_job)
if cls.is_running and force_start and render_job.status in (RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED):
cls.start_job(render_job)
cls.session.add(render_job)
cls.save_state()
if cls.is_running:
cls.evaluate_queue()
@classmethod @classmethod
def all_jobs(cls): def all_jobs(cls):
return cls.job_queue return cls.job_queue
@@ -105,15 +98,12 @@ class RenderQueue:
return found_job return found_job
@classmethod @classmethod
def job_counts(cls): def clear_history(cls):
job_counts = {} to_remove = [x for x in cls.all_jobs() if x.status in [RenderStatus.CANCELLED,
for job_status in RenderStatus: RenderStatus.COMPLETED, RenderStatus.ERROR]]
job_counts[job_status.value] = len(cls.jobs_with_status(job_status)) for job_to_remove in to_remove:
return job_counts cls.delete_job(job_to_remove)
cls.save_state()
# --------------------------------------------
# Startup / Shutdown:
# --------------------------------------------
@classmethod @classmethod
def load_state(cls, database_directory): def load_state(cls, database_directory):
@@ -138,16 +128,6 @@ class RenderQueue:
cls.save_state() cls.save_state()
cls.session.close() cls.session.close()
# --------------------------------------------
# Renderer Availability:
# --------------------------------------------
@classmethod
def renderer_instances(cls):
from collections import Counter
all_instances = [x.engine_name for x in cls.running_jobs()]
return Counter(all_instances)
@classmethod @classmethod
def is_available_for_job(cls, renderer, priority=2): def is_available_for_job(cls, renderer, priority=2):
@@ -157,20 +137,24 @@ class RenderQueue:
maxed_out_instances = renderer in instances.keys() and instances[renderer] >= max_allowed_instances maxed_out_instances = renderer in instances.keys() and instances[renderer] >= max_allowed_instances
return not maxed_out_instances and not higher_priority_jobs return not maxed_out_instances and not higher_priority_jobs
# --------------------------------------------
# Job Lifecycle Management:
# --------------------------------------------
@classmethod @classmethod
def add_to_render_queue(cls, render_job, force_start=False): def evaluate_queue(cls):
logger.info(f"Adding job to render queue: {render_job}") try:
cls.job_queue.append(render_job) not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
if cls.is_running and force_start and render_job.status in (RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED): for job in not_started:
cls.start_job(render_job) if cls.is_available_for_job(job.renderer, job.priority):
cls.session.add(render_job) cls.start_job(job)
cls.save_state()
if cls.is_running: scheduled = cls.jobs_with_status(RenderStatus.SCHEDULED, priority_sorted=True)
cls.evaluate_queue() for job in scheduled:
if job.scheduled_start <= datetime.now():
logger.debug(f"Starting scheduled job: {job}")
cls.start_job(job)
if cls.last_saved_counts != cls.job_counts():
cls.save_state()
except DetachedInstanceError:
pass
@classmethod @classmethod
def start_job(cls, job): def start_job(cls, job):
@@ -193,14 +177,15 @@ class RenderQueue:
cls.save_state() cls.save_state()
return True return True
# -------------------------------------------- @classmethod
# Miscellaneous: def renderer_instances(cls):
# -------------------------------------------- from collections import Counter
all_instances = [x.renderer for x in cls.running_jobs()]
return Counter(all_instances)
@classmethod @classmethod
def clear_history(cls): def job_counts(cls):
to_remove = [x for x in cls.all_jobs() if x.status in [RenderStatus.CANCELLED, job_counts = {}
RenderStatus.COMPLETED, RenderStatus.ERROR]] for job_status in RenderStatus:
for job_to_remove in to_remove: job_counts[job_status.value] = len(cls.jobs_with_status(job_status))
cls.delete_job(job_to_remove) return job_counts
cls.save_state()
-74
View File
@@ -1,74 +0,0 @@
import os
import sys
from PyQt6.QtCore import Qt
from PyQt6.QtGui import QPixmap
from PyQt6.QtWidgets import QDialog, QVBoxLayout, QLabel, QDialogButtonBox, QHBoxLayout
from src.version import *
class AboutDialog(QDialog):
def __init__(self):
super().__init__()
self.setWindowTitle(f"About {APP_NAME}")
# Create the layout
layout = QVBoxLayout()
# App Icon
icon_name = 'Server.png' # todo: temp icon - replace with final later
icon_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
'resources', icon_name)
icon_label = QLabel(self)
icon_pixmap = QPixmap(icon_path)
icon_label.setPixmap(icon_pixmap)
icon_layout = QHBoxLayout()
icon_layout.addStretch()
icon_layout.addWidget(icon_label)
icon_layout.addStretch()
layout.addLayout(icon_layout)
# Application name
name_label = QLabel(f"<h2>{APP_NAME}</h2>")
layout.addWidget(name_label)
# Description
description_label = QLabel(APP_DESCRIPTION)
layout.addWidget(description_label)
# Version
version_label = QLabel(f"<strong>Version:</strong> {APP_VERSION}")
layout.addWidget(version_label)
# Contributors
contributors_label = QLabel(f"Copyright © {APP_COPYRIGHT_YEAR} {APP_AUTHOR}")
layout.addWidget(contributors_label)
# License
license_label = QLabel(f"Released under {APP_LICENSE}")
layout.addWidget(license_label)
# Add an "OK" button to close the dialog
button_box = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok)
button_box.accepted.connect(self.accept)
layout.addWidget(button_box)
# Set the layout for the dialog
self.setLayout(layout)
# Make the dialog non-resizable
self.setWindowFlags(self.windowFlags() & ~Qt.WindowType.WindowContextHelpButtonHint)
self.setFixedSize(self.sizeHint())
if __name__ == '__main__':
    # lazy load GUI frameworks
    from PyQt6.QtWidgets import QApplication

    # Stand-alone preview: spin up a Qt application, show the dialog,
    # and block until it is dismissed.
    qt_app: QApplication = QApplication(sys.argv)
    dialog: AboutDialog = AboutDialog()
    dialog.show()
    qt_app.exec()
+112 -122
View File
@@ -14,7 +14,7 @@ from requests import Response
from src.api.server_proxy import RenderServerProxy from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager from src.engines.engine_manager import EngineManager
from src.ui.engine_help_window import EngineHelpViewer from src.ui.engine_help_viewer import EngineHelpViewer
from src.utilities.zeroconf_server import ZeroconfServer from src.utilities.zeroconf_server import ZeroconfServer
@@ -24,7 +24,7 @@ class NewRenderJobForm(QWidget):
self.notes_group = None self.notes_group = None
self.frame_rate_input = None self.frame_rate_input = None
self.resolution_x_input = None self.resolution_x_input = None
self.engine_group = None self.renderer_group = None
self.output_settings_group = None self.output_settings_group = None
self.resolution_y_input = None self.resolution_y_input = None
self.project_path = project_path self.project_path = project_path
@@ -34,17 +34,17 @@ class NewRenderJobForm(QWidget):
self.load_file_group = None self.load_file_group = None
self.current_engine_options = None self.current_engine_options = None
self.file_format_combo = None self.file_format_combo = None
self.engine_options_layout = None self.renderer_options_layout = None
self.cameras_list = None self.cameras_list = None
self.cameras_group = None self.cameras_group = None
self.engine_version_combo = None self.renderer_version_combo = None
self.worker_thread = None self.worker_thread = None
self.msg_box = None self.msg_box = None
self.engine_help_viewer = None self.engine_help_viewer = None
self.raw_args = None self.raw_args = None
self.submit_progress_label = None self.submit_progress_label = None
self.submit_progress = None self.submit_progress = None
self.engine_type = None self.renderer_type = None
self.process_label = None self.process_label = None
self.process_progress_bar = None self.process_progress_bar = None
self.splitjobs_same_os = None self.splitjobs_same_os = None
@@ -62,18 +62,17 @@ class NewRenderJobForm(QWidget):
# Job / Server Data # Job / Server Data
self.server_proxy = RenderServerProxy(socket.gethostname()) self.server_proxy = RenderServerProxy(socket.gethostname())
self.engine_info = None self.renderer_info = None
self.project_info = None self.project_info = None
# Setup # Setup
self.setWindowTitle("New Job") self.setWindowTitle("New Job")
self.setup_ui() self.setup_ui()
self.update_engine_info()
self.setup_project() self.setup_project()
# get renderer info in bg thread # get renderer info in bg thread
# t = threading.Thread(target=self.update_renderer_info) t = threading.Thread(target=self.update_renderer_info)
# t.start() t.start()
self.show() self.show()
@@ -182,33 +181,33 @@ class NewRenderJobForm(QWidget):
# add group to layout # add group to layout
main_layout.addWidget(self.output_settings_group) main_layout.addWidget(self.output_settings_group)
# Engine Group # Renderer Group
self.engine_group = QGroupBox("Engine Settings") self.renderer_group = QGroupBox("Renderer Settings")
engine_group_layout = QVBoxLayout(self.engine_group) renderer_group_layout = QVBoxLayout(self.renderer_group)
engine_layout = QHBoxLayout() renderer_layout = QHBoxLayout()
engine_layout.addWidget(QLabel("Engine:")) renderer_layout.addWidget(QLabel("Renderer:"))
self.engine_type = QComboBox() self.renderer_type = QComboBox()
self.engine_type.currentIndexChanged.connect(self.engine_changed) self.renderer_type.currentIndexChanged.connect(self.renderer_changed)
engine_layout.addWidget(self.engine_type) renderer_layout.addWidget(self.renderer_type)
# Version # Version
engine_layout.addWidget(QLabel("Version:")) renderer_layout.addWidget(QLabel("Version:"))
self.engine_version_combo = QComboBox() self.renderer_version_combo = QComboBox()
self.engine_version_combo.addItem('latest') self.renderer_version_combo.addItem('latest')
engine_layout.addWidget(self.engine_version_combo) renderer_layout.addWidget(self.renderer_version_combo)
engine_group_layout.addLayout(engine_layout) renderer_group_layout.addLayout(renderer_layout)
# dynamic options # dynamic options
self.engine_options_layout = QVBoxLayout() self.renderer_options_layout = QVBoxLayout()
engine_group_layout.addLayout(self.engine_options_layout) renderer_group_layout.addLayout(self.renderer_options_layout)
# Raw Args # Raw Args
raw_args_layout = QHBoxLayout(self.engine_group) raw_args_layout = QHBoxLayout(self.renderer_group)
raw_args_layout.addWidget(QLabel("Raw Args:")) raw_args_layout.addWidget(QLabel("Raw Args:"))
self.raw_args = QLineEdit() self.raw_args = QLineEdit()
raw_args_layout.addWidget(self.raw_args) raw_args_layout.addWidget(self.raw_args)
args_help_button = QPushButton("?") args_help_button = QPushButton("?")
args_help_button.clicked.connect(self.args_help_button_clicked) args_help_button.clicked.connect(self.args_help_button_clicked)
raw_args_layout.addWidget(args_help_button) raw_args_layout.addWidget(args_help_button)
engine_group_layout.addLayout(raw_args_layout) renderer_group_layout.addLayout(raw_args_layout)
main_layout.addWidget(self.engine_group) main_layout.addWidget(self.renderer_group)
# Cameras Group # Cameras Group
self.cameras_group = QGroupBox("Cameras") self.cameras_group = QGroupBox("Cameras")
@@ -240,28 +239,28 @@ class NewRenderJobForm(QWidget):
self.submit_progress_label.setHidden(True) self.submit_progress_label.setHidden(True)
main_layout.addWidget(self.submit_progress_label) main_layout.addWidget(self.submit_progress_label)
self.toggle_engine_enablement(False) self.toggle_renderer_enablement(False)
def update_engine_info(self): def update_renderer_info(self):
# get the engine info and add them all to the ui # get the renderer info and add them all to the ui
self.engine_info = self.server_proxy.get_engine_info(response_type='full') self.renderer_info = self.server_proxy.get_renderer_info(response_type='full')
self.engine_type.addItems(self.engine_info.keys()) self.renderer_type.addItems(self.renderer_info.keys())
# select the best engine for the file type # select the best renderer for the file type
engine = EngineManager.engine_for_project_path(self.project_path) engine = EngineManager.engine_for_project_path(self.project_path)
self.engine_type.setCurrentText(engine.name().lower()) self.renderer_type.setCurrentText(engine.name().lower())
# refresh ui # refresh ui
self.engine_changed() self.renderer_changed()
def engine_changed(self): def renderer_changed(self):
# load the version numbers # load the version numbers
current_engine = self.engine_type.currentText().lower() or self.engine_type.itemText(0) current_renderer = self.renderer_type.currentText().lower() or self.renderer_type.itemText(0)
self.engine_version_combo.clear() self.renderer_version_combo.clear()
self.engine_version_combo.addItem('latest') self.renderer_version_combo.addItem('latest')
self.file_format_combo.clear() self.file_format_combo.clear()
if current_engine: if current_renderer:
engine_vers = [version_info['version'] for version_info in self.engine_info[current_engine]['versions']] renderer_vers = [version_info['version'] for version_info in self.renderer_info[current_renderer]['versions']]
self.engine_version_combo.addItems(engine_vers) self.renderer_version_combo.addItems(renderer_vers)
self.file_format_combo.addItems(self.engine_info[current_engine]['supported_export_formats']) self.file_format_combo.addItems(self.renderer_info[current_renderer]['supported_export_formats'])
def update_server_list(self): def update_server_list(self):
clients = ZeroconfServer.found_hostnames() clients = ZeroconfServer.found_hostnames()
@@ -278,7 +277,7 @@ class NewRenderJobForm(QWidget):
# UI stuff on main thread # UI stuff on main thread
self.process_progress_bar.setHidden(False) self.process_progress_bar.setHidden(False)
self.process_label.setHidden(False) self.process_label.setHidden(False)
self.toggle_engine_enablement(False) self.toggle_renderer_enablement(False)
output_name, _ = os.path.splitext(os.path.basename(self.scene_file_input.text())) output_name, _ = os.path.splitext(os.path.basename(self.scene_file_input.text()))
output_name = output_name.replace(' ', '_') output_name = output_name.replace(' ', '_')
@@ -296,8 +295,8 @@ class NewRenderJobForm(QWidget):
self.render_name_input.setText(directory) self.render_name_input.setText(directory)
def args_help_button_clicked(self): def args_help_button_clicked(self):
url = (f'http://{self.server_proxy.hostname}:{self.server_proxy.port}/api/engine/' url = (f'http://{self.server_proxy.hostname}:{self.server_proxy.port}/api/renderer/'
f'{self.engine_type.currentText()}/help') f'{self.renderer_type.currentText()}/help')
self.engine_help_viewer = EngineHelpViewer(url) self.engine_help_viewer = EngineHelpViewer(url)
self.engine_help_viewer.show() self.engine_help_viewer.show()
@@ -306,20 +305,20 @@ class NewRenderJobForm(QWidget):
def post_get_project_info_update(self): def post_get_project_info_update(self):
"""Called by the GetProjectInfoWorker - Do not call directly.""" """Called by the GetProjectInfoWorker - Do not call directly."""
try: try:
# Set the best engine we can find # Set the best renderer we can find
input_path = self.scene_file_input.text() input_path = self.scene_file_input.text()
engine = EngineManager.engine_for_project_path(input_path) engine = EngineManager.engine_for_project_path(input_path)
engine_index = self.engine_type.findText(engine.name().lower()) engine_index = self.renderer_type.findText(engine.name().lower())
if engine_index >= 0: if engine_index >= 0:
self.engine_type.setCurrentIndex(engine_index) self.renderer_type.setCurrentIndex(engine_index)
else: else:
self.engine_type.setCurrentIndex(0) #todo: find out why we don't have engine info yet self.renderer_type.setCurrentIndex(0) #todo: find out why we don't have renderer info yet
# not ideal but if we don't have the engine info we have to pick something # not ideal but if we don't have the renderer info we have to pick something
# cleanup progress UI # cleanup progress UI
self.load_file_group.setHidden(True) self.load_file_group.setHidden(True)
self.toggle_engine_enablement(True) self.toggle_renderer_enablement(True)
# Load scene data # Load scene data
self.start_frame_input.setValue(self.project_info.get('frame_start')) self.start_frame_input.setValue(self.project_info.get('frame_start'))
@@ -347,10 +346,9 @@ class NewRenderJobForm(QWidget):
self.cameras_group.setHidden(True) self.cameras_group.setHidden(True)
# Dynamic Engine Options # Dynamic Engine Options
clear_layout(self.engine_options_layout) # clear old options clear_layout(self.renderer_options_layout) # clear old options
# dynamically populate option list # dynamically populate option list
system_info = self.engine_info.get(engine.name(), {}).get('system_info', {}) self.current_engine_options = engine().ui_options()
self.current_engine_options = engine.ui_options(system_info=system_info)
for option in self.current_engine_options: for option in self.current_engine_options:
h_layout = QHBoxLayout() h_layout = QHBoxLayout()
label = QLabel(option['name'].replace('_', ' ').capitalize() + ':') label = QLabel(option['name'].replace('_', ' ').capitalize() + ':')
@@ -363,21 +361,21 @@ class NewRenderJobForm(QWidget):
else: else:
text_box = QLineEdit() text_box = QLineEdit()
h_layout.addWidget(text_box) h_layout.addWidget(text_box)
self.engine_options_layout.addLayout(h_layout) self.renderer_options_layout.addLayout(h_layout)
except AttributeError: except AttributeError:
pass pass
def toggle_engine_enablement(self, enabled=False): def toggle_renderer_enablement(self, enabled=False):
"""Toggle on/off all the render settings""" """Toggle on/off all the render settings"""
self.project_group.setHidden(not enabled) self.project_group.setHidden(not enabled)
self.output_settings_group.setHidden(not enabled) self.output_settings_group.setHidden(not enabled)
self.engine_group.setHidden(not enabled) self.renderer_group.setHidden(not enabled)
self.notes_group.setHidden(not enabled) self.notes_group.setHidden(not enabled)
if not enabled: if not enabled:
self.cameras_group.setHidden(True) self.cameras_group.setHidden(True)
self.submit_button.setEnabled(enabled) self.submit_button.setEnabled(enabled)
def after_job_submission(self, error_string): def after_job_submission(self, result):
# UI cleanup # UI cleanup
self.submit_progress.setMaximum(0) self.submit_progress.setMaximum(0)
@@ -386,10 +384,10 @@ class NewRenderJobForm(QWidget):
self.submit_progress_label.setHidden(True) self.submit_progress_label.setHidden(True)
self.process_progress_bar.setHidden(True) self.process_progress_bar.setHidden(True)
self.process_label.setHidden(True) self.process_label.setHidden(True)
self.toggle_engine_enablement(True) self.toggle_renderer_enablement(True)
self.msg_box = QMessageBox() self.msg_box = QMessageBox()
if not error_string: if result.ok:
self.msg_box.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No) self.msg_box.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)
self.msg_box.setIcon(QMessageBox.Icon.Information) self.msg_box.setIcon(QMessageBox.Icon.Information)
self.msg_box.setText("Job successfully submitted to server. Submit another?") self.msg_box.setText("Job successfully submitted to server. Submit another?")
@@ -400,7 +398,7 @@ class NewRenderJobForm(QWidget):
else: else:
self.msg_box.setStandardButtons(QMessageBox.StandardButton.Ok) self.msg_box.setStandardButtons(QMessageBox.StandardButton.Ok)
self.msg_box.setIcon(QMessageBox.Icon.Critical) self.msg_box.setIcon(QMessageBox.Icon.Critical)
self.msg_box.setText(error_string) self.msg_box.setText(result.text or "Unknown error")
self.msg_box.setWindowTitle("Error") self.msg_box.setWindowTitle("Error")
self.msg_box.exec() self.msg_box.exec()
@@ -431,7 +429,7 @@ class NewRenderJobForm(QWidget):
class SubmitWorker(QThread): class SubmitWorker(QThread):
"""Worker class called to submit all the jobs to the server and update the UI accordingly""" """Worker class called to submit all the jobs to the server and update the UI accordingly"""
message_signal = pyqtSignal(str) message_signal = pyqtSignal(Response)
update_ui_signal = pyqtSignal(str, str) update_ui_signal = pyqtSignal(str, str)
def __init__(self, window): def __init__(self, window):
@@ -447,69 +445,61 @@ class SubmitWorker(QThread):
self.update_ui_signal.emit(hostname, percent) self.update_ui_signal.emit(hostname, percent)
return callback return callback
try: hostname = self.window.server_input.currentText()
hostname = self.window.server_input.currentText() job_json = {'owner': psutil.Process().username() + '@' + socket.gethostname(),
job_json = {'owner': psutil.Process().username() + '@' + socket.gethostname(), 'renderer': self.window.renderer_type.currentText().lower(),
'engine_name': self.window.engine_type.currentText().lower(), 'engine_version': self.window.renderer_version_combo.currentText(),
'engine_version': self.window.engine_version_combo.currentText(), 'args': {'raw': self.window.raw_args.text(),
'args': {'raw': self.window.raw_args.text(), 'export_format': self.window.file_format_combo.currentText()},
'export_format': self.window.file_format_combo.currentText()}, 'output_path': self.window.render_name_input.text(),
'output_path': self.window.render_name_input.text(), 'start_frame': self.window.start_frame_input.value(),
'start_frame': self.window.start_frame_input.value(), 'end_frame': self.window.end_frame_input.value(),
'end_frame': self.window.end_frame_input.value(), 'priority': self.window.priority_input.currentIndex() + 1,
'priority': self.window.priority_input.currentIndex() + 1, 'notes': self.window.notes_input.toPlainText(),
'notes': self.window.notes_input.toPlainText(), 'enable_split_jobs': self.window.enable_splitjobs.isChecked(),
'enable_split_jobs': self.window.enable_splitjobs.isChecked(), 'split_jobs_same_os': self.window.splitjobs_same_os.isChecked(),
'split_jobs_same_os': self.window.splitjobs_same_os.isChecked(), 'name': self.window.render_name_input.text()}
'name': self.window.render_name_input.text()}
# get the dynamic args # get the dynamic args
for i in range(self.window.engine_options_layout.count()): for i in range(self.window.renderer_options_layout.count()):
item = self.window.engine_options_layout.itemAt(i) item = self.window.renderer_options_layout.itemAt(i)
layout = item.layout() # get the layout layout = item.layout() # get the layout
for x in range(layout.count()): for x in range(layout.count()):
z = layout.itemAt(x) z = layout.itemAt(x)
widget = z.widget() widget = z.widget()
if isinstance(widget, QComboBox): if isinstance(widget, QComboBox):
job_json['args'][self.window.current_engine_options[i]['name']] = widget.currentText() job_json['args'][self.window.current_engine_options[i]['name']] = widget.currentText()
elif isinstance(widget, QLineEdit): elif isinstance(widget, QLineEdit):
job_json['args'][self.window.current_engine_options[i]['name']] = widget.text() job_json['args'][self.window.current_engine_options[i]['name']] = widget.text()
# determine if any cameras are checked # determine if any cameras are checked
selected_cameras = [] selected_cameras = []
if self.window.cameras_list.count() and not self.window.cameras_group.isHidden(): if self.window.cameras_list.count() and not self.window.cameras_group.isHidden():
for index in range(self.window.cameras_list.count()): for index in range(self.window.cameras_list.count()):
item = self.window.cameras_list.item(index) item = self.window.cameras_list.item(index)
if item.checkState() == Qt.CheckState.Checked: if item.checkState() == Qt.CheckState.Checked:
selected_cameras.append(item.text().rsplit('-', 1)[0].strip()) # cleanup to just camera name selected_cameras.append(item.text().rsplit('-', 1)[0].strip()) # cleanup to just camera name
# process cameras into nested format # process cameras into nested format
input_path = self.window.scene_file_input.text() input_path = self.window.scene_file_input.text()
if selected_cameras and self.window.cameras_list.count() > 1: if selected_cameras:
children_jobs = [] job_list = []
for cam in selected_cameras: for cam in selected_cameras:
child_job_data = dict() job_copy = copy.deepcopy(job_json)
child_job_data['args'] = {} job_copy['args']['camera'] = cam
child_job_data['args']['camera'] = cam job_copy['name'] = job_copy['name'].replace(' ', '-') + "_" + cam.replace(' ', '')
child_job_data['name'] = job_json['name'].replace(' ', '-') + "_" + cam.replace(' ', '') job_copy['output_path'] = job_copy['name']
child_job_data['output_path'] = child_job_data['name'] job_list.append(job_copy)
children_jobs.append(child_job_data) else:
job_json['child_jobs'] = children_jobs job_list = [job_json]
# presubmission tasks # presubmission tasks
engine = EngineManager.engine_with_name(self.window.engine_type.currentText().lower()) engine = EngineManager.engine_with_name(self.window.renderer_type.currentText().lower())
input_path = engine().perform_presubmission_tasks(input_path) input_path = engine().perform_presubmission_tasks(input_path)
# submit # submit
err_msg = "" result = self.window.server_proxy.post_job_to_server(file_path=input_path, job_list=job_list,
result = self.window.server_proxy.post_job_to_server(file_path=input_path, job_data=job_json, callback=create_callback)
callback=create_callback) self.message_signal.emit(result)
if not (result and result.ok):
err_msg = f"Error posting job to server: {result.message}"
self.message_signal.emit(err_msg)
except Exception as e:
self.message_signal.emit(str(e))
class GetProjectInfoWorker(QThread): class GetProjectInfoWorker(QThread):
+6 -6
View File
@@ -93,7 +93,7 @@ class EngineBrowserWindow(QMainWindow):
def update_table(self): def update_table(self):
def update_table_worker(): def update_table_worker():
raw_server_data = RenderServerProxy(self.hostname).get_engine_info() raw_server_data = RenderServerProxy(self.hostname).get_renderer_info()
if not raw_server_data: if not raw_server_data:
return return
@@ -128,18 +128,18 @@ class EngineBrowserWindow(QMainWindow):
self.launch_button.setEnabled(is_localhost(self.hostname)) self.launch_button.setEnabled(is_localhost(self.hostname))
def update_download_status(self): def update_download_status(self):
running_tasks = EngineManager.active_downloads() running_tasks = [x for x in EngineManager.download_tasks if x.is_alive()]
hide_progress = not bool(running_tasks) hide_progress = not bool(running_tasks)
self.progress_bar.setHidden(hide_progress) self.progress_bar.setHidden(hide_progress)
self.progress_label.setHidden(hide_progress) self.progress_label.setHidden(hide_progress)
# Update the status labels # Update the status labels
if len(running_tasks) == 0: if len(EngineManager.download_tasks) == 0:
new_status = "" new_status = ""
elif len(running_tasks) == 1: elif len(EngineManager.download_tasks) == 1:
task = running_tasks[0] task = EngineManager.download_tasks[0]
new_status = f"Downloading {task.engine.capitalize()} {task.version}..." new_status = f"Downloading {task.engine.capitalize()} {task.version}..."
else: else:
new_status = f"Downloading {len(running_tasks)} engines..." new_status = f"Downloading {len(EngineManager.download_tasks)} engines..."
self.progress_label.setText(new_status) self.progress_label.setText(new_status)
def launch_button_click(self): def launch_button_click(self):
+28 -91
View File
@@ -1,10 +1,9 @@
''' app/ui/main_window.py ''' ''' app/ui/main_window.py '''
import ast
import datetime import datetime
import io import io
import json
import logging import logging
import os import os
import subprocess
import sys import sys
import threading import threading
import time import time
@@ -17,22 +16,20 @@ from PyQt6.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QListWidget, QTab
QTableWidgetItem, QLabel, QVBoxLayout, QHeaderView, QMessageBox, QGroupBox, QPushButton, QListWidgetItem, \ QTableWidgetItem, QLabel, QVBoxLayout, QHeaderView, QMessageBox, QGroupBox, QPushButton, QListWidgetItem, \
QFileDialog QFileDialog
from src.api.api_server import API_VERSION
from src.render_queue import RenderQueue from src.render_queue import RenderQueue
from src.utilities.misc_helper import get_time_elapsed, resources_dir, is_localhost from src.utilities.misc_helper import get_time_elapsed, resources_dir, is_localhost
from src.utilities.status_utils import RenderStatus from src.utilities.status_utils import RenderStatus
from src.utilities.zeroconf_server import ZeroconfServer from src.utilities.zeroconf_server import ZeroconfServer
from src.ui.add_job_window import NewRenderJobForm from .add_job import NewRenderJobForm
from src.ui.console_window import ConsoleWindow from .console import ConsoleWindow
from src.ui.engine_browser import EngineBrowserWindow from .engine_browser import EngineBrowserWindow
from src.ui.log_window import LogViewer from .log_viewer import LogViewer
from src.ui.widgets.menubar import MenuBar from .widgets.menubar import MenuBar
from src.ui.widgets.proportional_image_label import ProportionalImageLabel from .widgets.proportional_image_label import ProportionalImageLabel
from src.ui.widgets.statusbar import StatusBar from .widgets.statusbar import StatusBar
from src.ui.widgets.toolbar import ToolBar from .widgets.toolbar import ToolBar
from src.api.serverproxy_manager import ServerProxyManager from src.api.serverproxy_manager import ServerProxyManager
from src.utilities.misc_helper import launch_url, iso_datestring_to_formatted_datestring from src.utilities.misc_helper import launch_url
from src.version import APP_NAME
logger = logging.getLogger() logger = logging.getLogger()
@@ -56,7 +53,6 @@ class MainWindow(QMainWindow):
self.server_info_ram = None self.server_info_ram = None
self.server_info_cpu = None self.server_info_cpu = None
self.server_info_os = None self.server_info_os = None
self.server_info_gpu = None
self.server_info_hostname = None self.server_info_hostname = None
self.engine_browser_window = None self.engine_browser_window = None
self.server_info_group = None self.server_info_group = None
@@ -67,7 +63,7 @@ class MainWindow(QMainWindow):
self.buffer_handler = None self.buffer_handler = None
# Window-Settings # Window-Settings
self.setWindowTitle(APP_NAME) self.setWindowTitle("Zordon")
self.setGeometry(100, 100, 900, 800) self.setGeometry(100, 100, 900, 800)
central_widget = QWidget(self) central_widget = QWidget(self)
self.setCentralWidget(central_widget) self.setCentralWidget(central_widget)
@@ -91,13 +87,12 @@ class MainWindow(QMainWindow):
self.setup_ui(main_layout) self.setup_ui(main_layout)
self.create_toolbars()
# Add Widgets to Window # Add Widgets to Window
# self.custom_menu_bar =
self.setMenuBar(MenuBar(self)) self.setMenuBar(MenuBar(self))
self.setStatusBar(StatusBar(self)) self.setStatusBar(StatusBar(self))
self.create_toolbars()
# start background update # start background update
self.bg_update_thread = QThread() self.bg_update_thread = QThread()
self.bg_update_thread.run = self.__background_update self.bg_update_thread.run = self.__background_update
@@ -126,7 +121,6 @@ class MainWindow(QMainWindow):
self.server_info_os = QLabel() self.server_info_os = QLabel()
self.server_info_cpu = QLabel() self.server_info_cpu = QLabel()
self.server_info_ram = QLabel() self.server_info_ram = QLabel()
self.server_info_gpu = QLabel()
server_info_engines_button = QPushButton("Render Engines") server_info_engines_button = QPushButton("Render Engines")
server_info_engines_button.clicked.connect(self.engine_browser) server_info_engines_button.clicked.connect(self.engine_browser)
server_info_layout = QVBoxLayout() server_info_layout = QVBoxLayout()
@@ -134,7 +128,6 @@ class MainWindow(QMainWindow):
server_info_layout.addWidget(self.server_info_os) server_info_layout.addWidget(self.server_info_os)
server_info_layout.addWidget(self.server_info_cpu) server_info_layout.addWidget(self.server_info_cpu)
server_info_layout.addWidget(self.server_info_ram) server_info_layout.addWidget(self.server_info_ram)
server_info_layout.addWidget(self.server_info_gpu)
server_info_layout.addWidget(server_info_engines_button) server_info_layout.addWidget(server_info_engines_button)
server_info_group.setLayout(server_info_layout) server_info_group.setLayout(server_info_layout)
@@ -244,42 +237,15 @@ class MainWindow(QMainWindow):
def update_server_info_display(self, hostname): def update_server_info_display(self, hostname):
"""Updates the server information section of the UI.""" """Updates the server information section of the UI."""
self.server_info_hostname.setText(f"Name: {hostname}") self.server_info_hostname.setText(hostname or "unknown")
server_info = ZeroconfServer.get_hostname_properties(hostname) server_info = ZeroconfServer.get_hostname_properties(hostname)
# Use the get method with defaults to avoid KeyError # Use the get method with defaults to avoid KeyError
os_info = f"OS: {server_info.get('system_os', 'Unknown')} {server_info.get('system_os_version', '')}" os_info = f"OS: {server_info.get('system_os', 'Unknown')} {server_info.get('system_os_version', '')}"
cleaned_cpu_name = server_info.get('system_cpu_brand', 'Unknown').replace(' CPU','').replace('(TM)','').replace('(R)', '') cpu_info = f"CPU: {server_info.get('system_cpu', 'Unknown')} - {server_info.get('system_cpu_cores', 'Unknown')} cores"
cpu_info = f"CPU: {cleaned_cpu_name} ({server_info.get('system_cpu_cores', 'Unknown')} cores)"
memory_info = f"RAM: {server_info.get('system_memory', 'Unknown')} GB"
# Get and format GPU info
try:
gpu_list = ast.literal_eval(server_info.get('gpu_info', []))
# Format all GPUs
gpu_info_parts = []
for gpu in gpu_list:
gpu_name = gpu.get('name', 'Unknown').replace('(TM)','').replace('(R)', '')
gpu_memory = gpu.get('memory', 'Unknown')
# Add " GB" suffix if memory is a number
if isinstance(gpu_memory, (int, float)) or (isinstance(gpu_memory, str) and gpu_memory.isdigit()):
gpu_memory_str = f"{gpu_memory} GB"
else:
gpu_memory_str = str(gpu_memory)
gpu_info_parts.append(f"{gpu_name} ({gpu_memory_str})")
gpu_info = f"GPU: {', '.join(gpu_info_parts)}" if gpu_info_parts else "GPU: Unknown"
except Exception as e:
logger.error(f"Error parsing GPU info: {e}")
gpu_info = "GPU: Unknown"
self.server_info_os.setText(os_info.strip()) self.server_info_os.setText(os_info.strip())
self.server_info_cpu.setText(cpu_info) self.server_info_cpu.setText(cpu_info)
self.server_info_ram.setText(memory_info)
self.server_info_gpu.setText(gpu_info)
def fetch_jobs(self, clear_table=False): def fetch_jobs(self, clear_table=False):
@@ -290,7 +256,7 @@ class MainWindow(QMainWindow):
self.job_list_view.clear() self.job_list_view.clear()
self.refresh_job_headers() self.refresh_job_headers()
job_fetch = self.current_server_proxy.get_all_jobs(ignore_token=False) job_fetch = self.current_server_proxy.get_all_jobs(ignore_token=clear_table)
if job_fetch: if job_fetch:
num_jobs = len(job_fetch) num_jobs = len(job_fetch)
self.job_list_view.setRowCount(num_jobs) self.job_list_view.setRowCount(num_jobs)
@@ -307,14 +273,13 @@ class MainWindow(QMainWindow):
get_time_elapsed(start_time, end_time) get_time_elapsed(start_time, end_time)
name = job.get('name') or os.path.basename(job.get('input_path', '')) name = job.get('name') or os.path.basename(job.get('input_path', ''))
engine_name = f"{job.get('renderer', '')}-{job.get('renderer_version')}" renderer = f"{job.get('renderer', '')}-{job.get('renderer_version')}"
priority = str(job.get('priority', '')) priority = str(job.get('priority', ''))
total_frames = str(job.get('total_frames', '')) total_frames = str(job.get('total_frames', ''))
date_created_string = iso_datestring_to_formatted_datestring(job['date_created'])
items = [QTableWidgetItem(job['id']), QTableWidgetItem(name), QTableWidgetItem(engine_name), items = [QTableWidgetItem(job['id']), QTableWidgetItem(name), QTableWidgetItem(renderer),
QTableWidgetItem(priority), QTableWidgetItem(display_status), QTableWidgetItem(time_elapsed), QTableWidgetItem(priority), QTableWidgetItem(display_status), QTableWidgetItem(time_elapsed),
QTableWidgetItem(total_frames), QTableWidgetItem(date_created_string)] QTableWidgetItem(total_frames), QTableWidgetItem(job['date_created'])]
for col, item in enumerate(items): for col, item in enumerate(items):
self.job_list_view.setItem(row, col, item) self.job_list_view.setItem(row, col, item)
@@ -360,7 +325,7 @@ class MainWindow(QMainWindow):
current_status = self.job_list_view.item(selected_row.row(), 4).text() current_status = self.job_list_view.item(selected_row.row(), 4).text()
# show / hide the stop button # show / hide the stop button
show_stop_button = "%" in current_status show_stop_button = current_status.lower() == 'running'
self.topbar.actions_call['Stop Job'].setEnabled(show_stop_button) self.topbar.actions_call['Stop Job'].setEnabled(show_stop_button)
self.topbar.actions_call['Stop Job'].setVisible(show_stop_button) self.topbar.actions_call['Stop Job'].setVisible(show_stop_button)
self.topbar.actions_call['Delete Job'].setEnabled(not show_stop_button) self.topbar.actions_call['Delete Job'].setEnabled(not show_stop_button)
@@ -396,7 +361,7 @@ class MainWindow(QMainWindow):
return [] return []
def refresh_job_headers(self): def refresh_job_headers(self):
self.job_list_view.setHorizontalHeaderLabels(["ID", "Name", "Engine", "Priority", "Status", self.job_list_view.setHorizontalHeaderLabels(["ID", "Name", "Renderer", "Priority", "Status",
"Time Elapsed", "Frames", "Date Created"]) "Time Elapsed", "Frames", "Date Created"])
self.job_list_view.setColumnHidden(0, True) self.job_list_view.setColumnHidden(0, True)
@@ -445,8 +410,6 @@ class MainWindow(QMainWindow):
def update_servers(self): def update_servers(self):
found_servers = list(set(ZeroconfServer.found_hostnames() + self.added_hostnames)) found_servers = list(set(ZeroconfServer.found_hostnames() + self.added_hostnames))
found_servers = [x for x in found_servers if ZeroconfServer.get_hostname_properties(x)['api_version'] == API_VERSION]
# Always make sure local hostname is first # Always make sure local hostname is first
if found_servers and not is_localhost(found_servers[0]): if found_servers and not is_localhost(found_servers[0]):
for hostname in found_servers: for hostname in found_servers:
@@ -504,10 +467,10 @@ class MainWindow(QMainWindow):
resources_directory = resources_dir() resources_directory = resources_dir()
# Top Toolbar Buttons # Top Toolbar Buttons
self.topbar.add_button(
"Settings", f"{resources_directory}/Gear.png", self.menuBar().show_settings)
self.topbar.add_button( self.topbar.add_button(
"Console", f"{resources_directory}/Console.png", self.open_console_window) "Console", f"{resources_directory}/Console.png", self.open_console_window)
self.topbar.add_button(
"Engines", f"{resources_directory}/SoftwareInstaller.png", self.engine_browser)
self.topbar.add_separator() self.topbar.add_separator()
self.topbar.add_button( self.topbar.add_button(
"Stop Job", f"{resources_directory}/StopSign.png", self.stop_job) "Stop Job", f"{resources_directory}/StopSign.png", self.stop_job)
@@ -549,7 +512,7 @@ class MainWindow(QMainWindow):
def stop_job(self, event): def stop_job(self, event):
""" """
Event handler for the Stop Job button Event handler for the "Exit" button. Closes the application.
""" """
job_ids = self.selected_job_ids() job_ids = self.selected_job_ids()
if not job_ids: if not job_ids:
@@ -559,14 +522,14 @@ class MainWindow(QMainWindow):
job = next((job for job in self.current_server_proxy.get_all_jobs() if job.get('id') == job_ids[0]), None) job = next((job for job in self.current_server_proxy.get_all_jobs() if job.get('id') == job_ids[0]), None)
if job: if job:
display_name = job.get('name', os.path.basename(job.get('input_path', ''))) display_name = job.get('name', os.path.basename(job.get('input_path', '')))
message = f"Are you sure you want to stop the job:\n{display_name}?" message = f"Are you sure you want to delete the job:\n{display_name}?"
else: else:
return # Job not found, handle this case as needed return # Job not found, handle this case as needed
else: else:
message = f"Are you sure you want to stop these {len(job_ids)} jobs?" message = f"Are you sure you want to delete these {len(job_ids)} jobs?"
# Display the message box and check the response in one go # Display the message box and check the response in one go
msg_box = QMessageBox(QMessageBox.Icon.Warning, "Stop Job", message, msg_box = QMessageBox(QMessageBox.Icon.Warning, "Delete Job", message,
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, self) QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, self)
if msg_box.exec() == QMessageBox.StandardButton.Yes: if msg_box.exec() == QMessageBox.StandardButton.Yes:
@@ -602,15 +565,7 @@ class MainWindow(QMainWindow):
self.fetch_jobs(clear_table=True) self.fetch_jobs(clear_table=True)
def download_files(self, event): def download_files(self, event):
pass
job_ids = self.selected_job_ids()
if not job_ids:
return
import webbrowser
download_url = (f"http://{self.current_server_proxy.hostname}:{self.current_server_proxy.port}"
f"/api/job/{job_ids[0]}/download_all")
webbrowser.open(download_url)
def open_files(self, event): def open_files(self, event):
job_ids = self.selected_job_ids() job_ids = self.selected_job_ids()
@@ -628,21 +583,3 @@ class MainWindow(QMainWindow):
if file_name: if file_name:
self.new_job_window = NewRenderJobForm(file_name) self.new_job_window = NewRenderJobForm(file_name)
self.new_job_window.show() self.new_job_window.show()
if __name__ == "__main__":
# lazy load GUI frameworks
from PyQt6.QtWidgets import QApplication
# load application
# QtCore.QCoreApplication.setAttribute(QtCore.Qt.ApplicationAttribute.AA_MacDontSwapCtrlAndMeta)
app: QApplication = QApplication(sys.argv)
# configure main main_window
main_window = MainWindow()
# main_window.buffer_handler = buffer_handler
app.setActiveWindow(main_window)
main_window.show()
sys.exit(app.exec())
-554
View File
@@ -1,554 +0,0 @@
import os
import socket
from datetime import datetime
from pathlib import Path
import humanize
from PyQt6 import QtCore
from PyQt6.QtCore import Qt, QSettings, pyqtSignal as Signal, QThread, pyqtSignal, QTimer
from PyQt6.QtGui import QIcon
from PyQt6.QtWidgets import QApplication, QMainWindow, QListWidget, QListWidgetItem, QStackedWidget, QVBoxLayout, \
QWidget, QLabel, QCheckBox, QLineEdit, \
QPushButton, QHBoxLayout, QGroupBox, QTableWidget, QAbstractItemView, QTableWidgetItem, QHeaderView, \
QMessageBox, QProgressBar
from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager
from src.utilities.config import Config
from src.utilities.misc_helper import launch_url, system_safe_path
from src.version import APP_AUTHOR, APP_NAME
settings = QSettings(APP_AUTHOR, APP_NAME)
class GetEngineInfoWorker(QThread):
    """
    Background thread that asks the local render server for engine information.

    Attributes:
        done: Signal carrying the fetched engine-info payload once the
            request completes.
    """
    done = pyqtSignal(object)  # emits the result when finished

    def __init__(self, parent=None):
        super().__init__(parent)
        # NOTE(review): this assignment shadows QObject.parent() with a plain
        # attribute on the instance — confirm that is intentional.
        self.parent = parent

    def run(self):
        """Query the local host's render server and emit the result."""
        local_hostname = socket.gethostname()
        proxy = RenderServerProxy(local_hostname)
        self.done.emit(proxy.get_engine_info())
class SettingsWindow(QMainWindow):
    """
    The SettingsWindow class provides a user interface for managing engine settings.

    The window is split into a sidebar (General / Server / Engines) backed by a
    QStackedWidget of settings pages. All preferences are persisted through the
    module-level QSettings instance (``settings``).
    """
    def __init__(self):
        super().__init__()
        # Widget handles assigned later by the create_*_page builders; declared
        # here so the attributes always exist before the pages are built.
        self.engine_download_progress_bar = None
        self.engines_last_update_label = None
        self.check_for_engine_updates_checkbox = None
        self.delete_engine_button = None
        self.launch_engine_button = None
        self.show_password_button = None
        self.network_password_line = None
        self.enable_network_password_checkbox = None
        self.check_for_new_engines_button = None
        if not EngineManager.engines_path:  # fix issue where sometimes path was not set
            EngineManager.engines_path = system_safe_path(
                os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
                                          'engines')))
        self.installed_engines_table = None
        self.setWindowTitle("Settings")
        # Create the main layout
        main_layout = QVBoxLayout()
        # Create the sidebar (QListWidget) for navigation
        self.sidebar = QListWidget()
        self.sidebar.setFixedWidth(150)
        # Set the icon size
        self.sidebar.setIconSize(QtCore.QSize(32, 32))  # Increase the icon size to 32x32 pixels
        # Adjust the font size for the sidebar items
        font = self.sidebar.font()
        font.setPointSize(12)  # Increase the font size
        self.sidebar.setFont(font)
        # Add items with icons to the sidebar
        resources_dir = os.path.join(Path(__file__).resolve().parent.parent.parent, 'resources')
        self.add_sidebar_item("General", os.path.join(resources_dir, "Gear.png"))
        self.add_sidebar_item("Server", os.path.join(resources_dir, "Server.png"))
        self.add_sidebar_item("Engines", os.path.join(resources_dir, "Blender.png"))
        self.sidebar.setCurrentRow(0)
        # Create the stacked widget to hold different settings pages
        self.stacked_widget = QStackedWidget()
        # Create pages for each section
        general_page = self.create_general_page()
        network_page = self.create_network_page()
        engines_page = self.create_engines_page()
        # Add pages to the stacked widget (order must match the sidebar items)
        self.stacked_widget.addWidget(general_page)
        self.stacked_widget.addWidget(network_page)
        self.stacked_widget.addWidget(engines_page)
        # Connect the sidebar to the stacked widget
        self.sidebar.currentRowChanged.connect(self.stacked_widget.setCurrentIndex)
        # Create a horizontal layout to hold the sidebar and stacked widget
        content_layout = QHBoxLayout()
        content_layout.addWidget(self.sidebar)
        content_layout.addWidget(self.stacked_widget)
        # Add the content layout to the main layout
        main_layout.addLayout(content_layout)
        # Add the "OK" button at the bottom
        ok_button = QPushButton("OK")
        ok_button.clicked.connect(self.close)
        ok_button.setFixedWidth(80)
        ok_button.setDefault(True)
        main_layout.addWidget(ok_button, alignment=Qt.AlignmentFlag.AlignRight)
        # Create a central widget and set the layout
        central_widget = QWidget()
        central_widget.setLayout(main_layout)
        self.setCentralWidget(central_widget)
        self.setMinimumSize(700, 400)
        # timers for background download UI updates
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.update_engine_download_status)
    def add_sidebar_item(self, name, icon_path):
        """Add an item with an icon to the sidebar."""
        item = QListWidgetItem(QIcon(icon_path), name)
        self.sidebar.addItem(item)
    def create_general_page(self):
        """Create the General settings page.

        Returns:
            QWidget: page with startup, local-files, and render-restriction groups.
        """
        page = QWidget()
        layout = QVBoxLayout()
        # Startup Settings Group
        startup_group = QGroupBox("Startup Settings")
        startup_layout = QVBoxLayout()
        # startup_layout.addWidget(QCheckBox("Start application on system startup"))
        check_for_updates_checkbox = QCheckBox("Check for updates automatically")
        check_for_updates_checkbox.setChecked(settings.value("auto_check_for_updates", True, type=bool))
        check_for_updates_checkbox.stateChanged.connect(lambda state: settings.setValue("auto_check_for_updates", bool(state)))
        startup_layout.addWidget(check_for_updates_checkbox)
        startup_group.setLayout(startup_layout)
        # Local Files Group
        data_path = Path(Config.upload_folder).expanduser()
        # NOTE(review): walks the whole upload tree synchronously on page build —
        # may be slow for large directories; confirm acceptable on the UI thread.
        path_size = sum(f.stat().st_size for f in Path(data_path).rglob('*') if f.is_file())
        database_group = QGroupBox("Local Files")
        database_layout = QVBoxLayout()
        database_layout.addWidget(QLabel(f"Local Directory: {data_path}"))
        database_layout.addWidget(QLabel(f"Size: {humanize.naturalsize(path_size, binary=True)}"))
        open_database_path_button = QPushButton("Open Directory")
        open_database_path_button.clicked.connect(lambda: launch_url(data_path))
        open_database_path_button.setFixedWidth(200)
        database_layout.addWidget(open_database_path_button)
        database_group.setLayout(database_layout)
        # Render Settings Group
        render_settings_group = QGroupBox("Render Engine Settings")
        render_settings_layout = QVBoxLayout()
        render_settings_layout.addWidget(QLabel("Restrict to render nodes with same:"))
        require_same_engine_checkbox = QCheckBox("Renderer Version")
        require_same_engine_checkbox.setChecked(settings.value("render_require_same_engine_version", False, type=bool))
        require_same_engine_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_engine_version", bool(state)))
        render_settings_layout.addWidget(require_same_engine_checkbox)
        require_same_cpu_checkbox = QCheckBox("CPU Architecture")
        require_same_cpu_checkbox.setChecked(settings.value("render_require_same_cpu_type", False, type=bool))
        require_same_cpu_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_cpu_type", bool(state)))
        render_settings_layout.addWidget(require_same_cpu_checkbox)
        require_same_os_checkbox = QCheckBox("Operating System")
        require_same_os_checkbox.setChecked(settings.value("render_require_same_os", False, type=bool))
        require_same_os_checkbox.stateChanged.connect(lambda state: settings.setValue("render_require_same_os", bool(state)))
        render_settings_layout.addWidget(require_same_os_checkbox)
        render_settings_group.setLayout(render_settings_layout)
        layout.addWidget(startup_group)
        layout.addWidget(database_group)
        layout.addWidget(render_settings_group)
        layout.addStretch()  # Add a stretch to push content to the top
        page.setLayout(layout)
        return page
    def create_network_page(self):
        """Create the Network settings page.

        Returns:
            QWidget: page with the sharing / network-password controls.
        """
        page = QWidget()
        layout = QVBoxLayout()
        # Sharing Settings Group
        sharing_group = QGroupBox("Sharing Settings")
        sharing_layout = QVBoxLayout()
        enable_sharing_checkbox = QCheckBox("Enable other computers on the network to render to this machine")
        enable_sharing_checkbox.setChecked(settings.value("enable_network_sharing", False, type=bool))
        enable_sharing_checkbox.stateChanged.connect(self.toggle_render_sharing)
        sharing_layout.addWidget(enable_sharing_checkbox)
        # Password controls are only active when sharing AND the password
        # option are both enabled.
        password_enabled = (settings.value("enable_network_sharing", False, type=bool) and
                            settings.value("enable_network_password", False, type=bool))
        password_layout = QHBoxLayout()
        password_layout.setContentsMargins(0, 0, 0, 0)
        self.enable_network_password_checkbox = QCheckBox("Enable network password:")
        self.enable_network_password_checkbox.setChecked(settings.value("enable_network_password", False, type=bool))
        self.enable_network_password_checkbox.stateChanged.connect(self.enable_network_password_changed)
        self.enable_network_password_checkbox.setEnabled(settings.value("enable_network_sharing", False, type=bool))
        sharing_layout.addWidget(self.enable_network_password_checkbox)
        self.network_password_line = QLineEdit()
        self.network_password_line.setPlaceholderText("Enter a password")
        self.network_password_line.setEchoMode(QLineEdit.EchoMode.Password)
        self.network_password_line.setEnabled(password_enabled)
        password_layout.addWidget(self.network_password_line)
        self.show_password_button = QPushButton("Show")
        self.show_password_button.setEnabled(password_enabled)
        self.show_password_button.clicked.connect(self.show_password_button_pressed)
        password_layout.addWidget(self.show_password_button)
        sharing_layout.addLayout(password_layout)
        sharing_group.setLayout(sharing_layout)
        layout.addWidget(sharing_group)
        layout.addStretch()  # Add a stretch to push content to the top
        page.setLayout(layout)
        return page
    def toggle_render_sharing(self, enable_sharing):
        """Persist the sharing toggle and enable/disable the password controls."""
        settings.setValue("enable_network_sharing", enable_sharing)
        self.enable_network_password_checkbox.setEnabled(enable_sharing)
        enable_password = enable_sharing and settings.value("enable_network_password", False, type=bool)
        self.network_password_line.setEnabled(enable_password)
        self.show_password_button.setEnabled(enable_password)
    def enable_network_password_changed(self, new_value):
        """Persist the password toggle and sync the password field/button state."""
        settings.setValue("enable_network_password", new_value)
        self.network_password_line.setEnabled(new_value)
        self.show_password_button.setEnabled(new_value)
    def show_password_button_pressed(self):
        """Toggle between showing and masking the network password."""
        # toggle showing / hiding the password
        show_pass = self.show_password_button.text() == "Show"
        self.show_password_button.setText("Hide" if show_pass else "Show")
        self.network_password_line.setEchoMode(QLineEdit.EchoMode.Normal if show_pass else QLineEdit.EchoMode.Password)
    def create_engines_page(self):
        """Create the Engines settings page.

        Returns:
            QWidget: page with the installed-engines table and auto-install group.
        """
        page = QWidget()
        layout = QVBoxLayout()
        # Installed Engines Group
        installed_group = QGroupBox("Installed Engines")
        installed_layout = QVBoxLayout()
        # Setup table
        self.installed_engines_table = EngineTableWidget()
        self.installed_engines_table.row_selected.connect(self.engine_table_selected)
        installed_layout.addWidget(self.installed_engines_table)
        # Ignore system installs
        engine_ignore_system_installs_checkbox = QCheckBox("Ignore system installs")
        engine_ignore_system_installs_checkbox.setChecked(settings.value("engines_ignore_system_installs", False, type=bool))
        engine_ignore_system_installs_checkbox.stateChanged.connect(self.change_ignore_system_installs)
        installed_layout.addWidget(engine_ignore_system_installs_checkbox)
        # Engine Launch / Delete buttons
        installed_buttons_layout = QHBoxLayout()
        self.launch_engine_button = QPushButton("Launch")
        self.launch_engine_button.setEnabled(False)
        self.launch_engine_button.clicked.connect(self.launch_selected_engine)
        self.delete_engine_button = QPushButton("Delete")
        self.delete_engine_button.setEnabled(False)
        self.delete_engine_button.clicked.connect(self.delete_selected_engine)
        installed_buttons_layout.addWidget(self.launch_engine_button)
        installed_buttons_layout.addWidget(self.delete_engine_button)
        installed_layout.addLayout(installed_buttons_layout)
        installed_group.setLayout(installed_layout)
        # Engine Updates Group
        engine_updates_group = QGroupBox("Auto-Install")
        engine_updates_layout = QVBoxLayout()
        engine_download_layout = QHBoxLayout()
        engine_download_layout.addWidget(QLabel("Enable Downloads for:"))
        # Track whether any engine download is enabled so the update controls
        # can be disabled as a group when nothing is downloadable.
        at_least_one_downloadable = False
        for engine in EngineManager.downloadable_engines():
            engine_download_check = QCheckBox(engine.name())
            is_checked = settings.value(f"engine_download-{engine.name()}", False, type=bool)
            at_least_one_downloadable |= is_checked
            engine_download_check.setChecked(is_checked)
            # Capture the checkbox correctly using a default argument in lambda
            engine_download_check.clicked.connect(
                lambda state, checkbox=engine_download_check: self.engine_download_settings_changed(state, checkbox.text())
            )
            engine_download_layout.addWidget(engine_download_check)
        engine_updates_layout.addLayout(engine_download_layout)
        self.check_for_engine_updates_checkbox = QCheckBox("Check for new versions on launch")
        self.check_for_engine_updates_checkbox.setChecked(settings.value('check_for_engine_updates_on_launch', True, type=bool))
        self.check_for_engine_updates_checkbox.setEnabled(at_least_one_downloadable)
        self.check_for_engine_updates_checkbox.stateChanged.connect(
            lambda state: settings.setValue("check_for_engine_updates_on_launch", bool(state)))
        engine_updates_layout.addWidget(self.check_for_engine_updates_checkbox)
        self.engines_last_update_label = QLabel()
        self.update_last_checked_label()
        self.engines_last_update_label.setEnabled(at_least_one_downloadable)
        engine_updates_layout.addWidget(self.engines_last_update_label)
        self.engine_download_progress_bar = QProgressBar()
        engine_updates_layout.addWidget(self.engine_download_progress_bar)
        self.engine_download_progress_bar.setHidden(True)
        self.check_for_new_engines_button = QPushButton("Check for New Versions...")
        self.check_for_new_engines_button.setEnabled(at_least_one_downloadable)
        self.check_for_new_engines_button.clicked.connect(self.check_for_new_engines)
        engine_updates_layout.addWidget(self.check_for_new_engines_button)
        engine_updates_group.setLayout(engine_updates_layout)
        layout.addWidget(installed_group)
        layout.addWidget(engine_updates_group)
        layout.addStretch()  # Add a stretch to push content to the top
        page.setLayout(layout)
        return page
    def change_ignore_system_installs(self, value):
        """Persist the 'ignore system installs' flag and refresh the table."""
        settings.setValue("engines_ignore_system_installs", bool(value))
        self.installed_engines_table.update_engines_table()
    def update_last_checked_label(self):
        """Set the last-checked label to a human-friendly relative time."""
        last_checked_str = settings.value("engines_last_update_time", None)
        if not last_checked_str:
            time_string = "Never"
        else:
            last_checked_dt = datetime.fromisoformat(last_checked_str)
            now = datetime.now()
            time_string = humanize.naturaltime(now - last_checked_dt)
        self.engines_last_update_label.setText(f"Last Updated: {time_string}")
    def engine_download_settings_changed(self, state, engine_name):
        """Persist a per-engine download toggle and re-evaluate group enablement."""
        settings.setValue(f"engine_download-{engine_name}", state)
        at_least_one_downloadable = False
        for engine in EngineManager.downloadable_engines():
            at_least_one_downloadable |= settings.value(f"engine_download-{engine.name()}", False, type=bool)
        self.check_for_new_engines_button.setEnabled(at_least_one_downloadable)
        self.check_for_engine_updates_checkbox.setEnabled(at_least_one_downloadable)
        self.engines_last_update_label.setEnabled(at_least_one_downloadable)
    def delete_selected_engine(self):
        """Confirm with the user, then delete the selected managed engine install."""
        engine_info = self.installed_engines_table.selected_engine_data()
        reply = QMessageBox.question(self, f"Delete {engine_info['engine']} {engine_info['version']}?",
                                     f"Do you want to delete {engine_info['engine']} {engine_info['version']}?",
                                     QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)
        if reply is not QMessageBox.StandardButton.Yes:
            return
        delete_result = EngineManager.delete_engine_download(engine_info.get('engine'),
                                                            engine_info.get('version'),
                                                            engine_info.get('system_os'),
                                                            engine_info.get('cpu'))
        # Refresh from the server regardless of outcome so the table reflects reality.
        self.installed_engines_table.update_engines_table(use_cached=False)
        if delete_result:
            QMessageBox.information(self, f"{engine_info['engine']} {engine_info['version']} Deleted",
                                    f"{engine_info['engine']} {engine_info['version']} deleted successfully",
                                    QMessageBox.StandardButton.Ok)
        else:
            QMessageBox.warning(self, f"Unknown Error",
                                f"Unknown error while deleting {engine_info['engine']} {engine_info['version']}.",
                                QMessageBox.StandardButton.Ok)
    def launch_selected_engine(self):
        """Open the selected engine's install path with the system handler."""
        engine_info = self.installed_engines_table.selected_engine_data()
        if engine_info:
            launch_url(engine_info['path'])
    def engine_table_selected(self):
        """Update Launch/Delete button enablement for the current table selection."""
        engine_data = self.installed_engines_table.selected_engine_data()
        if engine_data:
            # NOTE(review): `engine_data.get('path') or True` is always truthy, so
            # Launch is enabled for any selection — confirm this is intentional.
            self.launch_engine_button.setEnabled(bool(engine_data.get('path') or True))
            self.delete_engine_button.setEnabled(engine_data.get('type') == 'managed')
        else:
            self.launch_engine_button.setEnabled(False)
            self.delete_engine_button.setEnabled(False)
    def check_for_new_engines(self):
        """Check each download-enabled engine for updates, offering to install them.

        Starts a background download (with progress polling via self.timer) when
        the user accepts, and records the check timestamp when done.
        """
        ignore_system = settings.value("engines_ignore_system_installs", False, type=bool)
        messagebox_shown = False
        for engine in EngineManager.downloadable_engines():
            if settings.value(f'engine_download-{engine.name()}', False, type=bool):
                result = EngineManager.is_engine_update_available(engine, ignore_system_installs=ignore_system)
                if result:
                    result['name'] = engine.name()
                    msg_box = QMessageBox()
                    msg_box.setWindowTitle(f"{result['name']} ({result['version']}) Available")
                    msg_box.setText(f"A new version of {result['name']} is available ({result['version']}).\n\n"
                                    f"Would you like to download it now?")
                    msg_box.setIcon(QMessageBox.Icon.Question)
                    msg_box.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)
                    msg_result = msg_box.exec()
                    messagebox_shown = True
                    if msg_result == QMessageBox.StandardButton.Yes:
                        EngineManager.download_engine(engine=engine.name(), version=result['version'], background=True,
                                                      ignore_system=ignore_system)
                        self.engine_download_progress_bar.setHidden(False)
                        self.engine_download_progress_bar.setValue(0)
                        self.engine_download_progress_bar.setMaximum(100)
                        self.check_for_new_engines_button.setEnabled(False)
                        # Poll download progress once per second.
                        self.timer.start(1000)
        if not messagebox_shown:
            msg_box = QMessageBox()
            msg_box.setWindowTitle("No Updates Available")
            msg_box.setText("No Updates Available.")
            msg_box.setIcon(QMessageBox.Icon.Information)
            msg_box.setStandardButtons(QMessageBox.StandardButton.Ok)
            msg_box.exec()
        settings.setValue("engines_last_update_time", datetime.now().isoformat())
        self.update_engine_download_status()
    def update_engine_download_status(self):
        """Refresh the progress bar/label from the first active download task.

        When no downloads remain, stops the polling timer, hides the bar, and
        refreshes the engines table from the server.
        """
        running_tasks = EngineManager.active_downloads()
        if not running_tasks:
            self.timer.stop()
            self.engine_download_progress_bar.setHidden(True)
            self.installed_engines_table.update_engines_table(use_cached=False)
            self.update_last_checked_label()
            self.check_for_new_engines_button.setEnabled(True)
            return
        percent_complete = int(running_tasks[0].percent_complete * 100)
        self.engine_download_progress_bar.setValue(percent_complete)
        if percent_complete == 100:
            status_update = f"Installing {running_tasks[0].engine.capitalize()} {running_tasks[0].version}..."
        else:
            status_update = f"Downloading {running_tasks[0].engine.capitalize()} {running_tasks[0].version}..."
        self.engines_last_update_label.setText(status_update)
class EngineTableWidget(QWidget):
    """
    Table widget listing the engines installed on the local server.

    Attributes:
        table: The QTableWidget displaying engine rows (Engine/Version/Type/Path).

    Signals:
        row_selected: Emitted whenever the table selection changes.
    """
    row_selected = Signal()

    # Column order shared by the header labels and row population.
    _COLUMNS = ("engine", "version", "type", "path")

    def __init__(self):
        super().__init__()
        self.__get_engine_info_worker = None
        self.table = QTableWidget(0, 4)
        self.table.setHorizontalHeaderLabels(["Engine", "Version", "Type", "Path"])
        self.table.setSelectionBehavior(QAbstractItemView.SelectionBehavior.SelectRows)
        self.table.verticalHeader().setVisible(False)
        # self.table_widget.itemSelectionChanged.connect(self.engine_picked)
        self.table.setEditTriggers(QAbstractItemView.EditTrigger.NoEditTriggers)
        self.table.selectionModel().selectionChanged.connect(self.on_selection_changed)
        outer_layout = QVBoxLayout(self)
        outer_layout.setContentsMargins(0, 0, 0, 0)
        outer_layout.setSpacing(0)
        outer_layout.addWidget(self.table)
        # Cached engine-info payload from the last worker fetch.
        self.raw_server_data = None

    def showEvent(self, event):
        """Refresh the table whenever the widget becomes visible."""
        self.update_engines_table()
        super().showEvent(event)  # Ensure normal event processing

    def engine_data_ready(self, raw_server_data):
        """Receive fetched data from the worker and repopulate the table."""
        self.raw_server_data = raw_server_data
        self.update_engines_table()

    def update_engines_table(self, use_cached=True):
        """Rebuild the table rows, fetching fresh data in the background if needed."""
        if not self.raw_server_data or not use_cached:
            worker = GetEngineInfoWorker(self)
            worker.done.connect(self.engine_data_ready)
            self.__get_engine_info_worker = worker
            worker.start()
        if not self.raw_server_data:
            # Nothing cached yet; the worker callback will repopulate later.
            return
        # Flatten {engine: {'versions': [...]}} into a single list of rows.
        rows = []
        for engine_data in self.raw_server_data.values():
            rows.extend(engine_data['versions'])
        if settings.value("engines_ignore_system_installs", False, type=bool):
            rows = [entry for entry in rows if entry['type'] != 'system']
        self.table.clear()
        self.table.setRowCount(len(rows))
        self.table.setColumnCount(4)
        self.table.setHorizontalHeaderLabels(['Engine', 'Version', 'Type', 'Path'])
        header = self.table.horizontalHeader()
        for fixed_column in (0, 1, 2):
            header.setSectionResizeMode(fixed_column, QHeaderView.ResizeMode.Fixed)
        header.setSectionResizeMode(3, QHeaderView.ResizeMode.Stretch)
        for row_index, entry in enumerate(rows):
            for column, field in enumerate(self._COLUMNS):
                self.table.setItem(row_index, column, QTableWidgetItem(entry[field]))
        self.table.selectRow(0)

    def selected_engine_data(self):
        """Return the selected row as a dict, or None when nothing is selected."""
        row = self.table.currentRow()
        if row < 0 or not self.table.selectedItems():
            return None
        return {field: self.table.item(row, column).text()
                for column, field in enumerate(self._COLUMNS)}

    def on_selection_changed(self):
        """Forward table selection changes to listeners via row_selected."""
        self.row_selected.emit()
if __name__ == "__main__":
    # Standalone launch for manually exercising the settings UI.
    qt_app = QApplication([])
    settings_window = SettingsWindow()
    settings_window.show()
    qt_app.exec()
+6 -88
View File
@@ -1,6 +1,5 @@
''' app/ui/widgets/menubar.py ''' ''' app/ui/widgets/menubar.py '''
from PyQt6.QtGui import QAction from PyQt6.QtWidgets import QMenuBar
from PyQt6.QtWidgets import QMenuBar, QApplication, QMessageBox, QDialog, QVBoxLayout, QLabel, QPushButton
class MenuBar(QMenuBar): class MenuBar(QMenuBar):
@@ -13,93 +12,12 @@ class MenuBar(QMenuBar):
def __init__(self, parent=None) -> None: def __init__(self, parent=None) -> None:
super().__init__(parent) super().__init__(parent)
self.settings_window = None
# setup menus
file_menu = self.addMenu("File") file_menu = self.addMenu("File")
# edit_menu = self.addMenu("Edit") # edit_menu = self.addMenu("Edit")
# view_menu = self.addMenu("View") # view_menu = self.addMenu("View")
help_menu = self.addMenu("Help") # help_menu = self.addMenu("Help")
# --file menu-- # Add actions to the menus
# new job # file_menu.addAction(self.parent().topbar.actions_call["Open"]) # type: ignore
new_job_action = QAction("New Job...", self) # file_menu.addAction(self.parent().topbar.actions_call["Save"]) # type: ignore
new_job_action.setShortcut(f'Ctrl+N') # file_menu.addAction(self.parent().topbar.actions_call["Exit"]) # type: ignore
new_job_action.triggered.connect(self.new_job)
file_menu.addAction(new_job_action)
# settings
settings_action = QAction("Settings...", self)
settings_action.triggered.connect(self.show_settings)
settings_action.setShortcut(f'Ctrl+,')
file_menu.addAction(settings_action)
# exit
exit_action = QAction('&Exit', self)
exit_action.setShortcut('Ctrl+Q')
exit_action.triggered.connect(QApplication.instance().quit)
file_menu.addAction(exit_action)
# --help menu--
about_action = QAction("About", self)
about_action.triggered.connect(self.show_about)
help_menu.addAction(about_action)
update_action = QAction("Check for Updates...", self)
update_action.triggered.connect(self.check_for_updates)
help_menu.addAction(update_action)
def new_job(self):
self.parent().new_job()
def show_settings(self):
from src.ui.settings_window import SettingsWindow
self.settings_window = SettingsWindow()
self.settings_window.show()
@staticmethod
def show_about():
from src.ui.about_window import AboutDialog
dialog = AboutDialog()
dialog.exec()
@staticmethod
def check_for_updates():
from src.utilities.misc_helper import check_for_updates
from src.version import APP_NAME, APP_VERSION, APP_REPO_NAME, APP_REPO_OWNER
found_update = check_for_updates(APP_REPO_NAME, APP_REPO_OWNER, APP_NAME, APP_VERSION)
if found_update:
dialog = UpdateDialog(found_update, APP_VERSION)
dialog.exec()
else:
QMessageBox.information(None, "No Update", "No updates available.")
class UpdateDialog(QDialog):
def __init__(self, release_info, current_version, parent=None):
super().__init__(parent)
self.setWindowTitle(f"Update Available ({current_version} -> {release_info['tag_name']})")
layout = QVBoxLayout()
label = QLabel(f"A new version ({release_info['tag_name']}) is available! Current version: {current_version}")
layout.addWidget(label)
# Label to show the release notes
description = QLabel(release_info["body"])
layout.addWidget(description)
# Button to download the latest version
download_button = QPushButton(f"Download Latest Version ({release_info['tag_name']})")
download_button.clicked.connect(lambda: self.open_url(release_info["html_url"]))
layout.addWidget(download_button)
# OK button to dismiss the dialog
ok_button = QPushButton("Dismiss")
ok_button.clicked.connect(self.accept) # Close the dialog when clicked
layout.addWidget(ok_button)
self.setLayout(layout)
def open_url(self, url):
from PyQt6.QtCore import QUrl
from PyQt6.QtGui import QDesktopServices
QDesktopServices.openUrl(QUrl(url))
self.accept()
+4 -5
View File
@@ -35,13 +35,12 @@ class StatusBar(QStatusBar):
try: try:
# update status label - get download status # update status label - get download status
new_status = proxy.status() new_status = proxy.status()
active_downloads = EngineManager.active_downloads() if EngineManager.download_tasks:
if active_downloads: if len(EngineManager.download_tasks) == 1:
if len(active_downloads) == 1: task = EngineManager.download_tasks[0]
task = active_downloads[0]
new_status = f"{new_status} | Downloading {task.engine.capitalize()} {task.version}..." new_status = f"{new_status} | Downloading {task.engine.capitalize()} {task.version}..."
else: else:
new_status = f"{new_status} | Downloading {len(active_downloads)} engines" new_status = f"{new_status} | Downloading {len(EngineManager.download_tasks)} engines"
self.messageLabel.setText(new_status) self.messageLabel.setText(new_status)
# update status image # update status image
+3 -3
View File
@@ -4,18 +4,18 @@ from src.engines.ffmpeg.ffmpeg_engine import FFMPEG
def image_sequence_to_video(source_glob_pattern, output_path, framerate=24, encoder="prores_ks", profile=4, def image_sequence_to_video(source_glob_pattern, output_path, framerate=24, encoder="prores_ks", profile=4,
start_frame=1): start_frame=1):
subprocess.run([FFMPEG.default_engine_path(), "-framerate", str(framerate), "-start_number", subprocess.run([FFMPEG.default_renderer_path(), "-framerate", str(framerate), "-start_number",
str(start_frame), "-i", f"{source_glob_pattern}", "-c:v", encoder, "-profile:v", str(profile), str(start_frame), "-i", f"{source_glob_pattern}", "-c:v", encoder, "-profile:v", str(profile),
'-pix_fmt', 'yuva444p10le', output_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, '-pix_fmt', 'yuva444p10le', output_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
check=True) check=True)
def save_first_frame(source_path, dest_path, max_width=1280): def save_first_frame(source_path, dest_path, max_width=1280):
subprocess.run([FFMPEG.default_engine_path(), '-i', source_path, '-vf', f'scale={max_width}:-1', subprocess.run([FFMPEG.default_renderer_path(), '-i', source_path, '-vf', f'scale={max_width}:-1',
'-vframes', '1', dest_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True) '-vframes', '1', dest_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
def generate_thumbnail(source_path, dest_path, max_width=240, fps=12): def generate_thumbnail(source_path, dest_path, max_width=240, fps=12):
subprocess.run([FFMPEG.default_engine_path(), '-i', source_path, '-vf', subprocess.run([FFMPEG.default_renderer_path(), '-i', source_path, '-vf',
f"scale={max_width}:trunc(ow/a/2)*2,format=yuv420p", '-r', str(fps), '-c:v', 'libx264', '-preset', f"scale={max_width}:trunc(ow/a/2)*2,format=yuv420p", '-r', str(fps), '-c:v', 'libx264', '-preset',
'ultrafast', '-an', dest_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True) 'ultrafast', '-an', dest_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
-201
View File
@@ -1,13 +1,10 @@
import logging import logging
import json
import os import os
import platform import platform
import re
import shutil import shutil
import socket import socket
import string import string
import subprocess import subprocess
import sys
from datetime import datetime from datetime import datetime
logger = logging.getLogger() logger = logging.getLogger()
@@ -140,40 +137,6 @@ def current_system_cpu():
# convert all x86 64 to "x64" # convert all x86 64 to "x64"
return platform.machine().lower().replace('amd64', 'x64').replace('x86_64', 'x64') return platform.machine().lower().replace('amd64', 'x64').replace('x86_64', 'x64')
def current_system_cpu_brand():
    """Return the CPU's marketing/brand string (fast, cross-platform).

    Uses a platform-specific lookup: ``sysctl`` on macOS, the registry on
    Windows, ``/proc/cpuinfo`` on Linux. Never raises; falls back to
    ``platform.processor()`` or ``'Unknown CPU'`` when nothing is available.

    Returns:
        str: CPU brand string, e.g. "Intel(R) Core(TM) i7-...".
    """
    if sys.platform.startswith('darwin'):  # macOS
        try:
            return subprocess.check_output(
                ['sysctl', '-n', 'machdep.cpu.brand_string']).decode().strip()
        except Exception:
            pass  # best-effort; fall through to the generic fallback
    elif sys.platform.startswith('win'):  # Windows
        from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
        # The marketing name lives under CentralProcessor\0; occasionally it is
        # only present under a different index, so try the first two.
        for index in (0, 1):
            try:
                key_path = rf"HARDWARE\DESCRIPTION\System\CentralProcessor\{index}"
                # 'with' closes the registry handle (the original leaked it)
                with OpenKey(HKEY_LOCAL_MACHINE, key_path) as key:
                    value, _ = QueryValueEx(key, "ProcessorNameString")
                return value.strip()  # usually the full marketing name
            except Exception:
                continue
        return "Unknown CPU"
    elif sys.platform.startswith('linux'):
        try:
            with open('/proc/cpuinfo', encoding='utf-8') as f:
                for line in f:
                    if line.startswith('model name'):
                        return line.split(':', 1)[1].strip()
        except Exception:
            pass  # e.g. unusual /proc layout; fall through
    # Ultimate fallback
    return platform.processor() or 'Unknown CPU'
def resources_dir(): def resources_dir():
resource_environment_path = os.environ.get('RESOURCEPATH', None) resource_environment_path = os.environ.get('RESOURCEPATH', None)
@@ -196,33 +159,6 @@ def copy_directory_contents(src_dir, dst_dir):
shutil.copy2(src_path, dst_path) shutil.copy2(src_path, dst_path)
def check_for_updates(repo_name, repo_owner, app_name, current_version):
    """Check GitHub for a newer release of the application.

    Args:
        repo_name: GitHub repository name.
        repo_owner: GitHub repository owner/organization.
        app_name: Human-readable app name (used only for logging).
        current_version: The running version string, parseable by
            ``packaging.version``.

    Returns:
        The latest release dict from the GitHub API if its tag is newer than
        *current_version*, otherwise None (also None on any network error).
    """
    import requests

    releases_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases"
    try:
        response = requests.get(releases_url, timeout=3)
        response.raise_for_status()
        releases = response.json()
    except Exception as e:
        logger.error(f"Error checking for updates: {e}")
        return None

    if not releases:
        return None

    # GitHub returns releases newest-first
    latest_version = releases[0]
    latest_version_tag = latest_version['tag_name']
    from packaging import version
    if version.parse(latest_version_tag) > version.parse(current_version):
        logger.info(f"Newer version of {app_name} available. "
                    f"Latest: {latest_version_tag}, Current: {current_version}")
        return latest_version
    return None
def is_localhost(comparison_hostname): def is_localhost(comparison_hostname):
# this is necessary because socket.gethostname() does not always include '.local' - This is a sanitized comparison # this is necessary because socket.gethostname() does not always include '.local' - This is a sanitized comparison
try: try:
@@ -247,140 +183,3 @@ def num_to_alphanumeric(num):
result += characters[remainder] result += characters[remainder]
return result[::-1] # Reverse the result to get the correct alphanumeric string return result[::-1] # Reverse the result to get the correct alphanumeric string
def iso_datestring_to_formatted_datestring(iso_date_string):
    """Convert an ISO-8601 date string into a readable-yet-sortable local
    timestamp ('YYYY-MM-DD HH:MM AM/PM')."""
    import pytz
    from dateutil import parser

    # Normalize to UTC first, then shift into this machine's local timezone
    utc_date = parser.isoparse(iso_date_string).astimezone(pytz.UTC)
    local_timezone = datetime.now().astimezone().tzinfo
    return utc_date.astimezone(local_timezone).strftime('%Y-%m-%d %I:%M %p')
def get_gpu_info():
    """Cross-platform GPU information retrieval.

    Returns:
        list[dict]: one dict per detected GPU with at least 'name' and
        'memory' keys (Linux entries also carry 'vendor'). Best-effort:
        never raises; returns placeholder entries or [] on failure.
    """
    def get_windows_gpu_info():
        """Get GPU info on Windows via WMIC, skipping virtual adapters."""
        try:
            result = subprocess.run(
                ['wmic', 'path', 'win32_videocontroller', 'get', 'name,AdapterRAM', '/format:list'],
                capture_output=True, text=True, timeout=5
            )

            # Virtual adapters to exclude
            virtual_adapters = [
                'virtual', 'rdp', 'hyper-v', 'microsoft basic', 'basic display',
                'vga compatible', 'dummy', 'nvfbc', 'nvencode'
            ]

            gpus = []
            current_gpu = None
            for line in result.stdout.strip().split('\n'):
                line = line.strip()
                if not line:
                    continue
                if line.startswith('Name='):
                    # A new adapter record starts; flush the previous one
                    if current_gpu and current_gpu.get('name'):
                        gpus.append(current_gpu)
                    gpu_name = line.replace('Name=', '').strip()
                    # Skip virtual adapters
                    if any(virtual in gpu_name.lower() for virtual in virtual_adapters):
                        current_gpu = None
                    else:
                        current_gpu = {'name': gpu_name, 'memory': 'Integrated'}
                elif line.startswith('AdapterRAM=') and current_gpu:
                    vram_bytes_str = line.replace('AdapterRAM=', '').strip()
                    if vram_bytes_str and vram_bytes_str != '0':
                        try:
                            vram_gb = int(vram_bytes_str) / (1024 ** 3)
                            current_gpu['memory'] = round(vram_gb, 2)
                        except ValueError:
                            # Non-numeric WMIC value; keep the default
                            pass
            if current_gpu and current_gpu.get('name'):
                gpus.append(current_gpu)

            return gpus if gpus else [{'name': 'Unknown GPU', 'memory': 'Unknown'}]
        except Exception as e:
            logger.error(f"Failed to get Windows GPU info: {e}")
            return [{'name': 'Unknown GPU', 'memory': 'Unknown'}]

    def get_macos_gpu_info():
        """Get GPU info on macOS (works with Apple Silicon)."""
        try:
            if current_system_cpu() == "arm64":
                # don't bother with system_profiler with Apple ARM - we know its integrated
                return [{'name': current_system_cpu_brand(), 'memory': 'Integrated'}]
            result = subprocess.run(['system_profiler', 'SPDisplaysDataType', '-detailLevel', 'mini', '-json'],
                                    capture_output=True, text=True, timeout=5)
            data = json.loads(result.stdout)
            gpus = []
            for display in data.get('SPDisplaysDataType', []):
                if 'sppci_model' in display:
                    gpus.append({
                        'name': display.get('sppci_model', 'Unknown GPU'),
                        'memory': display.get('sppci_vram', 'Integrated'),
                    })
            return gpus if gpus else [{'name': 'Apple GPU', 'memory': 'Integrated'}]
        except Exception as e:
            # use the module logger, consistent with the other helpers
            logger.error(f"Failed to get macOS GPU info: {e}")
            return [{'name': 'Unknown GPU', 'memory': 'Unknown'}]

    def get_linux_gpu_info():
        """Get GPU info on Linux by parsing plain lspci output."""
        gpus = []
        try:
            # Run plain lspci and filter for GPU-related lines
            output = subprocess.check_output(
                ["lspci"], universal_newlines=True, stderr=subprocess.DEVNULL
            )
            for line in output.splitlines():
                if any(keyword in line.lower() for keyword in ["vga", "3d", "display"]):
                    # Extract the part after the colon (vendor + model)
                    if ":" in line:
                        name_part = line.split(":", 1)[1].strip()
                        # Clean up common extras like (rev xx) or (prog-if ...)
                        name = name_part.split("(")[0].split("controller:")[-1].strip()
                        vendor = "Unknown"
                        if "nvidia" in name.lower():
                            vendor = "NVIDIA"
                        elif "amd" in name.lower() or "ati" in name.lower():
                            vendor = "AMD"
                        elif "intel" in name.lower():
                            vendor = "Intel"
                        gpus.append({
                            "name": name,
                            "vendor": vendor,
                            "memory": "Unknown"
                        })
        except FileNotFoundError:
            logger.error("lspci not found. Install pciutils: sudo apt install pciutils")
            return []
        except Exception as e:
            logger.error(f"Error running lspci: {e}")
            return []
        return gpus

    system = platform.system()
    if system == 'Darwin':  # macOS
        return get_macos_gpu_info()
    if system == 'Windows':
        return get_windows_gpu_info()
    return get_linux_gpu_info()  # Assume Linux or other
+33 -186
View File
@@ -1,200 +1,47 @@
import logging import logging
import os import os
import zipfile import subprocess
from concurrent.futures import ThreadPoolExecutor import threading
import requests from src.utilities.ffmpeg_helper import generate_thumbnail, save_first_frame
from src.api.server_proxy import RenderServerProxy
from src.utilities.misc_helper import get_file_size_human
from src.utilities.zeroconf_server import ZeroconfServer
logger = logging.getLogger() logger = logging.getLogger()
def download_missing_frames_from_subjob(local_job, subjob_id, subjob_hostname): def generate_thumbnail_for_job(job, thumb_video_path, thumb_image_path, max_width=320):
success = True
try:
local_files = [os.path.basename(x) for x in local_job.file_list()]
subjob_proxy = RenderServerProxy(subjob_hostname)
subjob_files = subjob_proxy.get_job_files_list(job_id=subjob_id) or []
for subjob_filename in subjob_files: # Simple thread to generate thumbs in background
if subjob_filename not in local_files: def generate_thumb_thread(source):
try: in_progress_path = thumb_video_path + '_IN-PROGRESS'
logger.debug(f"Downloading new file '{subjob_filename}' from {subjob_hostname}") subprocess.run(['touch', in_progress_path])
local_save_path = os.path.join(os.path.dirname(local_job.output_path), subjob_filename) try:
subjob_proxy.download_job_file(job_id=subjob_id, job_filename=subjob_filename, logger.debug(f"Generating video thumbnail for {source}")
save_path=local_save_path) generate_thumbnail(source_path=source, dest_path=thumb_video_path, max_width=max_width)
logger.debug(f'Downloaded successfully - {local_save_path}') except subprocess.CalledProcessError as err:
except Exception as e: logger.error(f"Error generating video thumbnail for {source}: {err}")
logger.error(f"Error downloading file '{subjob_filename}' from {subjob_hostname}: {e}")
success = False
except Exception as e:
logger.exception(f'Uncaught exception while trying to download from subjob: {e}')
success = False
return success
try:
os.remove(in_progress_path)
except FileNotFoundError:
pass
def download_all_from_subjob(local_job, subjob_id, subjob_hostname): # Determine best source file to use for thumbs
""" source_files = job.file_list() or [job.input_path]
Downloads and extracts files from a completed subjob on a remote server. if source_files:
video_formats = ['.mp4', '.mov', '.avi', '.mpg', '.mpeg', '.mxf', '.m4v', 'mkv']
image_formats = ['.jpg', '.png', '.exr']
Parameters: image_files = [f for f in source_files if os.path.splitext(f)[-1].lower() in image_formats]
local_job (BaseRenderWorker): The local parent job worker. video_files = [f for f in source_files if os.path.splitext(f)[-1].lower() in video_formats]
subjob_id (str or int): The ID of the subjob.
subjob_hostname (str): The hostname of the remote server where the subjob is located.
Returns: if (video_files or image_files) and not os.path.exists(thumb_image_path):
bool: True if the files have been downloaded and extracted successfully, False otherwise.
"""
child_key = f'{subjob_id}@{subjob_hostname}'
logname = f"{local_job.id}:{child_key}"
zip_file_path = local_job.output_path + f'_{subjob_hostname}_{subjob_id}.zip'
# download zip file from server
try:
local_job.children[child_key]['download_status'] = 'working'
logger.info(f"Downloading completed subjob files from {subjob_hostname} to localhost")
RenderServerProxy(subjob_hostname).download_all_job_files(subjob_id, zip_file_path)
logger.info(f"File transfer complete for {logname} - Transferred {get_file_size_human(zip_file_path)}")
except Exception as e:
logger.error(f"Error downloading files from remote server: {e}")
local_job.children[child_key]['download_status'] = 'failed'
return False
# extract zip
try:
logger.debug(f"Extracting zip file: {zip_file_path}")
extract_path = os.path.dirname(zip_file_path)
with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
zip_ref.extractall(extract_path)
logger.info(f"Successfully extracted zip to: {extract_path}")
os.remove(zip_file_path)
local_job.children[child_key]['download_status'] = 'complete'
except Exception as e:
logger.exception(f"Exception extracting zip file: {e}")
local_job.children[child_key]['download_status'] = 'failed'
return local_job.children[child_key].get('download_status', None) == 'complete'
def distribute_server_work(start_frame, end_frame, available_servers, method='evenly'):
"""
Splits the frame range among available servers proportionally based on their performance (CPU count).
Args:
start_frame (int): The start frame number of the animation to be rendered.
end_frame (int): The end frame number of the animation to be rendered.
available_servers (list): A list of available server dictionaries. Each server dictionary should include
'hostname' and 'cpu_count' keys (see find_available_servers).
method (str, optional): Specifies the distribution method. Possible values are 'cpu_benchmark', 'cpu_count'
and 'evenly'.
Defaults to 'cpu_benchmark'.
Returns:
list: A list of server dictionaries where each dictionary includes the frame range and total number of
frames to be rendered by the server.
"""
# Calculate respective frames for each server
def divide_frames_by_cpu_count(frame_start, frame_end, servers):
    """Split [frame_start, frame_end] among servers proportionally to each
    server's 'cpu_count'; the last server absorbs any rounding remainder.

    Returns a dict of {hostname: (first_frame, last_frame)}.
    """
    total_frames = frame_end - frame_start + 1
    total_cpus = sum(srv['cpu_count'] for srv in servers)

    frame_ranges = {}
    next_frame = frame_start
    handed_out = 0
    last_index = len(servers) - 1
    for idx, srv in enumerate(servers):
        if idx == last_index:
            # Last server takes whatever rounding left behind
            share = total_frames - handed_out
        else:
            share = round((srv['cpu_count'] / total_cpus) * total_frames)
        handed_out += share
        last_frame = next_frame + share - 1
        # A server may receive zero frames; only record non-empty ranges
        if next_frame <= last_frame:
            frame_ranges[srv['hostname']] = (next_frame, last_frame)
        next_frame = last_frame + 1
    return frame_ranges
def divide_frames_by_benchmark(frame_start, frame_end, servers):
def fetch_benchmark(server):
try: try:
benchmark = requests.get(f'http://{server["hostname"]}:{ZeroconfServer.server_port}' path_of_source = image_files[0] if image_files else video_files[0]
f'/api/cpu_benchmark').text logger.debug(f"Generating image thumbnail for {path_of_source}")
server['cpu_benchmark'] = benchmark save_first_frame(source_path=path_of_source, dest_path=thumb_image_path, max_width=max_width)
logger.debug(f'Benchmark for {server["hostname"]}: {benchmark}') except Exception as e:
except requests.exceptions.RequestException as e: logger.error(f"Exception saving first frame: {e}")
logger.error(f'Error fetching benchmark for {server["hostname"]}: {e}')
# Number of threads to use (can adjust based on your needs or number of servers) if video_files and not os.path.exists(thumb_video_path):
threads = len(servers) x = threading.Thread(target=generate_thumb_thread, args=(video_files[0],))
x.start()
with ThreadPoolExecutor(max_workers=threads) as executor:
executor.map(fetch_benchmark, servers)
total_frames = frame_end - frame_start + 1
total_performance = sum(int(server['cpu_benchmark']) for server in servers)
frame_ranges = {}
current_frame = frame_start
allocated_frames = 0
for i, server in enumerate(servers):
if i == len(servers) - 1: # if it's the last server
# Give all remaining frames to the last server
num_frames = total_frames - allocated_frames
else:
num_frames = round((int(server['cpu_benchmark']) / total_performance) * total_frames)
allocated_frames += num_frames
frame_end_for_server = current_frame + num_frames - 1
if current_frame <= frame_end_for_server:
frame_ranges[server['hostname']] = (current_frame, frame_end_for_server)
current_frame = frame_end_for_server + 1
return frame_ranges
def divide_frames_equally(frame_start, frame_end, servers):
    """Split [frame_start, frame_end] into near-equal contiguous chunks,
    one per server; the first servers each take one extra leftover frame.

    Returns a dict of {hostname: (first_frame, last_frame)}; servers that
    would receive an empty range are omitted.
    """
    frame_range = frame_end - frame_start + 1
    frames_per_server = frame_range // len(servers)
    leftover_frames = frame_range % len(servers)
    frame_ranges = {}
    current_start = frame_start
    for server in servers:  # index was unused; iterate servers directly
        current_end = current_start + frames_per_server - 1
        if leftover_frames > 0:
            # Hand the remainder out one frame at a time to the first servers
            current_end += 1
            leftover_frames -= 1
        if current_start <= current_end:
            frame_ranges[server['hostname']] = (current_start, current_end)
        current_start = current_end + 1
    return frame_ranges
if len(available_servers) == 1:
breakdown = {available_servers[0]['hostname']: (start_frame, end_frame)}
else:
logger.debug(f'Splitting between {len(available_servers)} servers by {method} method')
if method == 'evenly':
breakdown = divide_frames_equally(start_frame, end_frame, available_servers)
elif method == 'cpu_benchmark':
breakdown = divide_frames_by_benchmark(start_frame, end_frame, available_servers)
elif method == 'cpu_count':
breakdown = divide_frames_by_cpu_count(start_frame, end_frame, available_servers)
else:
raise ValueError(f"Invalid distribution method: {method}")
server_breakdown = [server for server in available_servers if breakdown.get(server['hostname']) is not None]
for server in server_breakdown:
server['frame_range'] = breakdown[server['hostname']]
server['total_frames'] = breakdown[server['hostname']][-1] - breakdown[server['hostname']][0] + 1
return server_breakdown
+2 -4
View File
@@ -32,11 +32,9 @@ class ZeroconfServer:
def start(cls, listen_only=False): def start(cls, listen_only=False):
if not cls.service_type: if not cls.service_type:
raise RuntimeError("The 'configure' method must be run before starting the zeroconf server") raise RuntimeError("The 'configure' method must be run before starting the zeroconf server")
elif not listen_only: logger.debug("Starting zeroconf service")
logger.debug(f"Starting zeroconf service") if not listen_only:
cls._register_service() cls._register_service()
else:
logger.debug(f"Starting zeroconf service - Listen only mode")
cls._browse_services() cls._browse_services()
@classmethod @classmethod
-8
View File
@@ -1,8 +0,0 @@
# Application identity / metadata constants.
APP_NAME = "Zordon"
APP_VERSION = "0.0.1"  # version string of this build
APP_AUTHOR = "Brett Williams"
APP_DESCRIPTION = "Distributed Render Farm Tools"
APP_COPYRIGHT_YEAR = "2024"
APP_LICENSE = "MIT License"
# GitHub repository coordinates (repo shares the app's name)
APP_REPO_NAME = APP_NAME
APP_REPO_OWNER = "blw1138"