6 Commits

Author SHA1 Message Date
Brett Williams
a3e2fa7e07 Update the renderer to reflect the current file type 2023-11-17 08:18:07 -06:00
Brett Williams
23901bc8e4 Fix add_job crashing 2023-11-16 14:36:09 -06:00
Brett Williams
ba996c58f5 Make sure supported_extensions is now called as a method everywhere 2023-11-16 14:01:48 -06:00
Brett Williams
9e8eb77328 Cleanup extension matching 2023-11-16 13:55:49 -06:00
Brett Williams
81d2cb70b8 Cleanup unnecessary code in FFMPEG 2023-11-16 11:27:30 -06:00
Brett Williams
6dc8db2d8c Make sure progress UI updates occur on main thread 2023-11-16 06:13:12 -06:00
77 changed files with 1472 additions and 1392 deletions

View File

@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
python-version: ["3.10", "3.11", "3.12"] python-version: ["3.8", "3.9", "3.10"]
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}

6
.gitignore vendored
View File

@@ -1,8 +1,8 @@
/job_history.json
*.icloud *.icloud
*.fcpxml *.fcpxml
/uploads /uploads
*.pyc *.pyc
/server_state.json
/.scheduler_prefs
*.db *.db
/dist/
/build/
/.github/

View File

@@ -1,4 +0,0 @@
[MASTER]
max-line-length = 120
[MESSAGES CONTROL]
disable = missing-docstring, invalid-name, import-error, logging-fstring-interpolation

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 Brett Williams
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,10 +1,19 @@
# 🎬 Zordon - Render Management Tools # 🎬 Zordon - Render Management Tools 🎬
Welcome to Zordon! It's a local network render farm manager, aiming to streamline and simplify the rendering process across multiple home computers. Welcome to Zordon! This is a hobby project written with fellow filmmakers in mind. It's a local network render farm manager, aiming to streamline and simplify the rendering process across multiple home computers.
## 📦 Installation ## 📦 Installation
Install the necessary dependencies: `pip3 install -r requirements.txt` Make sure to install the necessary dependencies: `pip3 install -r requirements.txt`
## 🚀 How to Use
Zordon has two main files: `start_server.py` and `start_client.py`.
- **start_server.py**: Run this on any computer you want to render jobs. It manages the incoming job queue and kicks off the appropriate render jobs when ready.
- **start_client.py**: Run this to administer your render servers. It lets you manage and submit jobs.
When the server is running, the job queue can be accessed via a web browser on the server's hostname (default port is 8080). You can also access it via the GUI client or a simple view-only dashboard.
## 🎨 Supported Renderers ## 🎨 Supported Renderers

279
dashboard.py Normal file
View File

@@ -0,0 +1,279 @@
#!/usr/bin/env python3
import datetime
import os.path
import socket
import threading
import time
import traceback
from rich import box
from rich.console import Console
from rich.layout import Layout
from rich.live import Live
from rich.panel import Panel
from rich.table import Column
from rich.table import Table
from rich.text import Text
from rich.tree import Tree
from src.engines.core.base_worker import RenderStatus, string_to_status
from src.api.server_proxy import RenderServerProxy
from src.utilities.misc_helper import get_time_elapsed
from start_server import start_server
"""
The RenderDashboard is designed to be run on a remote machine or on the local server
This provides a detailed status of all jobs running on the server
"""
status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green',
RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple',
RenderStatus.RUNNING: 'cyan'}
categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.UNDEFINED]
renderer_colors = {'ffmpeg': '[magenta]', 'blender': '[orange1]', 'aerender': '[purple]'}
local_hostname = socket.gethostname()
def status_string_to_color(status_string):
    """Translate a raw job-status string into a Rich color markup tag.

    The string is first parsed into a RenderStatus via string_to_status,
    then looked up in the module-level status_colors table.
    Returns a tag of the form '[colorname]'.
    """
    parsed_status = string_to_status(status_string)
    color_name = status_colors[parsed_status]
    return '[{}]'.format(color_name)
def sorted_jobs(all_jobs, sort_by_date=True):
    """Return jobs sorted newest-first by creation date.

    With sort_by_date True (the default), all jobs are sorted together by
    their 'date_created' timestamp, newest first. With sort_by_date False,
    jobs are grouped by status in the order given by the module-level
    `categories` list, newest first within each group.

    The flag was previously a hard-coded local (always True), which made
    the grouped branch unreachable; it is now a defaulted parameter,
    matching the sibling sorted_jobs(all_jobs, sort_by_date=True) helper
    in the API server module.

    Args:
        all_jobs: list of job dicts; each carries 'date_created' as an
            ISO-8601 string and (for the grouped path) a 'status' value.
        sort_by_date: select plain date sort (True) or status-grouped
            sort (False).

    Returns:
        A new sorted list; empty input yields an empty list.
    """
    if sort_by_date:
        return sorted(all_jobs,
                      key=lambda d: datetime.datetime.fromisoformat(d['date_created']),
                      reverse=True)
    sorted_job_list = []
    if all_jobs:
        for status_category in categories:
            found_jobs = [x for x in all_jobs if x['status'] == status_category.value]
            if found_jobs:
                sorted_found_jobs = sorted(
                    found_jobs,
                    key=lambda d: datetime.datetime.fromisoformat(d['date_created']),
                    reverse=True)
                sorted_job_list.extend(sorted_found_jobs)
    return sorted_job_list
def create_node_tree(all_server_data) -> Tree:
    """Build the Rich Tree shown in the side panel: one node per server.

    For each entry in all_server_data['servers'], renders the hostname
    (highlighted when it matches this machine), a one-line CPU/RAM/cores
    stats row, and the currently running / queued / scheduled jobs.
    Offline servers (is_online falsy or absent) get a bare "Offline" node.

    Args:
        all_server_data: dict with a 'servers' mapping of hostname ->
            server data; each online server carries 'status' (with
            'cpu_percent', 'memory_percent', 'cpu_count', 'platform')
            and a 'jobs' list of job dicts.

    Returns:
        Tree: the assembled cluster tree rooted at "Server Cluster".
    """
    main_tree = Tree("[magenta]Server Cluster")
    for server_host, server_data in all_server_data['servers'].items():
        # Highlight the local machine differently from remote servers.
        node_title_local = f"[cyan bold]{server_host}[/] [yellow](This Computer)[default]"
        node_title_remote = f"[cyan]{server_host} [magenta](Remote)[default]"
        node_tree_text = node_title_local if (server_host == local_hostname) else node_title_remote
        if server_data.get('is_online', False):
            node_tree_text = node_tree_text + " - [green]Running"
            node_tree = Tree(node_tree_text)
            # One-line hardware summary; platform string is trimmed to its
            # first dash-separated token (e.g. "macOS" from "macOS-13-arm64").
            stats_text = f"CPU: [yellow]{server_data['status']['cpu_percent']}% [default]| RAM: " \
                         f"[yellow]{server_data['status']['memory_percent']}% [default]| Cores: " \
                         f"[yellow]{server_data['status']['cpu_count']} [default]| " \
                         f"{server_data['status']['platform'].split('-')[0]}"
            node_tree.add(Tree(stats_text))
            # Partition jobs by status; only active/pending ones are listed.
            running_jobs = [job for job in server_data['jobs'] if job['status'] == RenderStatus.RUNNING.value]
            not_started = [job for job in server_data['jobs'] if job['status'] == RenderStatus.NOT_STARTED.value]
            scheduled = [job for job in server_data['jobs'] if job['status'] == RenderStatus.SCHEDULED.value]
            jobs_to_display = running_jobs + not_started + scheduled
            jobs_tree = Tree(f"Running: [green]{len(running_jobs)} [default]| Queued: [cyan]{len(not_started)}"
                             f"[default] | Scheduled: [cyan]{len(scheduled)}")
            for job in jobs_to_display:
                renderer = f"{renderer_colors[job['renderer']]}{job['renderer']}[default]"
                # NOTE(review): `filename` is computed but never used below; the
                # literal "(unknown)" in the labels looks like it was meant to be
                # the job/file name — confirm against the original source.
                filename = os.path.basename(job['input_path']).split('.')[0]
                if job['status'] == RenderStatus.RUNNING.value:
                    # Running jobs show a live percentage, colored by status.
                    jobs_tree.add(f"[bold]{renderer} (unknown) ({job['id']}) - {status_string_to_color(job['status'])}{(float(job['percent_complete']) * 100):.1f}%")
                else:
                    jobs_tree.add(f"(unknown) ({job['id']}) - {status_string_to_color(job['status'])}{job['status'].title()}")
            if not jobs_to_display:
                jobs_tree.add("[italic]No running jobs")
            node_tree.add(jobs_tree)
            main_tree.add(node_tree)
        else:
            # if server is offline
            node_tree_text = node_tree_text + " - [red]Offline"
            node_tree = Tree(node_tree_text)
            main_tree.add(node_tree)
    return main_tree
def create_jobs_table(all_server_data) -> Table:
    """Build the main Rich Table of all jobs across every server.

    Jobs from all servers are flattened, ordered via sorted_jobs, and
    rendered one row each with status-colored name, renderer, priority,
    status text, elapsed time, frame count, and submitting client.

    Args:
        all_server_data: dict with a 'servers' mapping; each server data
            dict carries a 'jobs' list of job dicts.

    Returns:
        Table: the populated jobs table.
    """
    table = Table("ID", "Name", "Renderer", Column(header="Priority", justify="center"),
                  Column(header="Status", justify="center"), Column(header="Time Elapsed", justify="right"),
                  Column(header="# Frames", justify="right"), "Client", show_lines=True,
                  box=box.HEAVY_HEAD)
    all_jobs = []
    for server_name, server_data in all_server_data['servers'].items():
        for job in server_data['jobs']:
            #todo: clean this up
            all_jobs.append(job)
    all_jobs = sorted_jobs(all_jobs)
    for job in all_jobs:
        job_status = string_to_status(job['status'])
        job_color = '[{}]'.format(status_colors[job_status])
        job_text = f"{job_color}" + job_status.value.title()
        # Errored jobs append their error messages below the status label.
        if job_status == RenderStatus.ERROR and job['errors']:
            job_text = job_text + "\n" + "\n".join(job['errors'])
        # Project name
        project_name = job_color + (job['name'] or os.path.basename(job['input_path']))
        # NOTE(review): assumes 'start_time'/'end_time' are always valid
        # ISO strings here — fromisoformat would raise otherwise; confirm
        # what the server sends for jobs that never started.
        elapsed_time = get_time_elapsed(datetime.datetime.fromisoformat(job['start_time']),
                                        datetime.datetime.fromisoformat(job['end_time']))
        if job_status == RenderStatus.RUNNING:
            # Running rows are bold and show live percent complete.
            job_text = f"{job_color}[bold]Running - {float(job['percent_complete']) * 100:.1f}%"
            elapsed_time = "[bold]" + elapsed_time
            project_name = "[bold]" + project_name
        elif job_status == RenderStatus.CANCELLED or job_status == RenderStatus.ERROR:
            # Strike through names of jobs that will never finish.
            project_name = "[strike]" + project_name
        # Priority
        # NOTE(review): only priorities 1-3 map to a color; any other value
        # would raise IndexError — confirm priority range at the source.
        priority_color = ["red", "yellow", "cyan"][(job['priority'] - 1)]
        client_name = job['client'] or 'unknown'
        client_colors = {'unknown': '[red]', local_hostname: '[yellow]'}
        client_title = client_colors.get(client_name, '[magenta]') + client_name
        table.add_row(
            job['id'],
            project_name,
            renderer_colors.get(job['renderer'], '[cyan]') + job['renderer'] + '[default]-' + job['renderer_version'],
            f"[{priority_color}]{job['priority']}",
            job_text,
            elapsed_time,
            str(max(int(job['total_frames']), 1)),
            client_title
        )
    return table
def create_status_panel(all_server_data):
    """Return the local server's raw status dict rendered as a string.

    Scans all_server_data['servers'] for the entry whose hostname matches
    this machine; falls back to the literal "no status" when absent.
    """
    servers = all_server_data['servers']
    for hostname, server_entry in servers.items():
        if hostname != local_hostname:
            continue
        return str(server_entry['status'])
    return "no status"
class KeyboardThread(threading.Thread):
    """Background thread that forwards each line typed on stdin to a callback.

    NOTE: the thread starts itself from __init__, so constructing an
    instance immediately begins blocking on input() in the background.
    """

    def __init__(self, input_cbk=None, name='keyboard-input-thread'):
        # Callback invoked with each Return-terminated line of input.
        self.input_cbk = input_cbk
        super().__init__(name=name)
        self.start()

    def run(self):
        # Loop forever: block until the user presses Return, then hand the
        # line to the callback.
        while True:
            line = input()
            self.input_cbk(line)
def my_callback(inp):
    """Demo keyboard callback: echo what the user typed to the console."""
    print(f"You Entered: {inp}")
if __name__ == '__main__':
    # Ask which server to monitor; blank input means the local machine.
    get_server_ip = input("Enter server IP or None for local: ") or local_hostname
    server_proxy = RenderServerProxy(get_server_ip, "8080")
    if not server_proxy.connect():
        if server_proxy.hostname == local_hostname:
            # Local server isn't up — offer to launch it in-process.
            start_server_input = input("Local server not running. Start server? (y/n) ")
            if start_server_input and start_server_input[0].lower() == "y":
                # Startup the local server
                start_server()
                test = server_proxy.connect()
                print(f"connected? {test}")
        else:
            # Remote server unreachable: nothing we can do, bail out.
            print(f"\nUnable to connect to server: {server_proxy.hostname}")
            print("\nVerify IP address is correct and server is running")
            exit(1)
    # start the Keyboard thread
    # kthread = KeyboardThread(my_callback)
    # Console Layout
    console = Console()
    layout = Layout()
    # Divide the "screen" in to three parts
    layout.split(
        Layout(name="header", size=3),
        Layout(ratio=1, name="main")
        # Layout(size=10, name="footer"),
    )
    # Divide the "main" layout in to "side" and "body"
    layout["main"].split_row(
        Layout(name="side"),
        Layout(name="body",
               ratio=3))
    # Divide the "side" layout in to two
    layout["side"].split(Layout(name="side_top"), Layout(name="side_bottom"))
    # Server connection header
    header_text = Text(f"Connected to server: ")
    header_text.append(f"{server_proxy.hostname} ", style="green")
    if server_proxy.hostname == local_hostname:
        header_text.append("(This Computer)", style="yellow")
    else:
        header_text.append("(Remote)", style="magenta")
    # background process to update server data independent of the UI
    def fetch_server_data(server):
        # Poll the proxy forever, stashing the latest payload on the proxy
        # object itself as `fetched_status_data` for the UI loop to read.
        while True:
            fetched_data = server.get_data(timeout=5)
            if fetched_data:
                server.fetched_status_data = fetched_data
            time.sleep(1)
    x = threading.Thread(target=fetch_server_data, args=(server_proxy,))
    x.daemon = True  # don't block interpreter exit on this poller
    x.start()
    # draw and update the UI
    with Live(console=console, screen=False, refresh_per_second=1, transient=True) as live:
        while True:
            try:
                # NOTE(review): assumes RenderServerProxy pre-defines
                # `fetched_status_data`; if not, the first iterations raise
                # AttributeError until the poller's first fetch — confirm.
                if server_proxy.fetched_status_data:
                    # Server is considered online only if its reported
                    # timestamp is fresh (under 10 seconds old).
                    server_online = False
                    if server_proxy.fetched_status_data.get('timestamp', None):
                        timestamp = datetime.datetime.fromisoformat(server_proxy.fetched_status_data['timestamp'])
                        time_diff = datetime.datetime.now() - timestamp
                        server_online = time_diff.seconds < 10  # client is offline if not updated in certain time
                    layout["body"].update(create_jobs_table(server_proxy.fetched_status_data))
                    layout["side_top"].update(Panel(create_node_tree(server_proxy.fetched_status_data)))
                    layout["side_bottom"].update(Panel(create_status_panel(server_proxy.fetched_status_data)))
                    online_text = "Online" if server_online else "Offline"
                    online_color = "green" if server_online else "red"
                    layout["header"].update(Panel(Text(f"Zordon Render Client - Version 0.0.1 alpha - {online_text}",
                                                       justify="center", style=online_color)))
                    live.update(layout, refresh=False)
            except Exception as e:
                # Keep the dashboard alive on render errors; log and retry.
                print(f"Exception updating table: {e}")
                traceback.print_exception(e)
            time.sleep(1)
    # # # todo: Add input prompt to manage running jobs (ie add, cancel, get info, etc)

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from src import init from src import init
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -1,37 +1,15 @@
PyQt6>=6.6.1 requests==2.31.0
psutil>=5.9.8 psutil==5.9.6
requests>=2.31.0 PyYAML==6.0.1
Pillow>=10.2.0 Flask==3.0.0
PyYAML>=6.0.1 rich==13.6.0
flask>=3.0.2 Werkzeug~=3.0.1
tqdm>=4.66.2 json2html~=1.3.0
werkzeug>=3.0.1 SQLAlchemy~=2.0.15
Pypubsub>=4.0.3 Pillow==10.1.0
zeroconf>=0.131.0 zeroconf==0.119.0
SQLAlchemy>=2.0.25 Pypubsub~=4.0.3
plyer>=2.1.0 tqdm==4.66.1
pytz>=2023.3.post1 plyer==2.1.0
future>=0.18.3 PyQt6~=6.6.0
rich>=13.7.0 PySide6~=6.6.0
pytest>=8.0.0
numpy>=1.26.3
setuptools>=69.0.3
pandas>=2.2.0
matplotlib>=3.8.2
MarkupSafe>=2.1.4
dmglib>=0.9.5; sys_platform == 'darwin'
python-dateutil>=2.8.2
certifi>=2023.11.17
shiboken6>=6.6.1
Pygments>=2.17.2
cycler>=0.12.1
contourpy>=1.2.0
packaging>=23.2
fonttools>=4.47.2
Jinja2>=3.1.3
pyparsing>=3.1.1
kiwisolver>=1.4.5
attrs>=23.2.0
lxml>=5.1.0
click>=8.1.7
requests_toolbelt>=1.0.0

View File

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

Before

Width:  |  Height:  |  Size: 2.0 KiB

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

Before

Width:  |  Height:  |  Size: 6.1 KiB

After

Width:  |  Height:  |  Size: 6.1 KiB

View File

Before

Width:  |  Height:  |  Size: 921 B

After

Width:  |  Height:  |  Size: 921 B

View File

Before

Width:  |  Height:  |  Size: 476 B

After

Width:  |  Height:  |  Size: 476 B

View File

Before

Width:  |  Height:  |  Size: 979 B

After

Width:  |  Height:  |  Size: 979 B

View File

Before

Width:  |  Height:  |  Size: 2.2 KiB

After

Width:  |  Height:  |  Size: 2.2 KiB

View File

Before

Width:  |  Height:  |  Size: 4.7 KiB

After

Width:  |  Height:  |  Size: 4.7 KiB

View File

Before

Width:  |  Height:  |  Size: 2.2 KiB

After

Width:  |  Height:  |  Size: 2.2 KiB

View File

Before

Width:  |  Height:  |  Size: 450 B

After

Width:  |  Height:  |  Size: 450 B

View File

Before

Width:  |  Height:  |  Size: 1.3 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

Before

Width:  |  Height:  |  Size: 2.5 KiB

After

Width:  |  Height:  |  Size: 2.5 KiB

View File

Before

Width:  |  Height:  |  Size: 694 B

After

Width:  |  Height:  |  Size: 694 B

View File

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

Before

Width:  |  Height:  |  Size: 1.8 KiB

After

Width:  |  Height:  |  Size: 1.8 KiB

View File

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

Before

Width:  |  Height:  |  Size: 816 B

After

Width:  |  Height:  |  Size: 816 B

View File

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

Before

Width:  |  Height:  |  Size: 806 B

After

Width:  |  Height:  |  Size: 806 B

View File

@@ -1,28 +0,0 @@
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
import glob
from setuptools import setup
APP = ['main.py']
DATA_FILES = [('config', glob.glob('config/*.*')),
('resources', glob.glob('resources/*.*'))]
OPTIONS = {
'excludes': ['PySide6'],
'includes': ['zeroconf', 'zeroconf._services.info'],
'plist': {
'LSMinimumSystemVersion': '10.15', # Specify minimum macOS version
},
}
setup(
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
name='Zordon'
)

View File

@@ -10,28 +10,14 @@ import requests
from tqdm import tqdm from tqdm import tqdm
from werkzeug.utils import secure_filename from werkzeug.utils import secure_filename
from src.distributed_job_manager import DistributedJobManager
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue
logger = logging.getLogger() logger = logging.getLogger()
def handle_uploaded_project_files(request, jobs_list, upload_directory): def handle_uploaded_project_files(request, jobs_list, upload_directory):
"""
Handles the uploaded project files.
This method takes a request with a file, a list of jobs, and an upload directory. It checks if the file was uploaded
directly, if it needs to be downloaded from a URL, or if it's already present on the local file system. It then
moves the file to the appropriate directory and returns the local path to the file and its name.
Args:
request (Request): The request object containing the file.
jobs_list (list): A list of jobs. The first job in the list is used to get the file's URL and local path.
upload_directory (str): The directory where the file should be uploaded.
Raises:
ValueError: If no valid project paths are found.
Returns:
tuple: A tuple containing the local path to the loaded project file and its name.
"""
# Initialize default values # Initialize default values
loaded_project_local_path = None loaded_project_local_path = None
@@ -82,6 +68,7 @@ def download_project_from_url(project_url):
# This nested function is to handle downloading from a URL # This nested function is to handle downloading from a URL
logger.info(f"Downloading project from url: {project_url}") logger.info(f"Downloading project from url: {project_url}")
referred_name = os.path.basename(project_url) referred_name = os.path.basename(project_url)
downloaded_file_url = None
try: try:
response = requests.get(project_url, stream=True) response = requests.get(project_url, stream=True)
@@ -108,21 +95,7 @@ def download_project_from_url(project_url):
def process_zipped_project(zip_path): def process_zipped_project(zip_path):
""" # Given a zip path, extract its content, and return the main project file path
Processes a zipped project.
This method takes a path to a zip file, extracts its contents, and returns the path to the extracted project file.
If the zip file contains more than one project file or none, an error is raised.
Args:
zip_path (str): The path to the zip file.
Raises:
ValueError: If there's more than 1 project file or none in the zip file.
Returns:
str: The path to the main project file.
"""
work_path = os.path.dirname(zip_path) work_path = os.path.dirname(zip_path)
try: try:
@@ -149,3 +122,58 @@ def process_zipped_project(zip_path):
logger.error(f"Error processing zip file: {e}") logger.error(f"Error processing zip file: {e}")
raise ValueError(f"Error processing zip file: {e}") raise ValueError(f"Error processing zip file: {e}")
return extracted_project_path return extracted_project_path
def create_render_jobs(jobs_list, loaded_project_local_path, job_dir):
    """Create and enqueue a render worker for each job description.

    For each job dict, derives an output path next to the project file,
    builds a worker via EngineManager, applies the job's overrides
    (status, parent, name, priority, frame range), optionally splits it
    into subjobs, and adds it to the RenderQueue.

    Args:
        jobs_list (list[dict]): job descriptions; each needs 'renderer'
            and may carry 'output_path', 'engine_version', 'args',
            'initial_status', 'parent', 'name', 'priority',
            'start_frame', 'end_frame', 'enable_split_jobs',
            'force_start'.
        loaded_project_local_path (str): local path of the uploaded or
            downloaded project file.
        job_dir: NOTE(review): currently unused in this function —
            confirm whether it is vestigial or should feed the output
            directory below.

    Returns:
        list: one entry per job — the worker's json() dict on success,
        or {'error': message} on failure (errors do not abort the batch).
    """
    results = []
    for job_data in jobs_list:
        try:
            # get new output path in output_dir
            output_path = job_data.get('output_path')
            if not output_path:
                # No explicit output requested: name it after the project file.
                loaded_project_filename = os.path.basename(loaded_project_local_path)
                output_filename = os.path.splitext(loaded_project_filename)[0]
            else:
                output_filename = os.path.basename(output_path)
            # Prepare output path: an 'output' dir beside the project's parent dir.
            output_dir = os.path.join(os.path.dirname(os.path.dirname(loaded_project_local_path)), 'output')
            output_path = os.path.join(output_dir, output_filename)
            os.makedirs(output_dir, exist_ok=True)
            logger.debug(f"New job output path: {output_path}")
            # create & configure jobs
            worker = EngineManager.create_worker(renderer=job_data['renderer'],
                                                 input_path=loaded_project_local_path,
                                                 output_path=output_path,
                                                 engine_version=job_data.get('engine_version'),
                                                 args=job_data.get('args', {}))
            # Apply per-job overrides, falling back to the worker's defaults.
            worker.status = job_data.get("initial_status", worker.status)
            worker.parent = job_data.get("parent", worker.parent)
            worker.name = job_data.get("name", worker.name)
            worker.priority = int(job_data.get('priority', worker.priority))
            worker.start_frame = int(job_data.get("start_frame", worker.start_frame))
            worker.end_frame = int(job_data.get("end_frame", worker.end_frame))
            # determine if we can / should split the job
            if job_data.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent:
                DistributedJobManager.split_into_subjobs(worker, job_data, loaded_project_local_path)
            else:
                logger.debug("Not splitting into subjobs")
            RenderQueue.add_to_render_queue(worker, force_start=job_data.get('force_start', False))
            if not worker.parent:
                # Local import avoids a circular import with the API server module.
                from src.api.api_server import make_job_ready
                make_job_ready(worker.id)
            results.append(worker.json())
        except FileNotFoundError as e:
            err_msg = f"Cannot create job: {e}"
            logger.error(err_msg)
            results.append({'error': err_msg})
        except Exception as e:
            # Catch-all so one bad job doesn't abort the rest of the batch.
            err_msg = f"Exception creating render job: {e}"
            logger.exception(err_msg)
            results.append({'error': err_msg})
    return results

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import concurrent.futures
import json import json
import logging import logging
import multiprocessing import multiprocessing
@@ -14,11 +13,12 @@ import time
from datetime import datetime from datetime import datetime
from zipfile import ZipFile from zipfile import ZipFile
import json2html
import psutil import psutil
import yaml import yaml
from flask import Flask, request, send_file, after_this_request, Response, redirect, url_for, abort from flask import Flask, request, render_template, send_file, after_this_request, Response, redirect, url_for, abort
from src.api.add_job_helpers import handle_uploaded_project_files, process_zipped_project from src.api.add_job_helpers import handle_uploaded_project_files, process_zipped_project, create_render_jobs
from src.api.serverproxy_manager import ServerProxyManager from src.api.serverproxy_manager import ServerProxyManager
from src.distributed_job_manager import DistributedJobManager from src.distributed_job_manager import DistributedJobManager
from src.engines.core.base_worker import string_to_status, RenderStatus from src.engines.core.base_worker import string_to_status, RenderStatus
@@ -26,13 +26,12 @@ from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue, JobNotFoundError from src.render_queue import RenderQueue, JobNotFoundError
from src.utilities.config import Config from src.utilities.config import Config
from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \ from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu, \
current_system_os_version, num_to_alphanumeric current_system_os_version, config_dir
from src.utilities.server_helper import generate_thumbnail_for_job from src.utilities.server_helper import generate_thumbnail_for_job
from src.utilities.zeroconf_server import ZeroconfServer from src.utilities.zeroconf_server import ZeroconfServer
from src.utilities.benchmark import cpu_benchmark, disk_io_benchmark
logger = logging.getLogger() logger = logging.getLogger()
server = Flask(__name__) server = Flask(__name__, template_folder='web/templates', static_folder='web/static')
ssl._create_default_https_context = ssl._create_unverified_context # disable SSL for downloads ssl._create_default_https_context = ssl._create_unverified_context # disable SSL for downloads
categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED, categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
@@ -53,34 +52,45 @@ def sorted_jobs(all_jobs, sort_by_date=True):
return sorted_job_list return sorted_job_list
@server.route('/')
@server.route('/index')
def index():
with open(system_safe_path(os.path.join(config_dir(), 'presets.yaml'))) as f:
render_presets = yaml.load(f, Loader=yaml.FullLoader)
return render_template('index.html', all_jobs=sorted_jobs(RenderQueue.all_jobs()),
hostname=server.config['HOSTNAME'], renderer_info=renderer_info(),
render_clients=[server.config['HOSTNAME']], preset_list=render_presets)
@server.get('/api/jobs') @server.get('/api/jobs')
def jobs_json(): def jobs_json():
try: try:
hash_token = request.args.get('token', None)
all_jobs = [x.json() for x in RenderQueue.all_jobs()] all_jobs = [x.json() for x in RenderQueue.all_jobs()]
job_cache_int = int(json.dumps(all_jobs).__hash__()) job_cache_token = str(json.dumps(all_jobs).__hash__())
job_cache_token = num_to_alphanumeric(job_cache_int)
if hash_token and hash_token == job_cache_token:
return [], 204 # no need to update
else:
return {'jobs': all_jobs, 'token': job_cache_token} return {'jobs': all_jobs, 'token': job_cache_token}
except Exception as e: except Exception as e:
logger.exception(f"Exception fetching jobs_json: {e}") logger.exception(f"Exception fetching all_jobs_cached: {e}")
return {}, 500 return [], 500
@server.get('/api/jobs_long_poll') @server.route('/ui/job/<job_id>/full_details')
def long_polling_jobs(): def job_detail(job_id):
try: found_job = RenderQueue.job_with_id(job_id)
hash_token = request.args.get('token', None) table_html = json2html.json2html.convert(json=found_job.json(),
start_time = time.time() table_attributes='class="table is-narrow is-striped is-fullwidth"')
while True: media_url = None
all_jobs = jobs_json() if found_job.file_list() and found_job.status == RenderStatus.COMPLETED:
if all_jobs['token'] != hash_token: media_basename = os.path.basename(found_job.file_list()[0])
return all_jobs media_url = f"/api/job/{job_id}/file/{media_basename}"
# Break after 30 seconds to avoid gateway timeout return render_template('details.html', detail_table=table_html, media_url=media_url,
if time.time() - start_time > 30: hostname=server.config['HOSTNAME'], job_status=found_job.status.value.title(),
return {}, 204 job=found_job, renderer_info=renderer_info())
time.sleep(1)
except Exception as e:
logger.exception(f"Exception fetching long_polling_jobs: {e}")
return {}, 500
@server.route('/api/job/<job_id>/thumbnail') @server.route('/api/job/<job_id>/thumbnail')
@@ -120,8 +130,17 @@ def job_thumbnail(job_id):
elif os.path.exists(thumb_image_path): elif os.path.exists(thumb_image_path):
return send_file(thumb_image_path, mimetype='image/jpeg') return send_file(thumb_image_path, mimetype='image/jpeg')
return found_job.status.value, 200 # Misc status icons
return found_job.status.value, 404 if found_job.status == RenderStatus.RUNNING:
return send_file('../web/static/images/gears.png', mimetype="image/png")
elif found_job.status == RenderStatus.CANCELLED:
return send_file('../web/static/images/cancelled.png', mimetype="image/png")
elif found_job.status == RenderStatus.SCHEDULED:
return send_file('../web/static/images/scheduled.png', mimetype="image/png")
elif found_job.status == RenderStatus.NOT_STARTED:
return send_file('../web/static/images/not_started.png', mimetype="image/png")
# errors
return send_file('../web/static/images/error.png', mimetype="image/png")
# Get job file routing # Get job file routing
@@ -183,6 +202,24 @@ def get_file_list(job_id):
return RenderQueue.job_with_id(job_id).file_list() return RenderQueue.job_with_id(job_id).file_list()
@server.get('/api/job/<job_id>/make_ready')
def make_job_ready(job_id):
try:
found_job = RenderQueue.job_with_id(job_id)
if found_job.status in [RenderStatus.CONFIGURING, RenderStatus.NOT_STARTED]:
if found_job.children:
for child_key in found_job.children.keys():
child_id = child_key.split('@')[0]
hostname = child_key.split('@')[-1]
ServerProxyManager.get_proxy_for_hostname(hostname).request_data(f'job/{child_id}/make_ready')
found_job.status = RenderStatus.NOT_STARTED
RenderQueue.save_state()
return found_job.json(), 200
except Exception as e:
return "Error making job ready: {e}", 500
return "Not valid command", 405
@server.route('/api/job/<job_id>/download_all') @server.route('/api/job/<job_id>/download_all')
def download_all(job_id): def download_all(job_id):
zip_filename = None zip_filename = None
@@ -190,10 +227,7 @@ def download_all(job_id):
@after_this_request @after_this_request
def clear_zip(response): def clear_zip(response):
if zip_filename and os.path.exists(zip_filename): if zip_filename and os.path.exists(zip_filename):
try:
os.remove(zip_filename) os.remove(zip_filename)
except Exception as e:
logger.warning(f"Error removing zip file '{zip_filename}': {e}")
return response return response
found_job = RenderQueue.job_with_id(job_id) found_job = RenderQueue.job_with_id(job_id)
@@ -214,8 +248,8 @@ def download_all(job_id):
def presets(): def presets():
presets_path = system_safe_path('config/presets.yaml') presets_path = system_safe_path('config/presets.yaml')
with open(presets_path) as f: with open(presets_path) as f:
loaded_presets = yaml.load(f, Loader=yaml.FullLoader) presets = yaml.load(f, Loader=yaml.FullLoader)
return loaded_presets return presets
@server.get('/api/full_status') @server.get('/api/full_status')
@@ -257,7 +291,18 @@ def add_job_handler():
elif request.form.get('json', None): elif request.form.get('json', None):
jobs_list = json.loads(request.form['json']) jobs_list = json.loads(request.form['json'])
else: else:
return "Invalid data", 400 # Cleanup flat form data into nested structure
form_dict = {k: v for k, v in dict(request.form).items() if v}
args = {}
arg_keys = [k for k in form_dict.keys() if '-arg_' in k]
for server_hostname in arg_keys:
if form_dict['renderer'] in server_hostname or 'AnyRenderer' in server_hostname:
cleaned_key = server_hostname.split('-arg_')[-1]
args[cleaned_key] = form_dict[server_hostname]
form_dict.pop(server_hostname)
args['raw'] = form_dict.get('raw_args', None)
form_dict['args'] = args
jobs_list = [form_dict]
except Exception as e: except Exception as e:
err_msg = f"Error processing job data: {e}" err_msg = f"Error processing job data: {e}"
logger.error(err_msg) logger.error(err_msg)
@@ -269,13 +314,16 @@ def add_job_handler():
if loaded_project_local_path.lower().endswith('.zip'): if loaded_project_local_path.lower().endswith('.zip'):
loaded_project_local_path = process_zipped_project(loaded_project_local_path) loaded_project_local_path = process_zipped_project(loaded_project_local_path)
results = [] results = create_render_jobs(jobs_list, loaded_project_local_path, referred_name)
for new_job_data in jobs_list: for response in results:
new_job = DistributedJobManager.create_render_job(new_job_data, loaded_project_local_path) if response.get('error', None):
results.append(new_job.json()) return results, 400
if request.args.get('redirect', False):
return redirect(url_for('index'))
else:
return results, 200 return results, 200
except Exception as e: except Exception as e:
logger.exception(f"Error adding job: {e}") logger.exception(f"Unknown error adding job: {e}")
return 'unknown error', 500 return 'unknown error', 500
@@ -342,6 +390,13 @@ def clear_history():
@server.route('/api/status') @server.route('/api/status')
def status(): def status():
renderer_data = {}
for render_class in EngineManager.supported_engines():
if EngineManager.all_versions_for_engine(render_class.name): # only return renderers installed on host
renderer_data[render_class.engine.name()] = \
{'versions': EngineManager.all_versions_for_engine(render_class.engine.name()),
'is_available': RenderQueue.is_available_for_job(render_class.engine.name())
}
# Get system info # Get system info
return {"timestamp": datetime.now().isoformat(), return {"timestamp": datetime.now().isoformat(),
@@ -355,6 +410,7 @@ def status():
"memory_available": psutil.virtual_memory().available, "memory_available": psutil.virtual_memory().available,
"memory_percent": psutil.virtual_memory().percent, "memory_percent": psutil.virtual_memory().percent,
"job_counts": RenderQueue.job_counts(), "job_counts": RenderQueue.job_counts(),
"renderers": renderer_data,
"hostname": server.config['HOSTNAME'], "hostname": server.config['HOSTNAME'],
"port": server.config['PORT'] "port": server.config['PORT']
} }
@@ -362,53 +418,18 @@ def status():
@server.get('/api/renderer_info') @server.get('/api/renderer_info')
def renderer_info(): def renderer_info():
renderer_data = {}
response_type = request.args.get('response_type', 'standard') for engine in EngineManager.supported_engines():
# Get all installed versions of engine
def process_engine(engine):
try:
# Get all installed versions of the engine
installed_versions = EngineManager.all_versions_for_engine(engine.name()) installed_versions = EngineManager.all_versions_for_engine(engine.name())
if installed_versions: if installed_versions:
# Use system-installed versions to avoid permission issues # fixme: using system versions only because downloaded versions may have permissions issues
system_installed_versions = [x for x in installed_versions if x['type'] == 'system'] system_installed_versions = [x for x in installed_versions if x['type'] == 'system']
install_path = system_installed_versions[0]['path'] if system_installed_versions else \ install_path = system_installed_versions[0]['path'] if system_installed_versions else installed_versions[0]['path']
installed_versions[0]['path'] renderer_data[engine.name()] = {'is_available': RenderQueue.is_available_for_job(engine.name()),
en = engine(install_path)
if response_type == 'full': # Full dataset - Can be slow
return {
en.name(): {
'is_available': RenderQueue.is_available_for_job(en.name()),
'versions': installed_versions, 'versions': installed_versions,
'supported_extensions': engine.supported_extensions(), 'supported_extensions': engine.supported_extensions(),
'supported_export_formats': en.get_output_formats(), 'supported_export_formats': engine(install_path).get_output_formats()}
'system_info': en.system_info()
}
}
elif response_type == 'standard': # Simpler dataset to reduce response times
return {
en.name(): {
'is_available': RenderQueue.is_available_for_job(en.name()),
'versions': installed_versions,
}
}
else:
raise AttributeError(f"Invalid response_type: {response_type}")
except Exception as e:
logger.error(f'Error fetching details for {engine.name()} renderer: {e}')
return {}
renderer_data = {}
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = {executor.submit(process_engine, engine): engine.name() for engine in EngineManager.supported_engines()}
for future in concurrent.futures.as_completed(futures):
result = future.result()
if result:
renderer_data.update(result)
return renderer_data return renderer_data
@@ -463,7 +484,6 @@ def delete_engine_download():
@server.get('/api/renderer/<renderer>/args') @server.get('/api/renderer/<renderer>/args')
def get_renderer_args(renderer): def get_renderer_args(renderer):
try: try:
# todo: possibly deprecate
renderer_engine_class = EngineManager.engine_with_name(renderer) renderer_engine_class = EngineManager.engine_with_name(renderer)
return renderer_engine_class().get_arguments() return renderer_engine_class().get_arguments()
except LookupError: except LookupError:
@@ -479,15 +499,9 @@ def get_renderer_help(renderer):
return f"Cannot find renderer '{renderer}'", 400 return f"Cannot find renderer '{renderer}'", 400
@server.get('/api/cpu_benchmark') @server.route('/upload')
def get_cpu_benchmark_score(): def upload_file_page():
return str(cpu_benchmark(10)) return render_template('upload.html', supported_renderers=EngineManager.supported_engines())
@server.get('/api/disk_benchmark')
def get_disk_benchmark():
results = disk_io_benchmark()
return {'write_speed': results[0], 'read_speed': results[-1]}
def start_server(): def start_server():
@@ -522,8 +536,8 @@ def start_server():
flask_log = logging.getLogger('werkzeug') flask_log = logging.getLogger('werkzeug')
flask_log.setLevel(Config.flask_log_level.upper()) flask_log.setLevel(Config.flask_log_level.upper())
# check for updates for render engines if configured or on first launch # check for updates for render engines if config'd or on first launch
if Config.update_engines_on_launch or not EngineManager.get_engines(): if Config.update_engines_on_launch or not EngineManager.all_engines():
EngineManager.update_all_engines() EngineManager.update_all_engines()
# Set up the RenderQueue object # Set up the RenderQueue object

View File

@@ -1,41 +1,28 @@
import json import json
import logging import logging
import os import os
import socket
import threading import threading
import time import time
import requests import requests
from requests_toolbelt.multipart import MultipartEncoder, MultipartEncoderMonitor from requests_toolbelt.multipart import MultipartEncoder, MultipartEncoderMonitor
from urllib.parse import urljoin
from src.utilities.misc_helper import is_localhost from src.utilities.misc_helper import is_localhost
from src.utilities.status_utils import RenderStatus from src.utilities.status_utils import RenderStatus
from src.utilities.zeroconf_server import ZeroconfServer
status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green', status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green',
RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple', RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple',
RenderStatus.RUNNING: 'cyan', RenderStatus.WAITING_FOR_SUBJOBS: 'blue'} RenderStatus.RUNNING: 'cyan', RenderStatus.WAITING_FOR_SUBJOBS: 'blue'}
categories = [RenderStatus.RUNNING, RenderStatus.WAITING_FOR_SUBJOBS, RenderStatus.ERROR, RenderStatus.NOT_STARTED, categories = [RenderStatus.RUNNING, RenderStatus.WAITING_FOR_SUBJOBS, RenderStatus.ERROR, RenderStatus.NOT_STARTED,
RenderStatus.SCHEDULED, RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.UNDEFINED, RenderStatus.SCHEDULED, RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.UNDEFINED]
RenderStatus.CONFIGURING]
logger = logging.getLogger() logger = logging.getLogger()
OFFLINE_MAX = 4 OFFLINE_MAX = 2
LOOPBACK = '127.0.0.1'
class RenderServerProxy: class RenderServerProxy:
"""
The ServerProxy class is responsible for interacting with a remote server.
It provides methods to request data from the server and store the status of the server.
Attributes:
system_cpu (str): The CPU type of the system.
system_cpu_count (int): The number of CPUs in the system.
system_os (str): The operating system of the system.
system_os_version (str): The version of the operating system.
"""
def __init__(self, hostname, server_port="8080"): def __init__(self, hostname, server_port="8080"):
self.hostname = hostname self.hostname = hostname
@@ -47,7 +34,6 @@ class RenderServerProxy:
self.__background_thread = None self.__background_thread = None
self.__offline_flags = 0 self.__offline_flags = 0
self.update_cadence = 5 self.update_cadence = 5
self.is_localhost = bool(is_localhost(hostname))
# Cache some basic server info # Cache some basic server info
self.system_cpu = None self.system_cpu = None
@@ -55,9 +41,6 @@ class RenderServerProxy:
self.system_os = None self.system_os = None
self.system_os_version = None self.system_os_version = None
def __repr__(self):
return f"<RenderServerProxy - {self.hostname}>"
def connect(self): def connect(self):
return self.status() return self.status()
@@ -65,7 +48,7 @@ class RenderServerProxy:
if self.__update_in_background: if self.__update_in_background:
return self.__offline_flags < OFFLINE_MAX return self.__offline_flags < OFFLINE_MAX
else: else:
return self.get_status() is not None return self.connect() is not None
def status(self): def status(self):
if not self.is_online(): if not self.is_online():
@@ -76,9 +59,8 @@ class RenderServerProxy:
def request_data(self, payload, timeout=5): def request_data(self, payload, timeout=5):
try: try:
req = self.request(payload, timeout) req = self.request(payload, timeout)
if req.ok: if req.ok and req.status_code == 200:
self.__offline_flags = 0 self.__offline_flags = 0
if req.status_code == 200:
return req.json() return req.json()
except json.JSONDecodeError as e: except json.JSONDecodeError as e:
logger.debug(f"JSON decode error: {e}") logger.debug(f"JSON decode error: {e}")
@@ -90,18 +72,10 @@ class RenderServerProxy:
self.__offline_flags = self.__offline_flags + 1 self.__offline_flags = self.__offline_flags + 1
except Exception as e: except Exception as e:
logger.exception(f"Uncaught exception: {e}") logger.exception(f"Uncaught exception: {e}")
# If server unexpectedly drops off the network, stop background updates
if self.__offline_flags > OFFLINE_MAX:
try:
self.stop_background_update()
except KeyError:
pass
return None return None
def request(self, payload, timeout=5): def request(self, payload, timeout=5):
hostname = LOOPBACK if self.is_localhost else self.hostname return requests.get(f'http://{self.hostname}:{self.port}/api/{payload}', timeout=timeout)
return requests.get(f'http://{hostname}:{self.port}/api/{payload}', timeout=timeout)
def start_background_update(self): def start_background_update(self):
if self.__update_in_background: if self.__update_in_background:
@@ -109,11 +83,9 @@ class RenderServerProxy:
self.__update_in_background = True self.__update_in_background = True
def thread_worker(): def thread_worker():
logger.debug(f'Starting background updates for {self.hostname}')
while self.__update_in_background: while self.__update_in_background:
self.__update_job_cache() self.__update_job_cache()
time.sleep(self.update_cadence) time.sleep(self.update_cadence)
logger.debug(f'Stopping background updates for {self.hostname}')
self.__background_thread = threading.Thread(target=thread_worker) self.__background_thread = threading.Thread(target=thread_worker)
self.__background_thread.daemon = True self.__background_thread.daemon = True
@@ -130,13 +102,8 @@ class RenderServerProxy:
self.__update_job_cache(timeout, ignore_token) self.__update_job_cache(timeout, ignore_token)
return self.__jobs_cache.copy() if self.__jobs_cache else None return self.__jobs_cache.copy() if self.__jobs_cache else None
def __update_job_cache(self, timeout=40, ignore_token=False): def __update_job_cache(self, timeout=5, ignore_token=False):
url = f'jobs?token={self.__jobs_cache_token}' if self.__jobs_cache_token and not ignore_token else 'jobs'
if self.__offline_flags: # if we're offline, don't bother with the long poll
ignore_token = True
url = f'jobs_long_poll?token={self.__jobs_cache_token}' if (self.__jobs_cache_token and
not ignore_token) else 'jobs'
status_result = self.request_data(url, timeout=timeout) status_result = self.request_data(url, timeout=timeout)
if status_result is not None: if status_result is not None:
sorted_jobs = [] sorted_jobs = []
@@ -148,7 +115,8 @@ class RenderServerProxy:
self.__jobs_cache_token = status_result['token'] self.__jobs_cache_token = status_result['token']
def get_data(self, timeout=5): def get_data(self, timeout=5):
return self.request_data('full_status', timeout=timeout) all_data = self.request_data('full_status', timeout=timeout)
return all_data
def cancel_job(self, job_id, confirm=False): def cancel_job(self, job_id, confirm=False):
return self.request_data(f'job/{job_id}/cancel?confirm={confirm}') return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')
@@ -158,7 +126,7 @@ class RenderServerProxy:
def get_status(self): def get_status(self):
status = self.request_data('status') status = self.request_data('status')
if status and not self.system_cpu: if not self.system_cpu:
self.system_cpu = status['system_cpu'] self.system_cpu = status['system_cpu']
self.system_cpu_count = status['cpu_count'] self.system_cpu_count = status['cpu_count']
self.system_os = status['system_os'] self.system_os = status['system_os']
@@ -172,69 +140,35 @@ class RenderServerProxy:
return self.request_data('all_engines') return self.request_data('all_engines')
def notify_parent_of_status_change(self, parent_id, subjob): def notify_parent_of_status_change(self, parent_id, subjob):
""" return requests.post(f'http://{self.hostname}:{self.port}/api/job/{parent_id}/notify_parent_of_status_change',
Notifies the parent job of a status change in a subjob.
Args:
parent_id (str): The ID of the parent job.
subjob (Job): The subjob that has changed status.
Returns:
Response: The response from the server.
"""
hostname = LOOPBACK if self.is_localhost else self.hostname
return requests.post(f'http://{hostname}:{self.port}/api/job/{parent_id}/notify_parent_of_status_change',
json=subjob.json()) json=subjob.json())
def post_job_to_server(self, file_path, job_list, callback=None): def post_job_to_server(self, file_path, job_list, callback=None):
"""
Posts a job to the server.
Args: # bypass uploading file if posting to localhost
file_path (str): The path to the file to upload. if is_localhost(self.hostname):
job_list (list): A list of jobs to post. jobs_with_path = [{**item, "local_path": file_path} for item in job_list]
callback (function, optional): A callback function to call during the upload. Defaults to None. return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=json.dumps(jobs_with_path),
headers={'Content-Type': 'application/json'})
Returns: # Prepare the form data
Response: The response from the server.
"""
try:
# Check if file exists
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")
# Bypass uploading file if posting to localhost
if self.is_localhost:
jobs_with_path = [{'local_path': file_path, **item} for item in job_list]
job_data = json.dumps(jobs_with_path)
url = urljoin(f'http://{LOOPBACK}:{self.port}', '/api/add_job')
headers = {'Content-Type': 'application/json'}
return requests.post(url, data=job_data, headers=headers)
# Prepare the form data for remote host
with open(file_path, 'rb') as file:
encoder = MultipartEncoder({ encoder = MultipartEncoder({
'file': (os.path.basename(file_path), file, 'application/octet-stream'), 'file': (os.path.basename(file_path), open(file_path, 'rb'), 'application/octet-stream'),
'json': (None, json.dumps(job_list), 'application/json'), 'json': (None, json.dumps(job_list), 'application/json'),
}) })
# Create a monitor that will track the upload progress # Create a monitor that will track the upload progress
monitor = MultipartEncoderMonitor(encoder, callback) if callback else MultipartEncoderMonitor(encoder) if callback:
monitor = MultipartEncoderMonitor(encoder, callback(encoder))
else:
monitor = MultipartEncoderMonitor(encoder)
# Send the request
headers = {'Content-Type': monitor.content_type} headers = {'Content-Type': monitor.content_type}
url = urljoin(f'http://{self.hostname}:{self.port}', '/api/add_job') return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=monitor, headers=headers)
# Send the request with proper resource management
with requests.post(url, data=monitor, headers=headers) as response:
return response
except requests.ConnectionError as e:
logger.error(f"Connection error: {e}")
except Exception as e:
logger.error(f"An error occurred: {e}")
def get_job_files(self, job_id, save_path): def get_job_files(self, job_id, save_path):
hostname = LOOPBACK if self.is_localhost else self.hostname url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
url = f"http://{hostname}:{self.port}/api/job/{job_id}/download_all"
return self.download_file(url, filename=save_path) return self.download_file(url, filename=save_path)
@staticmethod @staticmethod
@@ -248,32 +182,10 @@ class RenderServerProxy:
# --- Renderer --- # # --- Renderer --- #
def get_renderer_info(self, response_type='standard', timeout=5): def get_renderer_info(self, timeout=5):
""" all_data = self.request_data(f'renderer_info', timeout=timeout)
Fetches renderer information from the server.
Args:
response_type (str, optional): Returns standard or full version of renderer info
timeout (int, optional): The number of seconds to wait for a response from the server. Defaults to 5.
Returns:
dict: A dictionary containing the renderer information.
"""
all_data = self.request_data(f"renderer_info?response_type={response_type}", timeout=timeout)
return all_data return all_data
def delete_engine(self, engine, version, system_cpu=None): def delete_engine(self, engine, version, system_cpu=None):
"""
Sends a request to the server to delete a specific engine.
Args:
engine (str): The name of the engine to delete.
version (str): The version of the engine to delete.
system_cpu (str, optional): The system CPU type. Defaults to None.
Returns:
Response: The response from the server.
"""
form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu} form_data = {'engine': engine, 'version': version, 'system_cpu': system_cpu}
hostname = LOOPBACK if self.is_localhost else self.hostname return requests.post(f'http://{self.hostname}:{self.port}/api/delete_engine', json=form_data)
return requests.post(f'http://{hostname}:{self.port}/api/delete_engine', json=form_data)

View File

@@ -17,19 +17,19 @@ class ServerProxyManager:
pub.subscribe(cls.__zeroconf_state_change, 'zeroconf_state_change') pub.subscribe(cls.__zeroconf_state_change, 'zeroconf_state_change')
@classmethod @classmethod
def __zeroconf_state_change(cls, hostname, state_change): def __zeroconf_state_change(cls, hostname, state_change, info):
if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated: if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated:
cls.get_proxy_for_hostname(hostname) cls.get_proxy_for_hostname(hostname)
else: else:
cls.get_proxy_for_hostname(hostname).stop_background_update()
cls.server_proxys.pop(hostname) cls.server_proxys.pop(hostname)
@classmethod @classmethod
def get_proxy_for_hostname(cls, hostname): def get_proxy_for_hostname(cls, hostname):
found_proxy = cls.server_proxys.get(hostname) found_proxy = cls.server_proxys.get(hostname)
if hostname and not found_proxy: if not found_proxy:
new_proxy = RenderServerProxy(hostname) new_proxy = RenderServerProxy(hostname)
new_proxy.start_background_update() new_proxy.start_background_update()
cls.server_proxys[hostname] = new_proxy cls.server_proxys[hostname] = new_proxy
found_proxy = new_proxy found_proxy = new_proxy
return found_proxy return found_proxy

View File

@@ -1,17 +1,13 @@
import logging import logging
import os import os
import socket import socket
import threading
import time import time
import zipfile import zipfile
from concurrent.futures import ThreadPoolExecutor
import requests
from plyer import notification from plyer import notification
from pubsub import pub from pubsub import pub
from src.api.server_proxy import RenderServerProxy from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue from src.render_queue import RenderQueue
from src.utilities.misc_helper import get_file_size_human from src.utilities.misc_helper import get_file_size_human
from src.utilities.status_utils import RenderStatus, string_to_status from src.utilities.status_utils import RenderStatus, string_to_status
@@ -65,21 +61,21 @@ class DistributedJobManager:
# UI Notifications # UI Notifications
try: try:
if new_status == RenderStatus.COMPLETED: if new_status == RenderStatus.COMPLETED:
logger.debug("Show render complete notification") logger.debug("show render complete notification")
notification.notify( notification.notify(
title='Render Job Complete', title='Render Job Complete',
message=f'{render_job.name} completed succesfully', message=f'{render_job.name} completed succesfully',
timeout=10 # Display time in seconds timeout=10 # Display time in seconds
) )
elif new_status == RenderStatus.ERROR: elif new_status == RenderStatus.ERROR:
logger.debug("Show render error notification") logger.debug("show render complete notification")
notification.notify( notification.notify(
title='Render Job Failed', title='Render Job Failed',
message=f'{render_job.name} failed rendering', message=f'{render_job.name} failed rendering',
timeout=10 # Display time in seconds timeout=10 # Display time in seconds
) )
elif new_status == RenderStatus.RUNNING: elif new_status == RenderStatus.RUNNING:
logger.debug("Show render started notification") logger.debug("show render complete notification")
notification.notify( notification.notify(
title='Render Job Started', title='Render Job Started',
message=f'{render_job.name} started rendering', message=f'{render_job.name} started rendering',
@@ -88,76 +84,17 @@ class DistributedJobManager:
except Exception as e: except Exception as e:
logger.debug(f"Unable to show UI notification: {e}") logger.debug(f"Unable to show UI notification: {e}")
# --------------------------------------------
# Create Job
# --------------------------------------------
@classmethod
def create_render_job(cls, job_data, loaded_project_local_path):
"""
Creates render jobs.
This method takes a list of job data, a local path to a loaded project, and a job directory. It creates a render
job for each job data in the list and appends the result to a list. The list of results is then returned.
Args:
job_data (dict): Job data.
loaded_project_local_path (str): The local path to the loaded project.
Returns:
worker: Created job worker
"""
# get new output path in output_dir
output_path = job_data.get('output_path')
if not output_path:
loaded_project_filename = os.path.basename(loaded_project_local_path)
output_filename = os.path.splitext(loaded_project_filename)[0]
else:
output_filename = os.path.basename(output_path)
# Prepare output path
output_dir = os.path.join(os.path.dirname(os.path.dirname(loaded_project_local_path)), 'output')
output_path = os.path.join(output_dir, output_filename)
os.makedirs(output_dir, exist_ok=True)
logger.debug(f"New job output path: {output_path}")
# create & configure jobs
worker = EngineManager.create_worker(renderer=job_data['renderer'],
input_path=loaded_project_local_path,
output_path=output_path,
engine_version=job_data.get('engine_version'),
args=job_data.get('args', {}),
parent=job_data.get('parent'),
name=job_data.get('name'))
worker.status = job_data.get("initial_status", worker.status) # todo: is this necessary?
worker.priority = int(job_data.get('priority', worker.priority))
worker.start_frame = int(job_data.get("start_frame", worker.start_frame))
worker.end_frame = int(job_data.get("end_frame", worker.end_frame))
worker.hostname = socket.gethostname()
# determine if we can / should split the job
if job_data.get("enable_split_jobs", False) and (worker.total_frames > 1) and not worker.parent:
cls.split_into_subjobs_async(worker, job_data, loaded_project_local_path)
else:
logger.debug("Not splitting into subjobs")
RenderQueue.add_to_render_queue(worker, force_start=job_data.get('force_start', False))
return worker
# --------------------------------------------
# Handling Subjobs
# --------------------------------------------
@classmethod @classmethod
def handle_subjob_status_change(cls, local_job, subjob_data): def handle_subjob_status_change(cls, local_job, subjob_data):
""" """
Responds to a status change from a remote subjob and triggers the creation or modification of subjobs as needed. Responds to a status change from a remote subjob and triggers the creation or modification of subjobs as needed.
Args: Parameters:
local_job (BaseRenderWorker): The local parent job worker. local_job (BaseRenderWorker): The local parent job worker.
subjob_data (dict): Subjob data sent from the remote server. subjob_data (dict): subjob data sent from remote server.
Returns:
None
""" """
subjob_status = string_to_status(subjob_data['status']) subjob_status = string_to_status(subjob_data['status'])
@@ -206,7 +143,7 @@ class DistributedJobManager:
RenderServerProxy(subjob_hostname).get_job_files(subjob_id, zip_file_path) RenderServerProxy(subjob_hostname).get_job_files(subjob_id, zip_file_path)
logger.info(f"File transfer complete for {logname} - Transferred {get_file_size_human(zip_file_path)}") logger.info(f"File transfer complete for {logname} - Transferred {get_file_size_human(zip_file_path)}")
except Exception as e: except Exception as e:
logger.error(f"Error downloading files from remote server: {e}") logger.exception(f"Exception downloading files from remote server: {e}")
local_job.children[child_key]['download_status'] = 'failed' local_job.children[child_key]['download_status'] = 'failed'
return False return False
@@ -282,112 +219,87 @@ class DistributedJobManager:
f"{', '.join(list(subjobs_not_downloaded().keys()))}") f"{', '.join(list(subjobs_not_downloaded().keys()))}")
time.sleep(5) time.sleep(5)
# --------------------------------------------
# Creating Subjobs
# --------------------------------------------
@classmethod @classmethod
def split_into_subjobs_async(cls, parent_worker, job_data, project_path, system_os=None): def split_into_subjobs(cls, worker, job_data, project_path, system_os=None):
# todo: I don't love this
parent_worker.status = RenderStatus.CONFIGURING
cls.background_worker = threading.Thread(target=cls.split_into_subjobs, args=(parent_worker, job_data,
project_path, system_os))
cls.background_worker.start()
@classmethod
def split_into_subjobs(cls, parent_worker, job_data, project_path, system_os=None, specific_servers=None):
"""
Splits a job into subjobs and distributes them among available servers.
This method checks the availability of servers, distributes the work among them, and creates subjobs on each
server. If a server is the local host, it adjusts the frame range of the parent job instead of creating a
subjob.
Args:
parent_worker (Worker): The worker that is handling the job.
job_data (dict): The data for the job to be split.
project_path (str): The path to the project associated with the job.
system_os (str, optional): The operating system of the servers. Default is any OS.
specific_servers (list, optional): List of specific servers to split work between. Defaults to all found.
"""
# Check availability # Check availability
parent_worker.status = RenderStatus.CONFIGURING available_servers = cls.find_available_servers(worker.renderer, system_os)
available_servers = specific_servers if specific_servers else cls.find_available_servers(parent_worker.renderer, system_os)
logger.debug(f"Splitting into subjobs - Available servers: {available_servers}") logger.debug(f"Splitting into subjobs - Available servers: {available_servers}")
all_subjob_server_data = cls.distribute_server_work(parent_worker.start_frame, parent_worker.end_frame, available_servers) subjob_servers = cls.distribute_server_work(worker.start_frame, worker.end_frame, available_servers)
local_hostname = socket.gethostname()
# Prep and submit these sub-jobs # Prep and submit these sub-jobs
logger.info(f"Job {parent_worker.id} split plan: {all_subjob_server_data}") logger.info(f"Job {worker.id} split plan: {subjob_servers}")
try: try:
for subjob_data in all_subjob_server_data: for server_data in subjob_servers:
subjob_hostname = subjob_data['hostname'] server_hostname = server_data['hostname']
if subjob_hostname != parent_worker.hostname: if server_hostname != local_hostname:
post_results = cls.__create_subjob(job_data, project_path, subjob_data, subjob_hostname, post_results = cls.__create_subjob(job_data, local_hostname, project_path, server_data,
parent_worker) server_hostname, worker)
if not post_results.ok: if post_results.ok:
ValueError(f"Failed to create subjob on {subjob_hostname}") server_data['submission_results'] = post_results.json()[0]
else:
# save child info logger.error(f"Failed to create subjob on {server_hostname}")
submission_results = post_results.json()[0] break
child_key = f"{submission_results['id']}@{subjob_hostname}"
parent_worker.children[child_key] = submission_results
else: else:
# truncate parent render_job # truncate parent render_job
parent_worker.start_frame = max(subjob_data['frame_range'][0], parent_worker.start_frame) worker.start_frame = max(server_data['frame_range'][0], worker.start_frame)
parent_worker.end_frame = min(subjob_data['frame_range'][-1], parent_worker.end_frame) worker.end_frame = min(server_data['frame_range'][-1], worker.end_frame)
logger.info(f"Local job now rendering from {parent_worker.start_frame} to {parent_worker.end_frame}") logger.info(f"Local job now rendering from {worker.start_frame} to {worker.end_frame}")
server_data['submission_results'] = worker.json()
# check that job posts were all successful.
if not all(d.get('submission_results') is not None for d in subjob_servers):
raise ValueError("Failed to create all subjobs") # look into recalculating job #s and use exising jobs
# start subjobs # start subjobs
logger.debug(f"Created {len(all_subjob_server_data) - 1} subjobs successfully") logger.debug(f"Starting {len(subjob_servers) - 1} attempted subjobs")
parent_worker.name = f"{parent_worker.name}[{parent_worker.start_frame}-{parent_worker.end_frame}]" for server_data in subjob_servers:
parent_worker.status = RenderStatus.NOT_STARTED # todo: this won't work with scheduled starts if server_data['hostname'] != local_hostname:
child_key = f"{server_data['submission_results']['id']}@{server_data['hostname']}"
worker.children[child_key] = server_data['submission_results']
worker.name = f"{worker.name}[{worker.start_frame}-{worker.end_frame}]"
except Exception as e: except Exception as e:
# cancel all the subjobs # cancel all the subjobs
logger.error(f"Failed to split job into subjobs: {e}") logger.error(f"Failed to split job into subjobs: {e}")
logger.debug(f"Cancelling {len(all_subjob_server_data) - 1} attempted subjobs") logger.debug(f"Cancelling {len(subjob_servers) - 1} attempted subjobs")
RenderServerProxy(parent_worker.hostname).cancel_job(parent_worker.id, confirm=True) # [RenderServerProxy(hostname).cancel_job(results['id'], confirm=True) for hostname, results in
# submission_results.items()] # todo: fix this
@staticmethod @staticmethod
def __create_subjob(job_data, project_path, server_data, server_hostname, parent_worker): def __create_subjob(job_data, local_hostname, project_path, server_data, server_hostname, worker):
subjob = job_data.copy() subjob = job_data.copy()
subjob['name'] = f"{parent_worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]" subjob['name'] = f"{worker.name}[{server_data['frame_range'][0]}-{server_data['frame_range'][-1]}]"
subjob['parent'] = f"{parent_worker.id}@{parent_worker.hostname}" subjob['parent'] = f"{worker.id}@{local_hostname}"
subjob['start_frame'] = server_data['frame_range'][0] subjob['start_frame'] = server_data['frame_range'][0]
subjob['end_frame'] = server_data['frame_range'][-1] subjob['end_frame'] = server_data['frame_range'][-1]
subjob['engine_version'] = parent_worker.renderer_version
logger.debug(f"Posting subjob with frames {subjob['start_frame']}-" logger.debug(f"Posting subjob with frames {subjob['start_frame']}-"
f"{subjob['end_frame']} to {server_hostname}") f"{subjob['end_frame']} to {server_hostname}")
post_results = RenderServerProxy(server_hostname).post_job_to_server( post_results = RenderServerProxy(server_hostname).post_job_to_server(
file_path=project_path, job_list=[subjob]) file_path=project_path, job_list=[subjob])
return post_results return post_results
# --------------------------------------------
# Server Handling
# --------------------------------------------
@staticmethod @staticmethod
def distribute_server_work(start_frame, end_frame, available_servers, method='cpu_benchmark'): def distribute_server_work(start_frame, end_frame, available_servers, method='cpu_count'):
""" """
Splits the frame range among available servers proportionally based on their performance (CPU count). Splits the frame range among available servers proportionally based on their performance (CPU count).
Args: :param start_frame: int, The start frame number of the animation to be rendered.
start_frame (int): The start frame number of the animation to be rendered. :param end_frame: int, The end frame number of the animation to be rendered.
end_frame (int): The end frame number of the animation to be rendered. :param available_servers: list, A list of available server dictionaries. Each server dictionary should include
available_servers (list): A list of available server dictionaries. Each server dictionary should include 'hostname' and 'cpu_count' keys (see find_available_servers)
'hostname' and 'cpu_count' keys (see find_available_servers). :param method: str, Optional. Specifies the distribution method. Possible values are 'cpu_count' and 'equally'
method (str, optional): Specifies the distribution method. Possible values are 'cpu_benchmark', 'cpu_count'
and 'evenly'.
Defaults to 'cpu_benchmark'.
Returns:
list: A list of server dictionaries where each dictionary includes the frame range and total number of :return: A list of server dictionaries where each dictionary includes the frame range and total number of frames
frames to be rendered by the server. to be rendered by the server.
""" """
# Calculate respective frames for each server # Calculate respective frames for each server
def divide_frames_by_cpu_count(frame_start, frame_end, servers): def divide_frames_by_cpu_count(frame_start, frame_end, servers):
total_frames = frame_end - frame_start + 1 total_frames = frame_end - frame_start + 1
total_cpus = sum(server['cpu_count'] for server in servers) total_performance = sum(server['cpu_count'] for server in servers)
frame_ranges = {} frame_ranges = {}
current_frame = frame_start current_frame = frame_start
@@ -398,47 +310,7 @@ class DistributedJobManager:
# Give all remaining frames to the last server # Give all remaining frames to the last server
num_frames = total_frames - allocated_frames num_frames = total_frames - allocated_frames
else: else:
num_frames = round((server['cpu_count'] / total_cpus) * total_frames) num_frames = round((server['cpu_count'] / total_performance) * total_frames)
allocated_frames += num_frames
frame_end_for_server = current_frame + num_frames - 1
if current_frame <= frame_end_for_server:
frame_ranges[server['hostname']] = (current_frame, frame_end_for_server)
current_frame = frame_end_for_server + 1
return frame_ranges
def divide_frames_by_benchmark(frame_start, frame_end, servers):
def fetch_benchmark(server):
try:
benchmark = requests.get(f'http://{server["hostname"]}:{ZeroconfServer.server_port}'
f'/api/cpu_benchmark').text
server['cpu_benchmark'] = benchmark
logger.debug(f'Benchmark for {server["hostname"]}: {benchmark}')
except requests.exceptions.RequestException as e:
logger.error(f'Error fetching benchmark for {server["hostname"]}: {e}')
# Number of threads to use (can adjust based on your needs or number of servers)
threads = len(servers)
with ThreadPoolExecutor(max_workers=threads) as executor:
executor.map(fetch_benchmark, servers)
total_frames = frame_end - frame_start + 1
total_performance = sum(int(server['cpu_benchmark']) for server in servers)
frame_ranges = {}
current_frame = frame_start
allocated_frames = 0
for i, server in enumerate(servers):
if i == len(servers) - 1: # if it's the last server
# Give all remaining frames to the last server
num_frames = total_frames - allocated_frames
else:
num_frames = round((int(server['cpu_benchmark']) / total_performance) * total_frames)
allocated_frames += num_frames allocated_frames += num_frames
frame_end_for_server = current_frame + num_frames - 1 frame_end_for_server = current_frame + num_frames - 1
@@ -467,18 +339,12 @@ class DistributedJobManager:
return frame_ranges return frame_ranges
if len(available_servers) == 1: if method == 'equally':
breakdown = {available_servers[0]['hostname']: (start_frame, end_frame)}
else:
logger.debug(f'Splitting between {len(available_servers)} servers by {method} method')
if method == 'evenly':
breakdown = divide_frames_equally(start_frame, end_frame, available_servers) breakdown = divide_frames_equally(start_frame, end_frame, available_servers)
elif method == 'cpu_benchmark': # elif method == 'benchmark_score': # todo: implement benchmark score
breakdown = divide_frames_by_benchmark(start_frame, end_frame, available_servers) # pass
elif method == 'cpu_count':
breakdown = divide_frames_by_cpu_count(start_frame, end_frame, available_servers)
else: else:
raise ValueError(f"Invalid distribution method: {method}") breakdown = divide_frames_by_cpu_count(start_frame, end_frame, available_servers)
server_breakdown = [server for server in available_servers if breakdown.get(server['hostname']) is not None] server_breakdown = [server for server in available_servers if breakdown.get(server['hostname']) is not None]
for server in server_breakdown: for server in server_breakdown:
@@ -504,17 +370,3 @@ class DistributedJobManager:
available_servers.append(response) available_servers.append(response)
return available_servers return available_servers
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
ZeroconfServer.configure("_zordon._tcp.local.", 'testing', 8080)
ZeroconfServer.start(listen_only=True)
print("Starting Zeroconf...")
time.sleep(2)
available_servers = DistributedJobManager.find_available_servers('blender')
print(f"AVAILABLE SERVERS ({len(available_servers)}): {available_servers}")
# results = DistributedJobManager.distribute_server_work(1, 100, available_servers)
# print(f"RESULTS: {results}")
ZeroconfServer.stop()

View File

@@ -1,74 +1,22 @@
import glob
import logging
import subprocess
from src.engines.core.base_engine import BaseRenderEngine from src.engines.core.base_engine import BaseRenderEngine
logger = logging.getLogger()
class AERender(BaseRenderEngine): class AERender(BaseRenderEngine):
file_extensions = ['aepx'] supported_extensions = ['.aep']
def version(self): def version(self):
version = None version = None
try: try:
render_path = self.renderer_path() render_path = self.renderer_path()
if render_path: if render_path:
ver_out = subprocess.run([render_path, '-version'], capture_output=True, text=True) ver_out = subprocess.check_output([render_path, '-version'], timeout=SUBPROCESS_TIMEOUT)
version = ver_out.stdout.split(" ")[-1].strip() version = ver_out.decode('utf-8').split(" ")[-1].strip()
except Exception as e: except Exception as e:
logger.error(f'Failed to get {self.name()} version: {e}') logger.error(f'Failed to get {self.name()} version: {e}')
return version return version
@classmethod
def default_renderer_path(cls):
paths = glob.glob('/Applications/*After Effects*/aerender')
if len(paths) > 1:
logger.warning('Multiple After Effects installations detected')
elif not paths:
logger.error('After Effects installation not found')
return paths[0]
def get_project_info(self, project_path, timeout=10):
scene_info = {}
try:
import xml.etree.ElementTree as ET
tree = ET.parse(project_path)
root = tree.getroot()
namespace = {'ae': 'http://www.adobe.com/products/aftereffects'}
comp_names = []
for item in root.findall(".//ae:Item", namespace):
if item.find("ae:Layr", namespace) is not None:
for string in item.findall("./ae:string", namespace):
comp_names.append(string.text)
scene_info['comp_names'] = comp_names
except Exception as e:
logger.error(f'Error getting file details for .aepx file: {e}')
return scene_info
def run_javascript(self, script_path, project_path, timeout=None):
# todo: implement
pass
@classmethod @classmethod
def get_output_formats(cls): def get_output_formats(cls):
# todo: create implementation # todo: create implementation
return [] return []
def ui_options(self, project_info):
from src.engines.aerender.aerender_ui import AERenderUI
return AERenderUI.get_options(self, project_info)
@classmethod
def worker_class(cls):
from src.engines.aerender.aerender_worker import AERenderWorker
return AERenderWorker
if __name__ == "__main__":
x = AERender().get_project_info('/Users/brett/ae_testing/project.aepx')
print(x)

View File

@@ -1,8 +0,0 @@
class AERenderUI:
@staticmethod
def get_options(instance, project_info):
options = [
{'name': 'comp', 'options': project_info.get('comp_names', [])}
]
return options

View File

@@ -9,39 +9,72 @@ import time
from src.engines.core.base_worker import BaseRenderWorker, timecode_to_frames from src.engines.core.base_worker import BaseRenderWorker, timecode_to_frames
from src.engines.aerender.aerender_engine import AERender from src.engines.aerender.aerender_engine import AERender
logger = logging.getLogger()
def aerender_path():
paths = glob.glob('/Applications/*After Effects*/aerender')
if len(paths) > 1:
logging.warning('Multiple After Effects installations detected')
elif not paths:
logging.error('After Effects installation not found')
else:
return paths[0]
class AERenderWorker(BaseRenderWorker): class AERenderWorker(BaseRenderWorker):
supported_extensions = ['.aep']
engine = AERender engine = AERender
def __init__(self, input_path, output_path, engine_path, args=None, parent=None, name=None): def __init__(self, input_path, output_path, args=None, parent=None, name=None):
super(AERenderWorker, self).__init__(input_path=input_path, output_path=output_path, engine_path=engine_path, super(AERenderWorker, self).__init__(input_path=input_path, output_path=output_path, args=args,
args=args, parent=parent, name=name) parent=parent, name=name)
# temp files for processing stdout self.comp = args.get('comp', None)
self.__progress_history = [] self.render_settings = args.get('render_settings', None)
self.__temp_attributes = {} self.omsettings = args.get('omsettings', None)
self.progress = 0
self.progress_history = []
self.attributes = {}
def generate_worker_subprocess(self): def generate_worker_subprocess(self):
comp = self.args.get('comp', 'Comp 1') if os.path.exists('nexrender-cli-macos'):
render_settings = self.args.get('render_settings', None) logging.info('nexrender found')
omsettings = self.args.get('omsettings', None) # {
# "template": {
command = [self.renderer_path, '-project', self.input_path, '-comp', f'"{comp}"'] # "src": String,
# "composition": String,
if render_settings: #
command.extend(['-RStemplate', render_settings]) # "frameStart": Number,
# "frameEnd": Number,
if omsettings: # "frameIncrement": Number,
command.extend(['-OMtemplate', omsettings]) #
# "continueOnMissing": Boolean,
command.extend(['-s', self.start_frame, # "settingsTemplate": String,
'-e', self.end_frame, # "outputModule": String,
'-output', self.output_path]) # "outputExt": String,
return command # },
# "assets": [],
# "actions": {
# "prerender": [],
# "postrender": [],
# },
# "onChange": Function,
# "onRenderProgress": Function
# }
job = {'template':
{
'src': 'file://' + self.input_path, 'composition': self.comp.replace('"', ''),
'settingsTemplate': self.render_settings.replace('"', ''),
'outputModule': self.omsettings.replace('"', ''), 'outputExt': 'mov'}
}
x = ['./nexrender-cli-macos', "'{}'".format(json.dumps(job))]
else:
logging.info('nexrender not found')
x = [aerender_path(), '-project', self.input_path, '-comp', self.comp, '-RStemplate', self.render_settings,
'-OMtemplate', self.omsettings, '-output', self.output_path]
return x
def _parse_stdout(self, line): def _parse_stdout(self, line):
@@ -50,12 +83,12 @@ class AERenderWorker(BaseRenderWorker):
# print 'progress' # print 'progress'
trimmed = line.replace('PROGRESS:', '').strip() trimmed = line.replace('PROGRESS:', '').strip()
if len(trimmed): if len(trimmed):
self.__progress_history.append(line) self.progress_history.append(line)
if 'Seconds' in trimmed: if 'Seconds' in trimmed:
self._update_progress(line) self._update_progress(line)
elif ': ' in trimmed: elif ': ' in trimmed:
tmp = trimmed.split(': ') tmp = trimmed.split(': ')
self.__temp_attributes[tmp[0].strip()] = tmp[1].strip() self.attributes[tmp[0].strip()] = tmp[1].strip()
elif line.startswith('WARNING:'): elif line.startswith('WARNING:'):
trimmed = line.replace('WARNING:', '').strip() trimmed = line.replace('WARNING:', '').strip()
self.warnings.append(trimmed) self.warnings.append(trimmed)
@@ -66,28 +99,28 @@ class AERenderWorker(BaseRenderWorker):
def _update_progress(self, line): def _update_progress(self, line):
if not self.total_frames: if not self.total_frames:
duration_string = self.__temp_attributes.get('Duration', None) duration_string = self.attributes.get('Duration', None)
frame_rate = self.__temp_attributes.get('Frame Rate', '0').split(' ')[0] frame_rate = self.attributes.get('Frame Rate', '0').split(' ')[0]
self.total_frames = timecode_to_frames(duration_string.split('Duration:')[-1], float(frame_rate)) self.total_frames = timecode_to_frames(duration_string.split('Duration:')[-1], float(frame_rate))
match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line).groupdict() match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line).groupdict()
self.current_frame = match['frame'] self.last_frame = match['frame']
def average_frame_duration(self): def average_frame_duration(self):
total_durations = 0 total_durations = 0
for line in self.__progress_history: for line in self.progress_history:
match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line) match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line)
if match: if match:
total_durations += int(match.group(2)) total_durations += int(match.group(2))
average = float(total_durations) / self.current_frame average = float(total_durations) / self.last_frame
return average return average
def percent_complete(self): def percent_complete(self):
if self.total_frames: if self.total_frames:
return (float(self.current_frame) / float(self.total_frames)) * 100 return (float(self.last_frame) / float(self.total_frames)) * 100
else: else:
return 0 return 0
@@ -95,11 +128,8 @@ class AERenderWorker(BaseRenderWorker):
if __name__ == '__main__': if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.DEBUG) logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.DEBUG)
r = AERenderWorker(input_path='/Users/brett/ae_testing/project.aepx', r = AERenderWorker('/Users/brett/Desktop/Youtube_Vids/Film_Formats/Frame_Animations.aep', '"Film Pan"',
output_path='/Users/brett/ae_testing/project.mp4', '"Draft Settings"', '"ProRes"', '/Users/brett/Desktop/test_render')
engine_path=AERenderWorker.engine.default_renderer_path(),
args={'start_frame': 1, 'end_frame': 5})
r.start() r.start()
while r.is_running(): while r.is_running():
time.sleep(0.1) time.sleep(0.1)

View File

@@ -1,6 +1,5 @@
import logging import logging
import re import re
import threading
import requests import requests
@@ -44,13 +43,11 @@ class BlenderDownloader(EngineDownloader):
response = requests.get(base_url, timeout=5) response = requests.get(base_url, timeout=5)
response.raise_for_status() response.raise_for_status()
versions_pattern = \ versions_pattern = r'<a href="(?P<file>[^"]+)">blender-(?P<version>[\d\.]+)-(?P<system_os>\w+)-(?P<cpu>\w+).*</a>'
r'<a href="(?P<file>[^"]+)">blender-(?P<version>[\d\.]+)-(?P<system_os>\w+)-(?P<cpu>\w+).*</a>'
versions_data = [match.groupdict() for match in re.finditer(versions_pattern, response.text)] versions_data = [match.groupdict() for match in re.finditer(versions_pattern, response.text)]
# Filter to just the supported formats # Filter to just the supported formats
versions_data = [item for item in versions_data if any(item["file"].endswith(ext) for ext in versions_data = [item for item in versions_data if any(item["file"].endswith(ext) for ext in supported_formats)]
supported_formats)]
# Filter down OS and CPU # Filter down OS and CPU
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
@@ -81,31 +78,6 @@ class BlenderDownloader(EngineDownloader):
return lts_versions return lts_versions
@classmethod
def all_versions(cls, system_os=None, cpu=None):
majors = cls.__get_major_versions()
all_versions = []
threads = []
results = [[] for _ in majors]
def thread_function(major_version, index, system_os, cpu):
results[index] = cls.__get_minor_versions(major_version, system_os, cpu)
for i, m in enumerate(majors):
thread = threading.Thread(target=thread_function, args=(m, i, system_os, cpu))
threads.append(thread)
thread.start()
# Wait for all threads to complete
for thread in threads:
thread.join()
# Extend all_versions with the results from each thread
for result in results:
all_versions.extend(result)
return all_versions
@classmethod @classmethod
def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False): def find_most_recent_version(cls, system_os=None, cpu=None, lts_only=False):
try: try:
@@ -133,8 +105,9 @@ class BlenderDownloader(EngineDownloader):
try: try:
logger.info(f"Requesting download of blender-{version}-{system_os}-{cpu}") logger.info(f"Requesting download of blender-{version}-{system_os}-{cpu}")
major_version = '.'.join(version.split('.')[:2]) major_version = '.'.join(version.split('.')[:2])
minor_versions = [x for x in cls.__get_minor_versions(major_version, system_os, cpu) if minor_versions = [x for x in cls.__get_minor_versions(major_version, system_os, cpu) if x['version'] == version]
x['version'] == version] # we get the URL instead of calculating it ourselves. May change this
cls.download_and_extract_app(remote_url=minor_versions[0]['url'], download_location=download_location, cls.download_and_extract_app(remote_url=minor_versions[0]['url'], download_location=download_location,
timeout=timeout) timeout=timeout)
except IndexError: except IndexError:
@@ -144,4 +117,5 @@ class BlenderDownloader(EngineDownloader):
if __name__ == '__main__': if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
print(BlenderDownloader.find_most_recent_version()) print(BlenderDownloader.__get_major_versions())

View File

@@ -11,21 +11,20 @@ class Blender(BaseRenderEngine):
install_paths = ['/Applications/Blender.app/Contents/MacOS/Blender'] install_paths = ['/Applications/Blender.app/Contents/MacOS/Blender']
binary_names = {'linux': 'blender', 'windows': 'blender.exe', 'macos': 'Blender'} binary_names = {'linux': 'blender', 'windows': 'blender.exe', 'macos': 'Blender'}
file_extensions = ['blend']
@staticmethod @staticmethod
def downloader(): def downloader():
from src.engines.blender.blender_downloader import BlenderDownloader from src.engines.blender.blender_downloader import BlenderDownloader
return BlenderDownloader return BlenderDownloader
@classmethod @staticmethod
def worker_class(cls): def worker_class():
from src.engines.blender.blender_worker import BlenderRenderWorker from src.engines.blender.blender_worker import BlenderRenderWorker
return BlenderRenderWorker return BlenderRenderWorker
def ui_options(self, project_info): @staticmethod
from src.engines.blender.blender_ui import BlenderUI def supported_extensions():
return BlenderUI.get_options(self) return ['blend']
def version(self): def version(self):
version = None version = None
@@ -53,27 +52,25 @@ class Blender(BaseRenderEngine):
else: else:
raise FileNotFoundError(f'Project file not found: {project_path}') raise FileNotFoundError(f'Project file not found: {project_path}')
def run_python_script(self, script_path, project_path=None, timeout=None): def run_python_script(self, project_path, script_path, timeout=None):
if os.path.exists(project_path) and os.path.exists(script_path):
if project_path and not os.path.exists(project_path): try:
return subprocess.run([self.renderer_path(), '-b', project_path, '--python', script_path],
capture_output=True, timeout=timeout)
except Exception as e:
logger.warning(f"Error running python script in blender: {e}")
pass
elif not os.path.exists(project_path):
raise FileNotFoundError(f'Project file not found: {project_path}') raise FileNotFoundError(f'Project file not found: {project_path}')
elif not os.path.exists(script_path): elif not os.path.exists(script_path):
raise FileNotFoundError(f'Python script not found: {script_path}') raise FileNotFoundError(f'Python script not found: {script_path}')
raise Exception("Uncaught exception")
try:
command = [self.renderer_path(), '-b', '--python', script_path]
if project_path:
command.insert(2, project_path)
return subprocess.run(command, capture_output=True, timeout=timeout)
except Exception as e:
logger.exception(f"Error running python script in blender: {e}")
def get_project_info(self, project_path, timeout=10): def get_project_info(self, project_path, timeout=10):
scene_info = {} scene_info = {}
try: try:
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_file_info.py') script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_file_info.py')
results = self.run_python_script(project_path=project_path, script_path=system_safe_path(script_path), results = self.run_python_script(project_path, system_safe_path(script_path), timeout=timeout)
timeout=timeout)
result_text = results.stdout.decode() result_text = results.stdout.decode()
for line in result_text.splitlines(): for line in result_text.splitlines():
if line.startswith('SCENE_DATA:'): if line.startswith('SCENE_DATA:'):
@@ -91,8 +88,7 @@ class Blender(BaseRenderEngine):
try: try:
logger.info(f"Starting to pack Blender file: {project_path}") logger.info(f"Starting to pack Blender file: {project_path}")
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'pack_project.py') script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'pack_project.py')
results = self.run_python_script(project_path=project_path, script_path=system_safe_path(script_path), results = self.run_python_script(project_path, system_safe_path(script_path), timeout=timeout)
timeout=timeout)
result_text = results.stdout.decode() result_text = results.stdout.decode()
dir_name = os.path.dirname(project_path) dir_name = os.path.dirname(project_path)
@@ -112,7 +108,7 @@ class Blender(BaseRenderEngine):
logger.error(f'Error packing .blend file: {e}') logger.error(f'Error packing .blend file: {e}')
return None return None
def get_arguments(self): # possibly deprecate def get_arguments(self):
help_text = subprocess.check_output([self.renderer_path(), '-h']).decode('utf-8') help_text = subprocess.check_output([self.renderer_path(), '-h']).decode('utf-8')
lines = help_text.splitlines() lines = help_text.splitlines()
@@ -144,20 +140,12 @@ class Blender(BaseRenderEngine):
return options return options
def system_info(self): def get_detected_gpus(self):
return {'render_devices': self.get_render_devices()} # no longer works on 4.0
engine_output = subprocess.run([self.renderer_path(), '-E', 'help'], timeout=SUBPROCESS_TIMEOUT,
def get_render_devices(self): capture_output=True).stdout.decode('utf-8')
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scripts', 'get_system_info.py') gpu_names = re.findall(r"DETECTED GPU: (.+)", engine_output)
results = self.run_python_script(script_path=script_path) return gpu_names
output = results.stdout.decode()
match = re.search(r"GPU DATA:(\[[\s\S]*\])", output)
if match:
gpu_data_json = match.group(1)
gpus_info = json.loads(gpu_data_json)
return gpus_info
else:
logger.error("GPU data not found in the output.")
def supported_render_engines(self): def supported_render_engines(self):
engine_output = subprocess.run([self.renderer_path(), '-E', 'help'], timeout=SUBPROCESS_TIMEOUT, engine_output = subprocess.run([self.renderer_path(), '-E', 'help'], timeout=SUBPROCESS_TIMEOUT,
@@ -165,11 +153,18 @@ class Blender(BaseRenderEngine):
render_engines = [x.strip() for x in engine_output.split('Blender Engine Listing:')[-1].strip().splitlines()] render_engines = [x.strip() for x in engine_output.split('Blender Engine Listing:')[-1].strip().splitlines()]
return render_engines return render_engines
# UI and setup
def get_options(self):
options = [
{'name': 'engine', 'options': self.supported_render_engines()},
]
return options
def perform_presubmission_tasks(self, project_path): def perform_presubmission_tasks(self, project_path):
packed_path = self.pack_project_file(project_path, timeout=30) packed_path = self.pack_project_file(project_path, timeout=30)
return packed_path return packed_path
if __name__ == "__main__": if __name__ == "__main__":
x = Blender().get_render_devices() x = Blender.get_detected_gpus()
print(x) print(x)

View File

@@ -1,9 +0,0 @@
class BlenderUI:
@staticmethod
def get_options(instance):
options = [
{'name': 'engine', 'options': instance.supported_render_engines()},
{'name': 'render_device', 'options': ['Any', 'GPU', 'CPU']},
]
return options

View File

@@ -15,9 +15,20 @@ class BlenderRenderWorker(BaseRenderWorker):
super(BlenderRenderWorker, self).__init__(input_path=input_path, output_path=output_path, super(BlenderRenderWorker, self).__init__(input_path=input_path, output_path=output_path,
engine_path=engine_path, args=args, parent=parent, name=name) engine_path=engine_path, args=args, parent=parent, name=name)
# Args
self.blender_engine = self.args.get('engine', 'BLENDER_EEVEE').upper()
self.export_format = self.args.get('export_format', None) or 'JPEG'
self.camera = self.args.get('camera', None)
# Stats # Stats
self.__frame_percent_complete = 0.0 self.__frame_percent_complete = 0.0
self.current_frame = -1 # todo: is this necessary?
# Scene Info
self.scene_info = Blender(engine_path).get_project_info(input_path)
self.start_frame = int(self.scene_info.get('start_frame', 1))
self.end_frame = int(self.scene_info.get('end_frame', self.start_frame))
self.project_length = (self.end_frame - self.start_frame) + 1
self.current_frame = -1
def generate_worker_subprocess(self): def generate_worker_subprocess(self):
@@ -26,39 +37,16 @@ class BlenderRenderWorker(BaseRenderWorker):
cmd.append('-b') cmd.append('-b')
cmd.append(self.input_path) cmd.append(self.input_path)
# Start Python expressions - # todo: investigate splitting into separate 'setup' script # Python expressions
cmd.append('--python-expr') cmd.append('--python-expr')
python_exp = 'import bpy; bpy.context.scene.render.use_overwrite = False;' python_exp = 'import bpy; bpy.context.scene.render.use_overwrite = False;'
if self.camera:
# Setup Custom Camera python_exp = python_exp + f"bpy.context.scene.camera = bpy.data.objects['{self.camera}'];"
custom_camera = self.args.get('camera', None) # insert any other python exp checks here
if custom_camera:
python_exp = python_exp + f"bpy.context.scene.camera = bpy.data.objects['{custom_camera}'];"
# Set Render Device (gpu/cpu/any)
blender_engine = self.args.get('engine', 'BLENDER_EEVEE').upper()
if blender_engine == 'CYCLES':
render_device = self.args.get('render_device', 'any').lower()
if render_device not in {'any', 'gpu', 'cpu'}:
raise AttributeError(f"Invalid Cycles render device: {render_device}")
use_gpu = render_device in {'any', 'gpu'}
use_cpu = render_device in {'any', 'cpu'}
python_exp = python_exp + ("exec(\"for device in bpy.context.preferences.addons["
f"'cycles'].preferences.devices: device.use = {use_cpu} if device.type == 'CPU'"
f" else {use_gpu}\")")
# -- insert any other python exp checks / generators here --
# End Python expressions here
cmd.append(python_exp) cmd.append(python_exp)
# Export format
export_format = self.args.get('export_format', None) or 'JPEG'
path_without_ext = os.path.splitext(self.output_path)[0] + "_" path_without_ext = os.path.splitext(self.output_path)[0] + "_"
cmd.extend(['-E', blender_engine, '-o', path_without_ext, '-F', export_format]) cmd.extend(['-E', self.blender_engine, '-o', path_without_ext, '-F', self.export_format])
# set frame range # set frame range
cmd.extend(['-s', self.start_frame, '-e', self.end_frame, '-a']) cmd.extend(['-s', self.start_frame, '-e', self.end_frame, '-a'])
@@ -96,22 +84,15 @@ class BlenderRenderWorker(BaseRenderWorker):
elif line.lower().startswith('error'): elif line.lower().startswith('error'):
self.log_error(line) self.log_error(line)
elif 'Saved' in line or 'Saving' in line or 'quit' in line: elif 'Saved' in line or 'Saving' in line or 'quit' in line:
render_stats_match = re.match(r'Time: (.*) \(Saving', line) match = re.match(r'Time: (.*) \(Saving', line)
output_filename_match = re.match(r"Saved: .*_(\d+)\.\w+", line) # try to get frame # from filename if match:
if output_filename_match: time_completed = match.groups()[0]
output_file_number = output_filename_match.groups()[0]
try:
self.current_frame = int(output_file_number)
except ValueError:
pass
elif render_stats_match:
time_completed = render_stats_match.groups()[0]
frame_count = self.current_frame - self.end_frame + self.total_frames frame_count = self.current_frame - self.end_frame + self.total_frames
logger.info(f'Frame #{self.current_frame} - ' logger.info(f'Frame #{self.current_frame} - '
f'{frame_count} of {self.total_frames} completed in {time_completed} | ' f'{frame_count} of {self.total_frames} completed in {time_completed} | '
f'Total Elapsed Time: {datetime.now() - self.start_time}') f'Total Elapsed Time: {datetime.now() - self.start_time}')
else: else:
logger.debug(f'DEBUG: {line}') logger.debug(line)
else: else:
pass pass
# if len(line.strip()): # if len(line.strip()):

View File

@@ -19,8 +19,8 @@ for cam_obj in bpy.data.cameras:
data = {'cameras': cameras, data = {'cameras': cameras,
'engine': scene.render.engine, 'engine': scene.render.engine,
'start_frame': scene.frame_start, 'frame_start': scene.frame_start,
'end_frame': scene.frame_end, 'frame_end': scene.frame_end,
'resolution_x': scene.render.resolution_x, 'resolution_x': scene.render.resolution_x,
'resolution_y': scene.render.resolution_y, 'resolution_y': scene.render.resolution_y,
'resolution_percentage': scene.render.resolution_percentage, 'resolution_percentage': scene.render.resolution_percentage,

View File

@@ -1,17 +0,0 @@
import bpy
import json
# Ensure Cycles is available
bpy.context.preferences.addons['cycles'].preferences.get_devices()
# Collect the devices information
devices_info = []
for device in bpy.context.preferences.addons['cycles'].preferences.devices:
devices_info.append({
"name": device.name,
"type": device.type,
"use": device.use
})
# Print the devices information in JSON format
print("GPU DATA:" + json.dumps(devices_info))

View File

@@ -98,7 +98,7 @@ class EngineDownloader:
zip_ref.extractall(download_location) zip_ref.extractall(download_location)
logger.info( logger.info(
f'Successfully extracted {os.path.basename(temp_downloaded_file_path)} to {download_location}') f'Successfully extracted {os.path.basename(temp_downloaded_file_path)} to {download_location}')
except zipfile.BadZipFile: except zipfile.BadZipFile as e:
logger.error(f'Error: {temp_downloaded_file_path} is not a valid ZIP file.') logger.error(f'Error: {temp_downloaded_file_path} is not a valid ZIP file.')
except FileNotFoundError: except FileNotFoundError:
logger.error(f'File not found: {temp_downloaded_file_path}') logger.error(f'File not found: {temp_downloaded_file_path}')
@@ -110,8 +110,7 @@ class EngineDownloader:
for mount_point in dmg.attach(): for mount_point in dmg.attach():
try: try:
copy_directory_contents(mount_point, os.path.join(download_location, output_dir_name)) copy_directory_contents(mount_point, os.path.join(download_location, output_dir_name))
logger.info(f'Successfully copied {os.path.basename(temp_downloaded_file_path)} ' logger.info(f'Successfully copied {os.path.basename(temp_downloaded_file_path)} to {download_location}')
f'to {download_location}')
except FileNotFoundError: except FileNotFoundError:
logger.error(f'Error: The source .app bundle does not exist.') logger.error(f'Error: The source .app bundle does not exist.')
except PermissionError: except PermissionError:

View File

@@ -9,16 +9,12 @@ SUBPROCESS_TIMEOUT = 5
class BaseRenderEngine(object): class BaseRenderEngine(object):
install_paths = [] install_paths = []
file_extensions = [] supported_extensions = []
def __init__(self, custom_path=None): def __init__(self, custom_path=None):
self.custom_renderer_path = custom_path self.custom_renderer_path = custom_path
if not self.renderer_path() or not os.path.exists(self.renderer_path()): if not self.renderer_path():
raise FileNotFoundError(f"Cannot find path ({self.renderer_path()}) for renderer '{self.name()}'") raise FileNotFoundError(f"Cannot find path to renderer for {self.name()} instance")
if not os.access(self.renderer_path(), os.X_OK):
logger.warning(f"Path is not executable. Setting permissions to 755 for {self.renderer_path()}")
os.chmod(self.renderer_path(), 0o755)
def renderer_path(self): def renderer_path(self):
return self.custom_renderer_path or self.default_renderer_path() return self.custom_renderer_path or self.default_renderer_path()
@@ -47,18 +43,16 @@ class BaseRenderEngine(object):
def downloader(): # override when subclassing if using a downloader class def downloader(): # override when subclassing if using a downloader class
return None return None
@classmethod @staticmethod
def worker_class(cls): # override when subclassing to link worker class def worker_class(): # override when subclassing to link worker class
raise NotImplementedError(f"Worker class not implemented for engine {cls.name()}") raise NotImplementedError("Worker class not implemented")
def ui_options(self, project_info): # override to return options for ui
return {}
def get_help(self): # override if renderer uses different help flag def get_help(self): # override if renderer uses different help flag
path = self.renderer_path() path = self.renderer_path()
if not path: if not path:
raise FileNotFoundError("renderer path not found") raise FileNotFoundError("renderer path not found")
help_doc = subprocess.run([path, '-h'], capture_output=True, text=True).stdout.strip() help_doc = subprocess.check_output([path, '-h'], stderr=subprocess.STDOUT,
timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
return help_doc return help_doc
def get_project_info(self, project_path, timeout=10): def get_project_info(self, project_path, timeout=10):
@@ -69,14 +63,12 @@ class BaseRenderEngine(object):
raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}") raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
@classmethod @classmethod
def supported_extensions(cls): def get_arguments(cls):
return cls.file_extensions
def get_arguments(self):
pass pass
def system_info(self): def get_options(self): # override to return options for ui
pass return {}
def perform_presubmission_tasks(self, project_path): def perform_presubmission_tasks(self, project_path):
return project_path return project_path

View File

@@ -11,7 +11,6 @@ import psutil
from pubsub import pub from pubsub import pub
from sqlalchemy import Column, Integer, String, DateTime, JSON from sqlalchemy import Column, Integer, String, DateTime, JSON
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict
from src.utilities.misc_helper import get_time_elapsed from src.utilities.misc_helper import get_time_elapsed
from src.utilities.status_utils import RenderStatus, string_to_status from src.utilities.status_utils import RenderStatus, string_to_status
@@ -24,7 +23,6 @@ class BaseRenderWorker(Base):
__tablename__ = 'render_workers' __tablename__ = 'render_workers'
id = Column(String, primary_key=True) id = Column(String, primary_key=True)
hostname = Column(String, nullable=True)
input_path = Column(String) input_path = Column(String)
output_path = Column(String) output_path = Column(String)
date_created = Column(DateTime) date_created = Column(DateTime)
@@ -38,8 +36,7 @@ class BaseRenderWorker(Base):
start_frame = Column(Integer) start_frame = Column(Integer)
end_frame = Column(Integer, nullable=True) end_frame = Column(Integer, nullable=True)
parent = Column(String, nullable=True) parent = Column(String, nullable=True)
children = Column(MutableDict.as_mutable(JSON)) children = Column(JSON)
args = Column(MutableDict.as_mutable(JSON))
name = Column(String) name = Column(String)
file_hash = Column(String) file_hash = Column(String)
_status = Column(String) _status = Column(String)
@@ -63,7 +60,6 @@ class BaseRenderWorker(Base):
# Essential Info # Essential Info
self.id = generate_id() self.id = generate_id()
self.hostname = None
self.input_path = input_path self.input_path = input_path
self.output_path = output_path self.output_path = output_path
self.args = args or {} self.args = args or {}
@@ -76,23 +72,19 @@ class BaseRenderWorker(Base):
self.parent = parent self.parent = parent
self.children = {} self.children = {}
self.name = name or os.path.basename(input_path) self.name = name or os.path.basename(input_path)
self.maximum_attempts = 3
# Frame Ranges # Frame Ranges
self.project_length = 0 # is this necessary? self.project_length = -1
self.current_frame = 0 self.current_frame = 0 # should this be a 1 ?
self.start_frame = 0 # should this be a 1 ?
# Get Project Info self.end_frame = None
self.scene_info = self.engine(engine_path).get_project_info(project_path=input_path)
self.start_frame = int(self.scene_info.get('start_frame', 1))
self.end_frame = int(self.scene_info.get('end_frame', self.start_frame))
# Logging # Logging
self.start_time = None self.start_time = None
self.end_time = None self.end_time = None
# History # History
self.status = RenderStatus.NOT_STARTED self.status = RenderStatus.CONFIGURING
self.warnings = [] self.warnings = []
self.errors = [] self.errors = []
@@ -164,14 +156,14 @@ class BaseRenderWorker(Base):
if not os.path.exists(self.input_path): if not os.path.exists(self.input_path):
self.status = RenderStatus.ERROR self.status = RenderStatus.ERROR
msg = f'Cannot find input path: {self.input_path}' msg = 'Cannot find input path: {}'.format(self.input_path)
logger.error(msg) logger.error(msg)
self.errors.append(msg) self.errors.append(msg)
return return
if not os.path.exists(self.renderer_path): if not os.path.exists(self.renderer_path):
self.status = RenderStatus.ERROR self.status = RenderStatus.ERROR
msg = f'Cannot find render engine path for {self.engine.name()}' msg = 'Cannot find render engine path for {}'.format(self.engine.name())
logger.error(msg) logger.error(msg)
self.errors.append(msg) self.errors.append(msg)
return return
@@ -189,37 +181,28 @@ class BaseRenderWorker(Base):
subprocess_cmds = self.generate_subprocess() subprocess_cmds = self.generate_subprocess()
initial_file_count = len(self.file_list()) initial_file_count = len(self.file_list())
failed_attempts = 0 attempt_number = 0
with open(self.log_path(), "a") as f: with open(self.log_path(), "a") as f:
f.write(f"{self.start_time.isoformat()} - Starting {self.engine.name()} {self.renderer_version} " f.write(f"{self.start_time.isoformat()} - Starting {self.engine.name()} {self.renderer_version} "
f"render for {self.input_path}\n\n") f"render for {self.input_path}\n\n")
f.write(f"Running command: \"{' '.join(subprocess_cmds)}\"\n") f.write(f"Running command: {subprocess_cmds}\n")
f.write('=' * 80 + '\n\n') f.write('=' * 80 + '\n\n')
while True: while True:
# Log attempt # # Log attempt #
if failed_attempts: if attempt_number:
if failed_attempts >= self.maximum_attempts: f.write(f'\n{"=" * 80} Attempt #{attempt_number} {"=" * 30}\n\n')
err_msg = f"Maximum attempts exceeded ({self.maximum_attempts})" logger.warning(f"Restarting render - Attempt #{attempt_number}")
logger.error(err_msg) attempt_number += 1
self.status = RenderStatus.ERROR
self.errors.append(err_msg)
return
else:
f.write(f'\n{"=" * 20} Attempt #{failed_attempts + 1} {"=" * 20}\n\n')
logger.warning(f"Restarting render - Attempt #{failed_attempts + 1}")
# Start process and get updates # Start process and get updates
self.status = RenderStatus.RUNNING
self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
universal_newlines=False) universal_newlines=False)
for c in io.TextIOWrapper(self.__process.stdout, encoding="utf-8"): # or another encoding for c in io.TextIOWrapper(self.__process.stdout, encoding="utf-8"): # or another encoding
f.write(c) f.write(c)
f.flush()
os.fsync(f.fileno())
self.last_output = c.strip() self.last_output = c.strip()
self._parse_stdout(c.strip()) self._parse_stdout(c.strip())
@@ -235,26 +218,21 @@ class BaseRenderWorker(Base):
f.write(message) f.write(message)
return return
# if file output hasn't increased, return as error, otherwise restart process. if not return_code:
if len(self.file_list()) <= initial_file_count: message = f"{'=' * 50}\n\n{self.engine.name()} render completed successfully in {self.time_elapsed()}"
err_msg = f"File count has not increased. Count is still {len(self.file_list())}"
f.write(f'Error: {err_msg}\n\n')
self.errors.append(err_msg)
self.status = RenderStatus.ERROR
# Handle completed - All else counts as failed attempt
if (self.status == RenderStatus.COMPLETED) and not return_code:
message = (f"{'=' * 50}\n\n{self.engine.name()} render completed successfully in "
f"{self.time_elapsed()}\n")
f.write(message) f.write(message)
break break
# Handle non-zero return codes # Handle non-zero return codes
message = f"{'=' * 50}\n\n{self.engine.name()} render failed with code {return_code} " \ message = f"{'=' * 50}\n\n{self.engine.name()} render failed with code {return_code} " \
f"after {self.time_elapsed()}\n\n" f"after {self.time_elapsed()}"
f.write(message) f.write(message)
self.errors.append(message) self.errors.append(message)
failed_attempts += 1
# if file output hasn't increased, return as error, otherwise restart process.
if len(self.file_list()) <= initial_file_count:
self.status = RenderStatus.ERROR
return
if self.children: if self.children:
from src.distributed_job_manager import DistributedJobManager from src.distributed_job_manager import DistributedJobManager
@@ -298,8 +276,6 @@ class BaseRenderWorker(Base):
self.status = RenderStatus.CANCELLED self.status = RenderStatus.CANCELLED
def percent_complete(self): def percent_complete(self):
if self.status == RenderStatus.COMPLETED:
return 1.0
return 0 return 0
def _parse_stdout(self, line): def _parse_stdout(self, line):
@@ -321,7 +297,6 @@ class BaseRenderWorker(Base):
job_dict = { job_dict = {
'id': self.id, 'id': self.id,
'name': self.name, 'name': self.name,
'hostname': self.hostname,
'input_path': self.input_path, 'input_path': self.input_path,
'output_path': self.output_path, 'output_path': self.output_path,
'priority': self.priority, 'priority': self.priority,
@@ -341,8 +316,7 @@ class BaseRenderWorker(Base):
'end_frame': self.end_frame, 'end_frame': self.end_frame,
'total_frames': self.total_frames, 'total_frames': self.total_frames,
'last_output': getattr(self, 'last_output', None), 'last_output': getattr(self, 'last_output', None),
'log_path': self.log_path(), 'log_path': self.log_path()
'args': self.args
} }
# convert to json and back to auto-convert dates to iso format # convert to json and back to auto-convert dates to iso format

View File

@@ -2,11 +2,9 @@ import logging
import os import os
import shutil import shutil
import threading import threading
import concurrent.futures
from src.engines.blender.blender_engine import Blender from src.engines.blender.blender_engine import Blender
from src.engines.ffmpeg.ffmpeg_engine import FFMPEG from src.engines.ffmpeg.ffmpeg_engine import FFMPEG
from src.engines.aerender.aerender_engine import AERender
from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu from src.utilities.misc_helper import system_safe_path, current_system_os, current_system_cpu
logger = logging.getLogger() logger = logging.getLogger()
@@ -19,7 +17,7 @@ class EngineManager:
@staticmethod @staticmethod
def supported_engines(): def supported_engines():
return [Blender, FFMPEG, AERender] return [Blender, FFMPEG]
@classmethod @classmethod
def engine_with_name(cls, engine_name): def engine_with_name(cls, engine_name):
@@ -28,80 +26,57 @@ class EngineManager:
return obj return obj
@classmethod @classmethod
def get_engines(cls, filter_name=None): def all_engines(cls):
if not cls.engines_path: if not cls.engines_path:
raise FileNotFoundError("Engine path is not set") raise FileNotFoundError("Engines path must be set before requesting downloads")
# Parse downloaded engine directory # Parse downloaded engine directory
results = [] results = []
try: try:
all_items = os.listdir(cls.engines_path) all_items = os.listdir(cls.engines_path)
all_directories = [item for item in all_items if os.path.isdir(os.path.join(cls.engines_path, item))] all_directories = [item for item in all_items if os.path.isdir(os.path.join(cls.engines_path, item))]
keys = ["engine", "version", "system_os", "cpu"] # Define keys for result dictionary
for directory in all_directories: for directory in all_directories:
# Split directory name into segments # Split the input string by dashes to get segments
segments = directory.split('-') segments = directory.split('-')
# Create a dictionary mapping keys to corresponding segments
# Create a dictionary with named keys
keys = ["engine", "version", "system_os", "cpu"]
result_dict = {keys[i]: segments[i] for i in range(min(len(keys), len(segments)))} result_dict = {keys[i]: segments[i] for i in range(min(len(keys), len(segments)))}
result_dict['type'] = 'managed' result_dict['type'] = 'managed'
# Initialize binary_name with engine name # Figure out the binary name for the path
binary_name = result_dict['engine'].lower() binary_name = result_dict['engine'].lower()
# Determine the correct binary name based on the engine and system_os
for eng in cls.supported_engines(): for eng in cls.supported_engines():
if eng.name().lower() == result_dict['engine']: if eng.name().lower() == result_dict['engine']:
binary_name = eng.binary_names.get(result_dict['system_os'], binary_name) binary_name = eng.binary_names.get(result_dict['system_os'], binary_name)
# Find path to binary
path = None
for root, _, files in os.walk(system_safe_path(os.path.join(cls.engines_path, directory))):
if binary_name in files:
path = os.path.join(root, binary_name)
break break
# Find the path to the binary file
path = next(
(os.path.join(root, binary_name) for root, _, files in
os.walk(system_safe_path(os.path.join(cls.engines_path, directory))) if binary_name in files),
None
)
result_dict['path'] = path result_dict['path'] = path
# Add the result dictionary to results if it matches the filter_name or if no filter is applied
if not filter_name or filter_name == result_dict['engine']:
results.append(result_dict) results.append(result_dict)
except FileNotFoundError as e: except FileNotFoundError as e:
logger.warning(f"Cannot find local engines download directory: {e}") logger.warning(f"Cannot find local engines download directory: {e}")
# add system installs to this list - use bg thread because it can be slow # add system installs to this list
def fetch_engine_details(eng): for eng in cls.supported_engines():
return { if eng.default_renderer_path():
'engine': eng.name(), results.append({'engine': eng.name(), 'version': eng().version(),
'version': eng().version(),
'system_os': current_system_os(), 'system_os': current_system_os(),
'cpu': current_system_cpu(), 'cpu': current_system_cpu(),
'path': eng.default_renderer_path(), 'path': eng.default_renderer_path(), 'type': 'system'})
'type': 'system'
}
if not filter_name:
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = {
executor.submit(fetch_engine_details, eng): eng.name()
for eng in cls.supported_engines()
if eng.default_renderer_path()
}
for future in concurrent.futures.as_completed(futures):
result = future.result()
if result:
results.append(result)
else:
results.append(fetch_engine_details(cls.engine_with_name(filter_name)))
return results return results
@classmethod @classmethod
def all_versions_for_engine(cls, engine_name): def all_versions_for_engine(cls, engine):
versions = cls.get_engines(filter_name=engine_name) return [x for x in cls.all_engines() if x['engine'] == engine]
sorted_versions = sorted(versions, key=lambda x: x['version'], reverse=True)
return sorted_versions
@classmethod @classmethod
def newest_engine_version(cls, engine, system_os=None, cpu=None): def newest_engine_version(cls, engine, system_os=None, cpu=None):
@@ -109,9 +84,9 @@ class EngineManager:
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
try: try:
filtered = [x for x in cls.all_versions_for_engine(engine) if x['system_os'] == system_os and filtered = [x for x in cls.all_engines() if x['engine'] == engine and x['system_os'] == system_os and x['cpu'] == cpu]
x['cpu'] == cpu] versions = sorted(filtered, key=lambda x: x['version'], reverse=True)
return filtered[0] return versions[0]
except IndexError: except IndexError:
logger.error(f"Cannot find newest engine version for {engine}-{system_os}-{cpu}") logger.error(f"Cannot find newest engine version for {engine}-{system_os}-{cpu}")
return None return None
@@ -121,8 +96,8 @@ class EngineManager:
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
filtered = [x for x in cls.get_engines(filter_name=engine) if x['system_os'] == system_os and filtered = [x for x in cls.all_engines() if
x['cpu'] == cpu and x['version'] == version] x['engine'] == engine and x['system_os'] == system_os and x['cpu'] == cpu and x['version'] == version]
return filtered[0] if filtered else False return filtered[0] if filtered else False
@classmethod @classmethod
@@ -131,7 +106,6 @@ class EngineManager:
downloader = cls.engine_with_name(engine).downloader() downloader = cls.engine_with_name(engine).downloader()
return downloader.version_is_available_to_download(version=version, system_os=system_os, cpu=cpu) return downloader.version_is_available_to_download(version=version, system_os=system_os, cpu=cpu)
except Exception as e: except Exception as e:
logger.debug(f"Exception in version_is_available_to_download: {e}")
return None return None
@classmethod @classmethod
@@ -140,11 +114,10 @@ class EngineManager:
downloader = cls.engine_with_name(engine).downloader() downloader = cls.engine_with_name(engine).downloader()
return downloader.find_most_recent_version(system_os=system_os, cpu=cpu) return downloader.find_most_recent_version(system_os=system_os, cpu=cpu)
except Exception as e: except Exception as e:
logger.debug(f"Exception in find_most_recent_version: {e}")
return None return None
@classmethod @classmethod
def get_existing_download_task(cls, engine, version, system_os=None, cpu=None): def is_already_downloading(cls, engine, version, system_os=None, cpu=None):
for task in cls.download_tasks: for task in cls.download_tasks:
task_parts = task.name.split('-') task_parts = task.name.split('-')
task_engine, task_version, task_system_os, task_cpu = task_parts[:4] task_engine, task_version, task_system_os, task_cpu = task_parts[:4]
@@ -152,17 +125,26 @@ class EngineManager:
if engine == task_engine and version == task_version: if engine == task_engine and version == task_version:
if system_os in (task_system_os, None) and cpu in (task_cpu, None): if system_os in (task_system_os, None) and cpu in (task_cpu, None):
return task return task
return None return False
@classmethod @classmethod
def download_engine(cls, engine, version, system_os=None, cpu=None, background=False): def download_engine(cls, engine, version, system_os=None, cpu=None, background=False):
def download_engine_task(engine, version, system_os=None, cpu=None):
existing_download = cls.is_version_downloaded(engine, version, system_os, cpu)
if existing_download:
logger.info(f"Requested download of {engine} {version}, but local copy already exists")
return existing_download
# Get the appropriate downloader class based on the engine type
cls.engine_with_name(engine).downloader().download_engine(version, download_location=cls.engines_path,
system_os=system_os, cpu=cpu, timeout=300)
engine_to_download = cls.engine_with_name(engine) engine_to_download = cls.engine_with_name(engine)
existing_task = cls.get_existing_download_task(engine, version, system_os, cpu) existing_task = cls.is_already_downloading(engine, version, system_os, cpu)
if existing_task: if existing_task:
logger.debug(f"Already downloading {engine} {version}") logger.debug(f"Already downloading {engine} {version}")
if not background: if not background:
existing_task.join() # If download task exists, wait until it's done downloading existing_task.join() # If download task exists, wait until its done downloading
return return
elif not engine_to_download.downloader(): elif not engine_to_download.downloader():
logger.warning("No valid downloader for this engine. Please update this software manually.") logger.warning("No valid downloader for this engine. Please update this software manually.")
@@ -170,7 +152,8 @@ class EngineManager:
elif not cls.engines_path: elif not cls.engines_path:
raise FileNotFoundError("Engines path must be set before requesting downloads") raise FileNotFoundError("Engines path must be set before requesting downloads")
thread = EngineDownloadWorker(engine, version, system_os, cpu) thread = threading.Thread(target=download_engine_task, args=(engine, version, system_os, cpu),
name=f'{engine}-{version}-{system_os}-{cpu}')
cls.download_tasks.append(thread) cls.download_tasks.append(thread)
thread.start() thread.start()
@@ -183,6 +166,7 @@ class EngineManager:
logger.error(f"Error downloading {engine}") logger.error(f"Error downloading {engine}")
return found_engine return found_engine
@classmethod @classmethod
def delete_engine_download(cls, engine, version, system_os=None, cpu=None): def delete_engine_download(cls, engine, version, system_os=None, cpu=None):
logger.info(f"Requested deletion of engine: {engine}-{version}") logger.info(f"Requested deletion of engine: {engine}-{version}")
@@ -205,14 +189,14 @@ class EngineManager:
@classmethod @classmethod
def update_all_engines(cls): def update_all_engines(cls):
def engine_update_task(engine_class): def engine_update_task(engine):
logger.debug(f"Checking for updates to {engine_class.name()}") logger.debug(f"Checking for updates to {engine.name()}")
latest_version = engine_class.downloader().find_most_recent_version() latest_version = engine.downloader().find_most_recent_version()
if latest_version: if latest_version:
logger.debug(f"Latest version of {engine_class.name()} available: {latest_version.get('version')}") logger.debug(f"Latest version of {engine.name()} available: {latest_version.get('version')}")
if not cls.is_version_downloaded(engine_class.name(), latest_version.get('version')): if not cls.is_version_downloaded(engine.name(), latest_version.get('version')):
logger.info(f"Downloading latest version of {engine_class.name()}...") logger.info(f"Downloading latest version of {engine.name()}...")
cls.download_engine(engine=engine_class.name(), version=latest_version['version'], background=True) cls.download_engine(engine=engine.name(), version=latest_version['version'], background=True)
else: else:
logger.warning(f"Unable to get check for updates for {engine.name()}") logger.warning(f"Unable to get check for updates for {engine.name()}")
@@ -224,19 +208,20 @@ class EngineManager:
threads.append(thread) threads.append(thread)
thread.start() thread.start()
@classmethod @classmethod
def create_worker(cls, renderer, input_path, output_path, engine_version=None, args=None, parent=None, name=None): def create_worker(cls, renderer, input_path, output_path, engine_version=None, args=None, parent=None, name=None):
worker_class = cls.engine_with_name(renderer).worker_class() worker_class = cls.engine_with_name(renderer).worker_class()
# check to make sure we have versions installed # check to make sure we have versions installed
all_versions = cls.all_versions_for_engine(renderer) all_versions = EngineManager.all_versions_for_engine(renderer)
if not all_versions: if not all_versions:
raise FileNotFoundError(f"Cannot find any installed {renderer} engines") raise FileNotFoundError(f"Cannot find any installed {renderer} engines")
# Find the path to the requested engine version or use default # Find the path to the requested engine version or use default
engine_path = None engine_path = None if engine_version else all_versions[0]['path']
if engine_version and engine_version != 'latest': if engine_version:
for ver in all_versions: for ver in all_versions:
if ver['version'] == engine_version: if ver['version'] == engine_version:
engine_path = ver['path'] engine_path = ver['path']
@@ -244,14 +229,11 @@ class EngineManager:
# Download the required engine if not found locally # Download the required engine if not found locally
if not engine_path: if not engine_path:
download_result = cls.download_engine(renderer, engine_version) download_result = EngineManager.download_engine(renderer, engine_version)
if not download_result: if not download_result:
raise FileNotFoundError(f"Cannot download requested version: {renderer} {engine_version}") raise FileNotFoundError(f"Cannot download requested version: {renderer} {engine_version}")
engine_path = download_result['path'] engine_path = download_result['path']
logger.info("Engine downloaded. Creating worker.") logger.info("Engine downloaded. Creating worker.")
else:
logger.debug(f"Using latest engine version ({all_versions[0]['version']})")
engine_path = all_versions[0]['path']
if not engine_path: if not engine_path:
raise FileNotFoundError(f"Cannot find requested engine version {engine_version}") raise FileNotFoundError(f"Cannot find requested engine version {engine_version}")
@@ -261,7 +243,7 @@ class EngineManager:
@classmethod @classmethod
def engine_for_project_path(cls, path): def engine_for_project_path(cls, path):
_, extension = os.path.splitext(path) name, extension = os.path.splitext(path)
extension = extension.lower().strip('.') extension = extension.lower().strip('.')
for engine in cls.supported_engines(): for engine in cls.supported_engines():
if extension in engine.supported_extensions(): if extension in engine.supported_extensions():
@@ -270,34 +252,11 @@ class EngineManager:
return undefined_renderer_support[0] return undefined_renderer_support[0]
class EngineDownloadWorker(threading.Thread):
def __init__(self, engine, version, system_os=None, cpu=None):
super().__init__()
self.engine = engine
self.version = version
self.system_os = system_os
self.cpu = cpu
def run(self):
existing_download = EngineManager.is_version_downloaded(self.engine, self.version, self.system_os, self.cpu)
if existing_download:
logger.info(f"Requested download of {self.engine} {self.version}, but local copy already exists")
return existing_download
# Get the appropriate downloader class based on the engine type
EngineManager.engine_with_name(self.engine).downloader().download_engine(
self.version, download_location=EngineManager.engines_path, system_os=self.system_os, cpu=self.cpu,
timeout=300)
# remove itself from the downloader list
EngineManager.download_tasks.remove(self)
if __name__ == '__main__': if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# print(EngineManager.newest_engine_version('blender', 'macos', 'arm64')) # print(EngineManager.newest_engine_version('blender', 'macos', 'arm64'))
# EngineManager.delete_engine_download('blender', '3.2.1', 'macos', 'a') # EngineManager.delete_engine_download('blender', '3.2.1', 'macos', 'a')
EngineManager.engines_path = "/Users/brettwilliams/zordon-uploads/engines/" EngineManager.engines_path = "/Users/brettwilliams/zordon-uploads/engines"
# print(EngineManager.is_version_downloaded("ffmpeg", "6.0")) # print(EngineManager.is_version_downloaded("ffmpeg", "6.0"))
print(EngineManager.get_engines()) print(EngineManager.all_engines())

View File

@@ -90,7 +90,7 @@ class FFMPEGDownloader(EngineDownloader):
return releases return releases
@classmethod @classmethod
def all_versions(cls, system_os=None, cpu=None): def __all_versions(cls, system_os=None, cpu=None):
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
versions_per_os = {'linux': cls.__get_linux_versions, 'macos': cls.__get_macos_versions, versions_per_os = {'linux': cls.__get_linux_versions, 'macos': cls.__get_macos_versions,
@@ -131,14 +131,14 @@ class FFMPEGDownloader(EngineDownloader):
try: try:
system_os = system_os or current_system_os() system_os = system_os or current_system_os()
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
return cls.all_versions(system_os, cpu)[0] return cls.__all_versions(system_os, cpu)[0]
except (IndexError, requests.exceptions.RequestException): except (IndexError, requests.exceptions.RequestException):
logger.error(f"Cannot get most recent version of ffmpeg") logger.error(f"Cannot get most recent version of ffmpeg")
return {} return {}
@classmethod @classmethod
def version_is_available_to_download(cls, version, system_os=None, cpu=None): def version_is_available_to_download(cls, version, system_os=None, cpu=None):
for ver in cls.all_versions(system_os, cpu): for ver in cls.__all_versions(system_os, cpu):
if ver['version'] == version: if ver['version'] == version:
return ver return ver
return None return None
@@ -149,7 +149,7 @@ class FFMPEGDownloader(EngineDownloader):
cpu = cpu or current_system_cpu() cpu = cpu or current_system_cpu()
# Verify requested version is available # Verify requested version is available
found_version = [item for item in cls.all_versions(system_os, cpu) if item['version'] == version] found_version = [item for item in cls.__all_versions(system_os, cpu) if item['version'] == version]
if not found_version: if not found_version:
logger.error(f"Cannot find FFMPEG version {version} for {system_os} and {cpu}") logger.error(f"Cannot find FFMPEG version {version} for {system_os} and {cpu}")
return return
@@ -182,5 +182,4 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# print(FFMPEGDownloader.download_engine('6.0', '/Users/brett/zordon-uploads/engines/')) # print(FFMPEGDownloader.download_engine('6.0', '/Users/brett/zordon-uploads/engines/'))
# print(FFMPEGDownloader.find_most_recent_version(system_os='linux')) # print(FFMPEGDownloader.find_most_recent_version(system_os='linux'))
print(FFMPEGDownloader.download_engine(version='6.0', download_location='/Users/brett/zordon-uploads/engines/', print(FFMPEGDownloader.download_engine(version='6.0', download_location='/Users/brett/zordon-uploads/engines/', system_os='linux', cpu='x64'))
system_os='linux', cpu='x64'))

View File

@@ -5,6 +5,7 @@ from src.engines.core.base_engine import *
class FFMPEG(BaseRenderEngine): class FFMPEG(BaseRenderEngine):
binary_names = {'linux': 'ffmpeg', 'windows': 'ffmpeg.exe', 'macos': 'ffmpeg'} binary_names = {'linux': 'ffmpeg', 'windows': 'ffmpeg.exe', 'macos': 'ffmpeg'}
@staticmethod @staticmethod
@@ -12,33 +13,27 @@ class FFMPEG(BaseRenderEngine):
from src.engines.ffmpeg.ffmpeg_downloader import FFMPEGDownloader from src.engines.ffmpeg.ffmpeg_downloader import FFMPEGDownloader
return FFMPEGDownloader return FFMPEGDownloader
@classmethod @staticmethod
def worker_class(cls): def worker_class():
from src.engines.ffmpeg.ffmpeg_worker import FFMPEGRenderWorker from src.engines.ffmpeg.ffmpeg_worker import FFMPEGRenderWorker
return FFMPEGRenderWorker return FFMPEGRenderWorker
def ui_options(self, project_info):
from src.engines.ffmpeg.ffmpeg_ui import FFMPEGUI
return FFMPEGUI.get_options(self, project_info)
@classmethod @classmethod
def supported_extensions(cls): def supported_extensions(cls):
if not cls.file_extensions:
help_text = (subprocess.check_output([cls().renderer_path(), '-h', 'full'], stderr=subprocess.STDOUT) help_text = (subprocess.check_output([cls().renderer_path(), '-h', 'full'], stderr=subprocess.STDOUT)
.decode('utf-8')) .decode('utf-8'))
found = re.findall(r'extensions that .* is allowed to access \(default "(.*)"', help_text) found = re.findall('extensions that .* is allowed to access \(default "(.*)"', help_text)
found_extensions = set() found_extensions = set()
for match in found: for match in found:
found_extensions.update(match.split(',')) found_extensions.update(match.split(','))
cls.file_extensions = list(found_extensions) return list(found_extensions)
return cls.file_extensions
def version(self): def version(self):
version = None version = None
try: try:
ver_out = subprocess.check_output([self.renderer_path(), '-version'], ver_out = subprocess.check_output([self.renderer_path(), '-version'],
timeout=SUBPROCESS_TIMEOUT).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
match = re.match(r".*version\s*([\w.*]+)\W*", ver_out) match = re.match(".*version\s*(\S+)\s*Copyright", ver_out)
if match: if match:
version = match.groups()[0] version = match.groups()[0]
except Exception as e: except Exception as e:
@@ -52,8 +47,8 @@ class FFMPEG(BaseRenderEngine):
'ffprobe', '-v', 'quiet', '-print_format', 'json', 'ffprobe', '-v', 'quiet', '-print_format', 'json',
'-show_streams', '-select_streams', 'v', project_path '-show_streams', '-select_streams', 'v', project_path
] ]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True) result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
video_info = json.loads(output) video_info = json.loads(result.stdout)
# Extract the necessary information # Extract the necessary information
video_stream = video_info['streams'][0] video_stream = video_info['streams'][0]
@@ -84,7 +79,7 @@ class FFMPEG(BaseRenderEngine):
def get_encoders(self): def get_encoders(self):
raw_stdout = subprocess.check_output([self.renderer_path(), '-encoders'], stderr=subprocess.DEVNULL, raw_stdout = subprocess.check_output([self.renderer_path(), '-encoders'], stderr=subprocess.DEVNULL,
timeout=SUBPROCESS_TIMEOUT).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
pattern = r'(?P<type>[VASFXBD.]{6})\s+(?P<name>\S{2,})\s+(?P<description>.*)' pattern = '(?P<type>[VASFXBD.]{6})\s+(?P<name>\S{2,})\s+(?P<description>.*)'
encoders = [m.groupdict() for m in re.finditer(pattern, raw_stdout)] encoders = [m.groupdict() for m in re.finditer(pattern, raw_stdout)]
return encoders return encoders
@@ -96,7 +91,7 @@ class FFMPEG(BaseRenderEngine):
try: try:
formats_raw = subprocess.check_output([self.renderer_path(), '-formats'], stderr=subprocess.DEVNULL, formats_raw = subprocess.check_output([self.renderer_path(), '-formats'], stderr=subprocess.DEVNULL,
timeout=SUBPROCESS_TIMEOUT).decode('utf-8') timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
pattern = r'(?P<type>[DE]{1,2})\s+(?P<id>\S{2,})\s+(?P<name>.*)' pattern = '(?P<type>[DE]{1,2})\s+(?P<id>\S{2,})\s+(?P<name>.*)'
all_formats = [m.groupdict() for m in re.finditer(pattern, formats_raw)] all_formats = [m.groupdict() for m in re.finditer(pattern, formats_raw)]
return all_formats return all_formats
except Exception as e: except Exception as e:
@@ -126,7 +121,6 @@ class FFMPEG(BaseRenderEngine):
if match: if match:
frame_number = int(match[-1]) frame_number = int(match[-1])
return frame_number return frame_number
return -1
def get_arguments(self): def get_arguments(self):
help_text = (subprocess.check_output([self.renderer_path(), '-h', 'long'], stderr=subprocess.STDOUT) help_text = (subprocess.check_output([self.renderer_path(), '-h', 'long'], stderr=subprocess.STDOUT)

View File

@@ -1,5 +0,0 @@
class FFMPEGUI:
@staticmethod
def get_options(instance, project_info):
options = []
return options

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import re import re
import subprocess
from src.engines.core.base_worker import BaseRenderWorker from src.engines.core.base_worker import BaseRenderWorker
from src.engines.ffmpeg.ffmpeg_engine import FFMPEG from src.engines.ffmpeg.ffmpeg_engine import FFMPEG
@@ -16,7 +17,7 @@ class FFMPEGRenderWorker(BaseRenderWorker):
def generate_worker_subprocess(self): def generate_worker_subprocess(self):
cmd = [self.renderer_path, '-y', '-stats', '-i', self.input_path] cmd = [self.engine.default_renderer_path(), '-y', '-stats', '-i', self.input_path]
# Resize frame # Resize frame
if self.args.get('x_resolution', None) and self.args.get('y_resolution', None): if self.args.get('x_resolution', None) and self.args.get('y_resolution', None):
@@ -28,7 +29,7 @@ class FFMPEGRenderWorker(BaseRenderWorker):
cmd.extend(raw_args.split(' ')) cmd.extend(raw_args.split(' '))
# Close with output path # Close with output path
cmd.extend(['-max_muxing_queue_size', '1024', self.output_path]) cmd.append(self.output_path)
return cmd return cmd
def percent_complete(self): def percent_complete(self):

View File

@@ -8,10 +8,10 @@ from collections import deque
from PyQt6.QtCore import QObject, pyqtSignal from PyQt6.QtCore import QObject, pyqtSignal
from PyQt6.QtWidgets import QApplication from PyQt6.QtWidgets import QApplication
from .render_queue import RenderQueue
from .ui.main_window import MainWindow
from src.api.api_server import start_server from src.api.api_server import start_server
from src.engines.engine_manager import EngineManager
from src.render_queue import RenderQueue
from src.ui.main_window import MainWindow
from src.utilities.config import Config from src.utilities.config import Config
from src.utilities.misc_helper import system_safe_path from src.utilities.misc_helper import system_safe_path
@@ -24,13 +24,10 @@ def run() -> int:
int: The exit status code. int: The exit status code.
""" """
try:
# Load Config YAML # Load Config YAML
Config.setup_config_dir() config_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'config')
Config.load_config(system_safe_path(os.path.join(Config.config_dir(), 'config.yaml'))) Config.load_config(system_safe_path(os.path.join(config_dir, 'config.yaml')))
EngineManager.engines_path = system_safe_path(
os.path.join(os.path.join(os.path.expanduser(Config.upload_folder),
'engines')))
logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S',
level=Config.server_log_level.upper()) level=Config.server_log_level.upper())
@@ -52,11 +49,6 @@ def run() -> int:
window.show() window.show()
return_code = app.exec() return_code = app.exec()
except Exception as e:
logging.error(f"Unhandled exception: {e}")
return_code = 1
finally:
RenderQueue.prepare_for_shutdown() RenderQueue.prepare_for_shutdown()
return sys.exit(return_code) return sys.exit(return_code)

View File

@@ -32,7 +32,7 @@ class RenderQueue:
def add_to_render_queue(cls, render_job, force_start=False): def add_to_render_queue(cls, render_job, force_start=False):
logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job)) logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job))
cls.job_queue.append(render_job) cls.job_queue.append(render_job)
if force_start and render_job.status in (RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED): if force_start:
cls.start_job(render_job) cls.start_job(render_job)
cls.session.add(render_job) cls.session.add(render_job)
cls.save_state() cls.save_state()
@@ -97,6 +97,9 @@ class RenderQueue:
@classmethod @classmethod
def is_available_for_job(cls, renderer, priority=2): def is_available_for_job(cls, renderer, priority=2):
if not EngineManager.all_versions_for_engine(renderer):
return False
instances = cls.renderer_instances() instances = cls.renderer_instances()
higher_priority_jobs = [x for x in cls.running_jobs() if x.priority < priority] higher_priority_jobs = [x for x in cls.running_jobs() if x.priority < priority]
max_allowed_instances = cls.maximum_renderer_instances.get(renderer, 1) max_allowed_instances = cls.maximum_renderer_instances.get(renderer, 1)

View File

@@ -21,17 +21,10 @@ from src.utilities.zeroconf_server import ZeroconfServer
class NewRenderJobForm(QWidget): class NewRenderJobForm(QWidget):
def __init__(self, project_path=None): def __init__(self, project_path=None):
super().__init__() super().__init__()
self.notes_group = None
self.frame_rate_input = None
self.resolution_x_input = None
self.renderer_group = None
self.output_settings_group = None
self.resolution_y_input = None
self.project_path = project_path self.project_path = project_path
# UI # UI
self.project_group = None
self.load_file_group = None
self.current_engine_options = None self.current_engine_options = None
self.file_format_combo = None self.file_format_combo = None
self.renderer_options_layout = None self.renderer_options_layout = None
@@ -80,41 +73,41 @@ class NewRenderJobForm(QWidget):
# Main Layout # Main Layout
main_layout = QVBoxLayout(self) main_layout = QVBoxLayout(self)
# Loading File Group # Scene File Group
self.load_file_group = QGroupBox("Loading") scene_file_group = QGroupBox("Project")
load_file_layout = QVBoxLayout(self.load_file_group) scene_file_layout = QVBoxLayout(scene_file_group)
# progress bar
progress_layout = QHBoxLayout()
self.process_progress_bar = QProgressBar()
self.process_progress_bar.setMinimum(0)
self.process_progress_bar.setMaximum(0)
self.process_label = QLabel("Processing")
progress_layout.addWidget(self.process_label)
progress_layout.addWidget(self.process_progress_bar)
load_file_layout.addLayout(progress_layout)
main_layout.addWidget(self.load_file_group)
# Project Group
self.project_group = QGroupBox("Project")
server_layout = QVBoxLayout(self.project_group)
# File Path
scene_file_picker_layout = QHBoxLayout() scene_file_picker_layout = QHBoxLayout()
self.scene_file_input = QLineEdit() self.scene_file_input = QLineEdit()
self.scene_file_input.setText(self.project_path) self.scene_file_input.setText(self.project_path)
self.scene_file_browse_button = QPushButton("Browse...") self.scene_file_browse_button = QPushButton("Browse...")
self.scene_file_browse_button.clicked.connect(self.browse_scene_file) self.scene_file_browse_button.clicked.connect(self.browse_scene_file)
scene_file_picker_layout.addWidget(QLabel("File:"))
scene_file_picker_layout.addWidget(self.scene_file_input) scene_file_picker_layout.addWidget(self.scene_file_input)
scene_file_picker_layout.addWidget(self.scene_file_browse_button) scene_file_picker_layout.addWidget(self.scene_file_browse_button)
server_layout.addLayout(scene_file_picker_layout) scene_file_layout.addLayout(scene_file_picker_layout)
# progress bar
progress_layout = QHBoxLayout()
self.process_progress_bar = QProgressBar()
self.process_progress_bar.setMinimum(0)
self.process_progress_bar.setMaximum(0)
self.process_progress_bar.setHidden(True)
self.process_label = QLabel("Processing")
self.process_label.setHidden(True)
progress_layout.addWidget(self.process_label)
progress_layout.addWidget(self.process_progress_bar)
scene_file_layout.addLayout(progress_layout)
main_layout.addWidget(scene_file_group)
# Server Group
# Server List # Server List
self.server_group = QGroupBox("Server")
server_layout = QVBoxLayout(self.server_group)
server_list_layout = QHBoxLayout() server_list_layout = QHBoxLayout()
server_list_layout.setSpacing(0) server_list_layout.setSpacing(0)
self.server_input = QComboBox() self.server_input = QComboBox()
server_list_layout.addWidget(QLabel("Hostname:"), 1) server_list_layout.addWidget(QLabel("Hostname:"), 1)
server_list_layout.addWidget(self.server_input, 3) server_list_layout.addWidget(self.server_input, 3)
server_layout.addLayout(server_list_layout) server_layout.addLayout(server_list_layout)
main_layout.addWidget(self.project_group) main_layout.addWidget(self.server_group)
self.update_server_list() self.update_server_list()
# Priority # Priority
priority_layout = QHBoxLayout() priority_layout = QHBoxLayout()
@@ -192,7 +185,6 @@ class NewRenderJobForm(QWidget):
# Version # Version
renderer_layout.addWidget(QLabel("Version:")) renderer_layout.addWidget(QLabel("Version:"))
self.renderer_version_combo = QComboBox() self.renderer_version_combo = QComboBox()
self.renderer_version_combo.addItem('latest')
renderer_layout.addWidget(self.renderer_version_combo) renderer_layout.addWidget(self.renderer_version_combo)
renderer_group_layout.addLayout(renderer_layout) renderer_group_layout.addLayout(renderer_layout)
# dynamic options # dynamic options
@@ -243,7 +235,7 @@ class NewRenderJobForm(QWidget):
def update_renderer_info(self): def update_renderer_info(self):
# get the renderer info and add them all to the ui # get the renderer info and add them all to the ui
self.renderer_info = self.server_proxy.get_renderer_info(response_type='full') self.renderer_info = self.server_proxy.get_renderer_info()
self.renderer_type.addItems(self.renderer_info.keys()) self.renderer_type.addItems(self.renderer_info.keys())
# select the best renderer for the file type # select the best renderer for the file type
engine = EngineManager.engine_for_project_path(self.project_path) engine = EngineManager.engine_for_project_path(self.project_path)
@@ -255,7 +247,6 @@ class NewRenderJobForm(QWidget):
# load the version numbers # load the version numbers
current_renderer = self.renderer_type.currentText().lower() or self.renderer_type.itemText(0) current_renderer = self.renderer_type.currentText().lower() or self.renderer_type.itemText(0)
self.renderer_version_combo.clear() self.renderer_version_combo.clear()
self.renderer_version_combo.addItem('latest')
self.file_format_combo.clear() self.file_format_combo.clear()
if current_renderer: if current_renderer:
renderer_vers = [version_info['version'] for version_info in self.renderer_info[current_renderer]['versions']] renderer_vers = [version_info['version'] for version_info in self.renderer_info[current_renderer]['versions']]
@@ -319,25 +310,16 @@ class NewRenderJobForm(QWidget):
self.output_path_input.setText(os.path.basename(input_path)) self.output_path_input.setText(os.path.basename(input_path))
# cleanup progress UI # cleanup progress UI
self.load_file_group.setHidden(True) self.process_progress_bar.setHidden(True)
self.process_label.setHidden(True)
self.toggle_renderer_enablement(True) self.toggle_renderer_enablement(True)
# -- Load scene data # Load scene data
# start / end frames self.start_frame_input.setValue(self.project_info.get('frame_start'))
self.start_frame_input.setValue(self.project_info.get('start_frame', 0)) self.end_frame_input.setValue(self.project_info.get('frame_end'))
self.end_frame_input.setValue(self.project_info.get('end_frame', 0)) self.resolution_x_input.setValue(self.project_info.get('resolution_x'))
self.start_frame_input.setEnabled(bool(self.project_info.get('start_frame'))) self.resolution_y_input.setValue(self.project_info.get('resolution_y'))
self.end_frame_input.setEnabled(bool(self.project_info.get('start_frame'))) self.frame_rate_input.setValue(self.project_info.get('fps'))
# resolution
self.resolution_x_input.setValue(self.project_info.get('resolution_x', 1920))
self.resolution_y_input.setValue(self.project_info.get('resolution_y', 1080))
self.resolution_x_input.setEnabled(bool(self.project_info.get('resolution_x')))
self.resolution_y_input.setEnabled(bool(self.project_info.get('resolution_y')))
# frame rate
self.frame_rate_input.setValue(self.project_info.get('fps', 24))
self.frame_rate_input.setEnabled(bool(self.project_info.get('fps')))
# Cameras # Cameras
self.cameras_list.clear() self.cameras_list.clear()
@@ -360,10 +342,10 @@ class NewRenderJobForm(QWidget):
# Dynamic Engine Options # Dynamic Engine Options
clear_layout(self.renderer_options_layout) # clear old options clear_layout(self.renderer_options_layout) # clear old options
# dynamically populate option list # dynamically populate option list
self.current_engine_options = engine().ui_options(self.project_info) self.current_engine_options = engine().get_options()
for option in self.current_engine_options: for option in self.current_engine_options:
h_layout = QHBoxLayout() h_layout = QHBoxLayout()
label = QLabel(option['name'].replace('_', ' ').capitalize() + ':') label = QLabel(option['name'].capitalize() + ':')
h_layout.addWidget(label) h_layout.addWidget(label)
if option.get('options'): if option.get('options'):
combo_box = QComboBox() combo_box = QComboBox()
@@ -374,12 +356,12 @@ class NewRenderJobForm(QWidget):
text_box = QLineEdit() text_box = QLineEdit()
h_layout.addWidget(text_box) h_layout.addWidget(text_box)
self.renderer_options_layout.addLayout(h_layout) self.renderer_options_layout.addLayout(h_layout)
except AttributeError: except AttributeError as e:
pass pass
def toggle_renderer_enablement(self, enabled=False): def toggle_renderer_enablement(self, enabled=False):
"""Toggle on/off all the render settings""" """Toggle on/off all the render settings"""
self.project_group.setHidden(not enabled) self.server_group.setHidden(not enabled)
self.output_settings_group.setHidden(not enabled) self.output_settings_group.setHidden(not enabled)
self.renderer_group.setHidden(not enabled) self.renderer_group.setHidden(not enabled)
self.notes_group.setHidden(not enabled) self.notes_group.setHidden(not enabled)
@@ -460,7 +442,7 @@ class SubmitWorker(QThread):
hostname = self.window.server_input.currentText() hostname = self.window.server_input.currentText()
job_json = {'owner': psutil.Process().username() + '@' + socket.gethostname(), job_json = {'owner': psutil.Process().username() + '@' + socket.gethostname(),
'renderer': self.window.renderer_type.currentText().lower(), 'renderer': self.window.renderer_type.currentText().lower(),
'engine_version': self.window.renderer_version_combo.currentText(), 'renderer_version': self.window.renderer_version_combo.currentText(),
'args': {'raw': self.window.raw_args.text()}, 'args': {'raw': self.window.raw_args.text()},
'output_path': self.window.output_path_input.text(), 'output_path': self.window.output_path_input.text(),
'start_frame': self.window.start_frame_input.value(), 'start_frame': self.window.start_frame_input.value(),
@@ -506,12 +488,8 @@ class SubmitWorker(QThread):
engine = EngineManager.engine_with_name(self.window.renderer_type.currentText().lower()) engine = EngineManager.engine_with_name(self.window.renderer_type.currentText().lower())
input_path = engine().perform_presubmission_tasks(input_path) input_path = engine().perform_presubmission_tasks(input_path)
# submit # submit
result = None
try:
result = self.window.server_proxy.post_job_to_server(file_path=input_path, job_list=job_list, result = self.window.server_proxy.post_job_to_server(file_path=input_path, job_list=job_list,
callback=create_callback) callback=create_callback)
except Exception as e:
pass
self.message_signal.emit(result) self.message_signal.emit(result)

View File

@@ -4,7 +4,6 @@ import subprocess
import sys import sys
import threading import threading
from PyQt6.QtCore import QTimer
from PyQt6.QtWidgets import ( from PyQt6.QtWidgets import (
QMainWindow, QWidget, QVBoxLayout, QPushButton, QTableWidget, QTableWidgetItem, QHBoxLayout, QAbstractItemView, QMainWindow, QWidget, QVBoxLayout, QPushButton, QTableWidget, QTableWidgetItem, QHBoxLayout, QAbstractItemView,
QHeaderView, QProgressBar, QLabel, QMessageBox QHeaderView, QProgressBar, QLabel, QMessageBox
@@ -12,7 +11,7 @@ from PyQt6.QtWidgets import (
from src.api.server_proxy import RenderServerProxy from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager from src.engines.engine_manager import EngineManager
from src.utilities.misc_helper import is_localhost, launch_url from src.utilities.misc_helper import is_localhost
class EngineBrowserWindow(QMainWindow): class EngineBrowserWindow(QMainWindow):
@@ -29,7 +28,6 @@ class EngineBrowserWindow(QMainWindow):
self.setGeometry(100, 100, 500, 300) self.setGeometry(100, 100, 500, 300)
self.engine_data = [] self.engine_data = []
self.initUI() self.initUI()
self.init_timer()
def initUI(self): def initUI(self):
# Central widget # Central widget
@@ -84,12 +82,6 @@ class EngineBrowserWindow(QMainWindow):
self.update_download_status() self.update_download_status()
def init_timer(self):
# Set up the timer
self.timer = QTimer(self)
self.timer.timeout.connect(self.update_download_status)
self.timer.start(1000)
def update_table(self): def update_table(self):
def update_table_worker(): def update_table_worker():
@@ -98,7 +90,7 @@ class EngineBrowserWindow(QMainWindow):
return return
table_data = [] # convert the data into a flat list table_data = [] # convert the data into a flat list
for _, engine_data in raw_server_data.items(): for engine_name, engine_data in raw_server_data.items():
table_data.extend(engine_data['versions']) table_data.extend(engine_data['versions'])
self.engine_data = table_data self.engine_data = table_data
@@ -132,19 +124,21 @@ class EngineBrowserWindow(QMainWindow):
hide_progress = not bool(running_tasks) hide_progress = not bool(running_tasks)
self.progress_bar.setHidden(hide_progress) self.progress_bar.setHidden(hide_progress)
self.progress_label.setHidden(hide_progress) self.progress_label.setHidden(hide_progress)
# Update the status labels
if len(EngineManager.download_tasks) == 0: # todo: update progress bar with status
new_status = "" self.progress_label.setText(f"Downloading {len(running_tasks)} engines")
elif len(EngineManager.download_tasks) == 1:
task = EngineManager.download_tasks[0]
new_status = f"Downloading {task.engine.capitalize()} {task.version}..."
else:
new_status = f"Downloading {len(EngineManager.download_tasks)} engines..."
self.progress_label.setText(new_status)
def launch_button_click(self): def launch_button_click(self):
engine_info = self.engine_data[self.table_widget.currentRow()] engine_info = self.engine_data[self.table_widget.currentRow()]
launch_url(engine_info['path']) path = engine_info['path']
if sys.platform.startswith('darwin'):
subprocess.run(['open', path])
elif sys.platform.startswith('win32'):
os.startfile(path)
elif sys.platform.startswith('linux'):
subprocess.run(['xdg-open', path])
else:
raise OSError("Unsupported operating system")
def install_button_click(self): def install_button_click(self):
self.update_download_status() self.update_download_status()

View File

@@ -1,14 +1,13 @@
''' app/ui/main_window.py ''' ''' app/ui/main_window.py '''
import datetime import datetime
import io
import logging import logging
import os import os
import socket
import subprocess import subprocess
import sys import sys
import threading import threading
import time import time
import PIL
from PIL import Image from PIL import Image
from PyQt6.QtCore import Qt, QByteArray, QBuffer, QIODevice, QThread from PyQt6.QtCore import Qt, QByteArray, QBuffer, QIODevice, QThread
from PyQt6.QtGui import QPixmap, QImage, QFont, QIcon from PyQt6.QtGui import QPixmap, QImage, QFont, QIcon
@@ -16,6 +15,7 @@ from PyQt6.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QListWidget, QTab
QTableWidgetItem, QLabel, QVBoxLayout, QHeaderView, QMessageBox, QGroupBox, QPushButton, QListWidgetItem, \ QTableWidgetItem, QLabel, QVBoxLayout, QHeaderView, QMessageBox, QGroupBox, QPushButton, QListWidgetItem, \
QFileDialog QFileDialog
from src.api.server_proxy import RenderServerProxy
from src.render_queue import RenderQueue from src.render_queue import RenderQueue
from src.utilities.misc_helper import get_time_elapsed, resources_dir, is_localhost from src.utilities.misc_helper import get_time_elapsed, resources_dir, is_localhost
from src.utilities.status_utils import RenderStatus from src.utilities.status_utils import RenderStatus
@@ -29,7 +29,6 @@ from .widgets.proportional_image_label import ProportionalImageLabel
from .widgets.statusbar import StatusBar from .widgets.statusbar import StatusBar
from .widgets.toolbar import ToolBar from .widgets.toolbar import ToolBar
from src.api.serverproxy_manager import ServerProxyManager from src.api.serverproxy_manager import ServerProxyManager
from src.utilities.misc_helper import launch_url
logger = logging.getLogger() logger = logging.getLogger()
@@ -49,11 +48,6 @@ class MainWindow(QMainWindow):
super().__init__() super().__init__()
# Load the queue # Load the queue
self.job_list_view = None
self.server_info_ram = None
self.server_info_cpu = None
self.server_info_os = None
self.server_info_hostname = None
self.engine_browser_window = None self.engine_browser_window = None
self.server_info_group = None self.server_info_group = None
self.current_hostname = None self.current_hostname = None
@@ -73,7 +67,7 @@ class MainWindow(QMainWindow):
# Create a QLabel widget to display the image # Create a QLabel widget to display the image
self.image_label = ProportionalImageLabel() self.image_label = ProportionalImageLabel()
self.image_label.setMaximumSize(700, 500) self.image_label.setMaximumSize(700, 500)
self.image_label.setFixedHeight(300) self.image_label.setFixedHeight(500)
self.image_label.setAlignment(Qt.AlignmentFlag.AlignTop | Qt.AlignmentFlag.AlignHCenter) self.image_label.setAlignment(Qt.AlignmentFlag.AlignTop | Qt.AlignmentFlag.AlignHCenter)
self.load_image_path(os.path.join(resources_dir(), 'Rectangle.png')) self.load_image_path(os.path.join(resources_dir(), 'Rectangle.png'))
@@ -284,25 +278,15 @@ class MainWindow(QMainWindow):
def fetch_preview(job_id): def fetch_preview(job_id):
try: try:
default_image_path = "error.png"
before_fetch_hostname = self.current_server_proxy.hostname before_fetch_hostname = self.current_server_proxy.hostname
response = self.current_server_proxy.request(f'job/{job_id}/thumbnail?size=big') response = self.current_server_proxy.request(f'job/{job_id}/thumbnail?size=big')
if response.ok: if response.ok:
try: import io
with io.BytesIO(response.content) as image_data_stream: image_data = response.content
image = Image.open(image_data_stream) image = Image.open(io.BytesIO(image_data))
if self.current_server_proxy.hostname == before_fetch_hostname and job_id == \ if self.current_server_proxy.hostname == before_fetch_hostname and job_id == \
self.selected_job_ids()[0]: self.selected_job_ids()[0]:
self.load_image_data(image) self.load_image_data(image)
return
except PIL.UnidentifiedImageError:
default_image_path = response.text
else:
default_image_path = default_image_path or response.text
self.load_image_path(os.path.join(resources_dir(), default_image_path))
except ConnectionError as e: except ConnectionError as e:
logger.error(f"Connection error fetching image: {e}") logger.error(f"Connection error fetching image: {e}")
except Exception as e: except Exception as e:
@@ -345,15 +329,12 @@ class MainWindow(QMainWindow):
self.topbar.actions_call['Open Files'].setVisible(False) self.topbar.actions_call['Open Files'].setVisible(False)
def selected_job_ids(self): def selected_job_ids(self):
try:
selected_rows = self.job_list_view.selectionModel().selectedRows() selected_rows = self.job_list_view.selectionModel().selectedRows()
job_ids = [] job_ids = []
for selected_row in selected_rows: for selected_row in selected_rows:
id_item = self.job_list_view.item(selected_row.row(), 0) id_item = self.job_list_view.item(selected_row.row(), 0)
job_ids.append(id_item.text()) job_ids.append(id_item.text())
return job_ids return job_ids
except AttributeError:
return []
def refresh_job_headers(self): def refresh_job_headers(self):
self.job_list_view.setHorizontalHeaderLabels(["ID", "Name", "Renderer", "Priority", "Status", self.job_list_view.setHorizontalHeaderLabels(["ID", "Name", "Renderer", "Priority", "Status",
@@ -420,7 +401,7 @@ class MainWindow(QMainWindow):
for hostname in found_servers: for hostname in found_servers:
if hostname not in current_server_list: if hostname not in current_server_list:
properties = ZeroconfServer.get_hostname_properties(hostname) properties = ZeroconfServer.get_hostname_properties(hostname)
image_path = os.path.join(resources_dir(), f"{properties.get('system_os', 'Monitor')}.png") image_path = os.path.join(resources_dir(), 'icons', f"{properties.get('system_os', 'Monitor')}.png")
list_widget = QListWidgetItem(QIcon(image_path), hostname) list_widget = QListWidgetItem(QIcon(image_path), hostname)
self.server_list_view.addItem(list_widget) self.server_list_view.addItem(list_widget)
@@ -457,22 +438,23 @@ class MainWindow(QMainWindow):
# Top Toolbar Buttons # Top Toolbar Buttons
self.topbar.add_button( self.topbar.add_button(
"Console", f"{resources_directory}/Console.png", self.open_console_window) "New Job", f"{resources_directory}/icons/AddProduct.png", self.new_job)
self.topbar.add_button( self.topbar.add_button(
"Engines", f"{resources_directory}/SoftwareInstaller.png", self.engine_browser) "Engines", f"{resources_directory}/icons/SoftwareInstaller.png", self.engine_browser)
self.topbar.add_button(
"Console", f"{resources_directory}/icons/Console.png", self.open_console_window)
self.topbar.add_separator() self.topbar.add_separator()
self.topbar.add_button( self.topbar.add_button(
"Stop Job", f"{resources_directory}/StopSign.png", self.stop_job) "Stop Job", f"{resources_directory}/icons/StopSign.png", self.stop_job)
self.topbar.add_button( self.topbar.add_button(
"Delete Job", f"{resources_directory}/Trash.png", self.delete_job) "Delete Job", f"{resources_directory}/icons/Trash.png", self.delete_job)
self.topbar.add_button( self.topbar.add_button(
"Render Log", f"{resources_directory}/Document.png", self.job_logs) "Render Log", f"{resources_directory}/icons/Document.png", self.job_logs)
self.topbar.add_button( self.topbar.add_button(
"Download", f"{resources_directory}/Download.png", self.download_files) "Download", f"{resources_directory}/icons/Download.png", self.download_files)
self.topbar.add_button( self.topbar.add_button(
"Open Files", f"{resources_directory}/SearchFolder.png", self.open_files) "Open Files", f"{resources_directory}/icons/SearchFolder.png", self.open_files)
self.topbar.add_button(
"New Job", f"{resources_directory}/AddProduct.png", self.new_job)
self.addToolBar(Qt.ToolBarArea.TopToolBarArea, self.topbar) self.addToolBar(Qt.ToolBarArea.TopToolBarArea, self.topbar)
# -- Toolbar Buttons -- # # -- Toolbar Buttons -- #
@@ -564,7 +546,15 @@ class MainWindow(QMainWindow):
for job_id in job_ids: for job_id in job_ids:
job_info = self.current_server_proxy.get_job_info(job_id) job_info = self.current_server_proxy.get_job_info(job_id)
path = os.path.dirname(job_info['output_path']) path = os.path.dirname(job_info['output_path'])
launch_url(path)
if sys.platform.startswith('darwin'):
subprocess.run(['open', path])
elif sys.platform.startswith('win32'):
os.startfile(path)
elif sys.platform.startswith('linux'):
subprocess.run(['xdg-open', path])
else:
raise OSError("Unsupported operating system")
def new_job(self) -> None: def new_job(self) -> None:

1
src/ui/widgets/dialog.py Normal file
View File

@@ -0,0 +1 @@
''' app/ui/widgets/dialog.py '''

View File

@@ -9,7 +9,6 @@ from PyQt6.QtGui import QPixmap
from PyQt6.QtWidgets import QStatusBar, QLabel from PyQt6.QtWidgets import QStatusBar, QLabel
from src.api.server_proxy import RenderServerProxy from src.api.server_proxy import RenderServerProxy
from src.engines.engine_manager import EngineManager
from src.utilities.misc_helper import resources_dir from src.utilities.misc_helper import resources_dir
@@ -29,23 +28,17 @@ class StatusBar(QStatusBar):
proxy = RenderServerProxy(socket.gethostname()) proxy = RenderServerProxy(socket.gethostname())
proxy.start_background_update() proxy.start_background_update()
image_names = {'Ready': 'GreenCircle.png', 'Offline': "RedSquare.png"} image_names = {'Ready': 'GreenCircle.png', 'Offline': "RedSquare.png"}
last_update = None
# Check for status change every 1s on background thread # Check for status change every 1s on background thread
while True: while True:
new_status = proxy.status() new_status = proxy.status()
if new_status is not last_update:
new_image_name = image_names.get(new_status, 'Synchronize.png') new_image_name = image_names.get(new_status, 'Synchronize.png')
image_path = os.path.join(resources_dir(), new_image_name) image_path = os.path.join(resources_dir(), 'icons', new_image_name)
self.label.setPixmap((QPixmap(image_path).scaled(16, 16, Qt.AspectRatioMode.KeepAspectRatio))) self.label.setPixmap((QPixmap(image_path).scaled(16, 16, Qt.AspectRatioMode.KeepAspectRatio)))
# add download status
if EngineManager.download_tasks:
if len(EngineManager.download_tasks) == 1:
task = EngineManager.download_tasks[0]
new_status = f"{new_status} | Downloading {task.engine.capitalize()} {task.version}..."
else:
new_status = f"{new_status} | Downloading {len(EngineManager.download_tasks)} engines"
self.messageLabel.setText(new_status) self.messageLabel.setText(new_status)
last_update = new_status
time.sleep(1) time.sleep(1)
background_thread = threading.Thread(target=background_update,) background_thread = threading.Thread(target=background_update,)
@@ -54,7 +47,7 @@ class StatusBar(QStatusBar):
# Create a label that holds an image # Create a label that holds an image
self.label = QLabel() self.label = QLabel()
image_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'resources', image_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'resources', 'icons',
'RedSquare.png') 'RedSquare.png')
pixmap = (QPixmap(image_path).scaled(16, 16, Qt.AspectRatioMode.KeepAspectRatio)) pixmap = (QPixmap(image_path).scaled(16, 16, Qt.AspectRatioMode.KeepAspectRatio))
self.label.setPixmap(pixmap) self.label.setPixmap(pixmap)

View File

@@ -1,78 +0,0 @@
import concurrent.futures
import os
import time
import logging
logger = logging.getLogger()
def cpu_workload(n):
    """Busy-loop workload: count ``n`` down to zero one step at a time.

    The repeated subtraction is intentional — it keeps one core busy with
    pure-Python arithmetic so the benchmark can time it. Returns 0 for
    positive ``n`` and ``n`` unchanged for zero/negative input.
    """
    remaining = n
    while remaining > 0:
        remaining -= 1
    return remaining
def cpu_benchmark(duration_seconds=10, workload_per_core=10000000):
    """Score CPU throughput by running a countdown workload on every core.

    Submits one ``cpu_workload`` task per available core to a process pool,
    waits up to ``duration_seconds`` for them to finish, and converts the
    completed operations per second into a scaled integer score.

    Args:
        duration_seconds: Maximum time to wait for the submitted tasks.
        workload_per_core: Decrement operations each per-core task performs.

    Returns:
        int: Scaled operations-per-second score (0 if nothing finished).
    """
    # os.cpu_count() can return None on some platforms; fall back to 1.
    num_cores = os.cpu_count() or 1

    executor = concurrent.futures.ProcessPoolExecutor()
    try:
        start_time = time.time()
        # Launch one task per core
        futures = [executor.submit(cpu_workload, workload_per_core) for _ in range(num_cores)]
        # Wait with a timeout so a slow machine cannot stall the benchmark
        done, not_done = concurrent.futures.wait(futures, timeout=duration_seconds)
        end_time = time.time()
        # Cancel anything still pending so it never starts
        for future in not_done:
            future.cancel()
    finally:
        # Do not block on stragglers: the previous `with` block implicitly
        # called shutdown(wait=True), which waited for every future and
        # defeated the timeout above.
        executor.shutdown(wait=False)

    # Only credit work that actually finished inside the window; counting
    # all submitted operations inflated the score whenever the wait timed out.
    total_operations = workload_per_core * len(done)
    total_time = end_time - start_time
    if total_time <= 0:
        return 0

    # Operations per second, scaled down to a friendlier magnitude
    score = total_operations / total_time
    score = score * 0.0001
    return int(score)
def disk_io_benchmark(file_size_mb=100, filename='benchmark_test_file'):
    """Measure sequential disk write and read throughput.

    Writes ``file_size_mb`` MB of random data to ``filename``, reads it
    back, and removes the scratch file afterwards — even if a measurement
    raises (the original leaked the file on failure).

    Args:
        file_size_mb: Size of the test payload in megabytes.
        filename: Path of the scratch file used for the measurement.

    Returns:
        tuple: (write_speed, read_speed) in MB/s.
    """
    # Generate the payload up front so random-number generation time does
    # not contaminate the write measurement (it was previously timed too).
    payload = os.urandom(file_size_mb * 1024 * 1024)
    try:
        # Measure write speed
        start_time = time.time()
        with open(filename, 'wb') as f:
            f.write(payload)
        write_time = time.time() - start_time

        # Measure read speed
        start_time = time.time()
        with open(filename, 'rb') as f:
            f.read()
        read_time = time.time() - start_time
    finally:
        # Always clean up the scratch file
        if os.path.exists(filename):
            os.remove(filename)

    # Guard against a zero clock delta on very small payloads / coarse timers
    write_speed = file_size_mb / write_time if write_time > 0 else float('inf')
    read_speed = file_size_mb / read_time if read_time > 0 else float('inf')

    logger.debug(f"Disk Write Speed: {write_speed:.2f} MB/s")
    logger.debug(f"Disk Read Speed: {read_speed:.2f} MB/s")
    return write_speed, read_speed
if __name__ == '__main__':
    # Manual smoke test: run both benchmarks and print their raw results.
    print(cpu_benchmark())
    print(disk_io_benchmark())

View File

@@ -1,6 +1,5 @@
import os import os
import yaml import yaml
from src.utilities.misc_helper import current_system_os, copy_directory_contents
class Config: class Config:
@@ -35,40 +34,3 @@ class Config:
cls.port_number = cfg.get('port_number', cls.port_number) cls.port_number = cfg.get('port_number', cls.port_number)
cls.enable_split_jobs = cfg.get('enable_split_jobs', cls.enable_split_jobs) cls.enable_split_jobs = cfg.get('enable_split_jobs', cls.enable_split_jobs)
cls.download_timeout_seconds = cfg.get('download_timeout_seconds', cls.download_timeout_seconds) cls.download_timeout_seconds = cfg.get('download_timeout_seconds', cls.download_timeout_seconds)
@classmethod
def config_dir(cls):
# Set up the config path
if current_system_os() == 'macos':
local_config_path = os.path.expanduser('~/Library/Application Support/Zordon')
elif current_system_os() == 'windows':
local_config_path = os.path.join(os.environ['APPDATA'], 'Zordon')
else:
local_config_path = os.path.expanduser('~/.config/Zordon')
return local_config_path
@classmethod
def setup_config_dir(cls):
# Set up the config path
local_config_dir = cls.config_dir()
if os.path.exists(local_config_dir):
return
try:
# Create the local configuration directory
os.makedirs(local_config_dir)
# Determine the template path
resource_environment_path = os.environ.get('RESOURCEPATH')
if resource_environment_path:
template_path = os.path.join(resource_environment_path, 'config')
else:
template_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'config')
# Copy contents from the template to the local configuration directory
copy_directory_contents(template_path, local_config_dir)
except Exception as e:
print(f"An error occurred while setting up the config directory: {e}")
raise

View File

@@ -4,10 +4,9 @@ from src.engines.ffmpeg.ffmpeg_engine import FFMPEG
def image_sequence_to_video(source_glob_pattern, output_path, framerate=24, encoder="prores_ks", profile=4, def image_sequence_to_video(source_glob_pattern, output_path, framerate=24, encoder="prores_ks", profile=4,
start_frame=1): start_frame=1):
subprocess.run([FFMPEG.default_renderer_path(), "-framerate", str(framerate), "-start_number", subprocess.run([FFMPEG.default_renderer_path(), "-framerate", str(framerate), "-start_number", str(start_frame), "-i",
str(start_frame), "-i", f"{source_glob_pattern}", "-c:v", encoder, "-profile:v", str(profile), f"{source_glob_pattern}", "-c:v", encoder, "-profile:v", str(profile), '-pix_fmt', 'yuva444p10le',
'-pix_fmt', 'yuva444p10le', output_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, output_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
check=True)
def save_first_frame(source_path, dest_path, max_width=1280): def save_first_frame(source_path, dest_path, max_width=1280):

View File

@@ -1,9 +1,7 @@
import logging import logging
import os import os
import platform import platform
import shutil
import socket import socket
import string
import subprocess import subprocess
from datetime import datetime from datetime import datetime
@@ -11,27 +9,14 @@ logger = logging.getLogger()
def launch_url(url): def launch_url(url):
logger = logging.getLogger(__name__) if subprocess.run(['which', 'xdg-open'], capture_output=True).returncode == 0:
subprocess.run(['xdg-open', url]) # linux
if shutil.which('xdg-open'): elif subprocess.run(['which', 'open'], capture_output=True).returncode == 0:
opener = 'xdg-open' subprocess.run(['open', url]) # macos
elif shutil.which('open'): elif subprocess.run(['which', 'start'], capture_output=True).returncode == 0:
opener = 'open' subprocess.run(['start', url]) # windows - need to validate this works
elif shutil.which('cmd'):
opener = 'start'
else: else:
error_message = f"No valid launchers found to launch URL: {url}" logger.error(f"No valid launchers found to launch url: {url}")
logger.error(error_message)
raise OSError(error_message)
try:
if opener == 'start':
# For Windows, use 'cmd /c start'
subprocess.run(['cmd', '/c', 'start', url], shell=False)
else:
subprocess.run([opener, url])
except Exception as e:
logger.error(f"Failed to launch URL: {url}. Error: {e}")
def file_exists_in_mounts(filepath): def file_exists_in_mounts(filepath):
@@ -49,9 +34,9 @@ def file_exists_in_mounts(filepath):
path = os.path.normpath(path) path = os.path.normpath(path)
components = [] components = []
while True: while True:
path, comp = os.path.split(path) path, component = os.path.split(path)
if comp: if component:
components.append(comp) components.append(component)
else: else:
if path: if path:
components.append(path) components.append(path)
@@ -77,17 +62,20 @@ def file_exists_in_mounts(filepath):
def get_time_elapsed(start_time=None, end_time=None): def get_time_elapsed(start_time=None, end_time=None):
from string import Template
class DeltaTemplate(Template):
delimiter = "%"
def strfdelta(tdelta, fmt='%H:%M:%S'): def strfdelta(tdelta, fmt='%H:%M:%S'):
days = tdelta.days d = {"D": tdelta.days}
hours, rem = divmod(tdelta.seconds, 3600) hours, rem = divmod(tdelta.seconds, 3600)
minutes, seconds = divmod(rem, 60) minutes, seconds = divmod(rem, 60)
d["H"] = '{:02d}'.format(hours)
# Using f-strings for formatting d["M"] = '{:02d}'.format(minutes)
formatted_str = fmt.replace('%D', f'{days}') d["S"] = '{:02d}'.format(seconds)
formatted_str = formatted_str.replace('%H', f'{hours:02d}') t = DeltaTemplate(fmt)
formatted_str = formatted_str.replace('%M', f'{minutes:02d}') return t.substitute(**d)
formatted_str = formatted_str.replace('%S', f'{seconds:02d}')
return formatted_str
# calculate elapsed time # calculate elapsed time
elapsed_time = None elapsed_time = None
@@ -105,7 +93,7 @@ def get_time_elapsed(start_time=None, end_time=None):
def get_file_size_human(file_path): def get_file_size_human(file_path):
size_in_bytes = os.path.getsize(file_path) size_in_bytes = os.path.getsize(file_path)
# Convert size to a human-readable format # Convert size to a human readable format
if size_in_bytes < 1024: if size_in_bytes < 1024:
return f"{size_in_bytes} B" return f"{size_in_bytes} B"
elif size_in_bytes < 1024 ** 2: elif size_in_bytes < 1024 ** 2:
@@ -139,24 +127,15 @@ def current_system_cpu():
def resources_dir(): def resources_dir():
resource_environment_path = os.environ.get('RESOURCEPATH', None) resources_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
if resource_environment_path: # running inside resource bundle 'resources')
return os.path.join(resource_environment_path, 'resources') return resources_directory
else:
return os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'resources')
def copy_directory_contents(src_dir, dst_dir): def config_dir():
""" config_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
Copy the contents of the source directory (src_dir) to the destination directory (dst_dir). 'config')
""" return config_directory
for item in os.listdir(src_dir):
src_path = os.path.join(src_dir, item)
dst_path = os.path.join(dst_dir, item)
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path, dirs_exist_ok=True)
else:
shutil.copy2(src_path, dst_path)
def is_localhost(comparison_hostname): def is_localhost(comparison_hostname):
@@ -167,19 +146,3 @@ def is_localhost(comparison_hostname):
return comparison_hostname == local_hostname return comparison_hostname == local_hostname
except AttributeError: except AttributeError:
return False return False
def num_to_alphanumeric(num):
# List of possible alphanumeric characters
characters = string.ascii_letters + string.digits
# Make sure number is positive
num = abs(num)
# Convert number to alphanumeric
result = ""
while num > 0:
num, remainder = divmod(num, len(characters))
result += characters[remainder]
return result[::-1] # Reverse the result to get the correct alphanumeric string

View File

@@ -22,10 +22,6 @@ class ZeroconfServer:
cls.service_type = service_type cls.service_type = service_type
cls.server_name = server_name cls.server_name = server_name
cls.server_port = server_port cls.server_port = server_port
try: # Stop any previously running instances
socket.gethostbyname(socket.gethostname())
except socket.gaierror:
cls.stop()
@classmethod @classmethod
def start(cls, listen_only=False): def start(cls, listen_only=False):
@@ -56,7 +52,7 @@ class ZeroconfServer:
cls.service_info = info cls.service_info = info
cls.zeroconf.register_service(info) cls.zeroconf.register_service(info)
logger.info(f"Registered zeroconf service: {cls.service_info.name}") logger.info(f"Registered zeroconf service: {cls.service_info.name}")
except (NonUniqueNameException, socket.gaierror) as e: except NonUniqueNameException as e:
logger.error(f"Error establishing zeroconf: {e}") logger.error(f"Error establishing zeroconf: {e}")
@classmethod @classmethod
@@ -74,34 +70,34 @@ class ZeroconfServer:
@classmethod @classmethod
def _on_service_discovered(cls, zeroconf, service_type, name, state_change): def _on_service_discovered(cls, zeroconf, service_type, name, state_change):
info = zeroconf.get_service_info(service_type, name) info = zeroconf.get_service_info(service_type, name)
hostname = name.split(f'.{cls.service_type}')[0] logger.debug(f"Zeroconf: {name} {state_change}")
logger.debug(f"Zeroconf: {hostname} {state_change}")
if service_type == cls.service_type: if service_type == cls.service_type:
if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated: if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated:
cls.client_cache[hostname] = info cls.client_cache[name] = info
else: else:
cls.client_cache.pop(hostname) cls.client_cache.pop(name)
pub.sendMessage('zeroconf_state_change', hostname=hostname, state_change=state_change) pub.sendMessage('zeroconf_state_change', hostname=name, state_change=state_change, info=info)
@classmethod @classmethod
def found_hostnames(cls): def found_hostnames(cls):
fetched_hostnames = [x.split(f'.{cls.service_type}')[0] for x in cls.client_cache.keys()]
local_hostname = socket.gethostname() local_hostname = socket.gethostname()
# Define a sort key function
def sort_key(hostname): def sort_key(hostname):
# Return 0 if it's the local hostname so it comes first, else return 1 # Return 0 if it's the local hostname so it comes first, else return 1
return False if hostname == local_hostname else True return False if hostname == local_hostname else True
# Sort the list with the local hostname first # Sort the list with the local hostname first
sorted_hostnames = sorted(cls.client_cache.keys(), key=sort_key) sorted_hostnames = sorted(fetched_hostnames, key=sort_key)
return sorted_hostnames return sorted_hostnames
@classmethod @classmethod
def get_hostname_properties(cls, hostname): def get_hostname_properties(cls, hostname):
server_info = cls.client_cache.get(hostname).properties new_key = hostname + '.' + cls.service_type
server_info = cls.client_cache.get(new_key).properties
decoded_server_info = {key.decode('utf-8'): value.decode('utf-8') for key, value in server_info.items()} decoded_server_info = {key.decode('utf-8'): value.decode('utf-8') for key, value in server_info.items()}
return decoded_server_info return decoded_server_info
# Example usage: # Example usage:
if __name__ == "__main__": if __name__ == "__main__":
ZeroconfServer.configure("_zordon._tcp.local.", "foobar.local", 8080) ZeroconfServer.configure("_zordon._tcp.local.", "foobar.local", 8080)

View File

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

Before

Width:  |  Height:  |  Size: 995 B

After

Width:  |  Height:  |  Size: 995 B

View File

Before

Width:  |  Height:  |  Size: 81 KiB

After

Width:  |  Height:  |  Size: 81 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

Before

Width:  |  Height:  |  Size: 2.6 KiB

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

Before

Width:  |  Height:  |  Size: 2.1 KiB

After

Width:  |  Height:  |  Size: 2.1 KiB

View File

Before

Width:  |  Height:  |  Size: 66 KiB

After

Width:  |  Height:  |  Size: 66 KiB

View File

@@ -0,0 +1,64 @@
// Grid.js dashboard table: one row per render job, populated from /api/jobs.
const grid = new gridjs.Grid({
columns: [
// Thumbnail preview; sorting disabled because the cell is purely visual.
{ data: (row) => row.id,
name: 'Thumbnail',
formatter: (cell) => gridjs.html(`<img src="/api/job/${cell}/thumbnail?video_ok" style='width: 200px; min-width: 120px;'>`),
sort: {enabled: false}
},
// Job name, linked to the job detail page (row.cells[0].data is the job id).
{ id: 'name',
name: 'Name',
data: (row) => row.name,
formatter: (name, row) => gridjs.html(`<a href="/ui/job/${row.cells[0].data}/full_details">${name}</a>`)
},
{ id: 'renderer', data: (row) => `${row.renderer}-${row.renderer_version}`, name: 'Renderer' },
{ id: 'priority', name: 'Priority' },
// Status column: a coloured tag for terminal states, or a progress bar
// while the job is running (the unused element is hidden via is-hidden).
{ id: 'status',
name: 'Status',
data: (row) => row,
formatter: (cell, row) => gridjs.html(`
<span class="tag ${(cell.status == 'running') ? 'is-hidden' : ''} ${(cell.status == 'cancelled') ?
'is-warning' : (cell.status == 'error') ? 'is-danger' : (cell.status == 'not_started') ?
'is-light' : 'is-primary'}">${cell.status}</span>
<progress class="progress is-primary ${(cell.status != 'running') ? 'is-hidden': ''}"
value="${(parseFloat(cell.percent_complete) * 100.0)}" max="100">${cell.status}</progress>
`)},
{ id: 'time_elapsed', name: 'Time Elapsed' },
{ data: (row) => row.total_frames ?? 'N/A', name: 'Frame Count' },
{ id: 'client', name: 'Client'},
// Last line of renderer output, linked to the full log.
{ data: (row) => row.last_output ?? 'N/A',
name: 'Last Output',
formatter: (output, row) => gridjs.html(`<a href="/api/job/${row.cells[0].data}/logs">${output}</a>`)
},
// Action buttons: info, logs, cancel (running only), download (completed
// only, shows file count), delete. Navigation is done via window.location.
{ data: (row) => row,
name: 'Commands',
formatter: (cell, row) => gridjs.html(`
<div class="field has-addons" style='white-space: nowrap; display: inline-block;'>
<button class="button is-info" onclick="window.location.href='/ui/job/${row.cells[0].data}/full_details';">
<span class="icon"><i class="fa-solid fa-info"></i></span>
</button>
<button class="button is-link" onclick="window.location.href='/api/job/${row.cells[0].data}/logs';">
<span class="icon"><i class="fa-regular fa-file-lines"></i></span>
</button>
<button class="button is-warning is-active ${(cell.status != 'running') ? 'is-hidden': ''}" onclick="window.location.href='/api/job/${row.cells[0].data}/cancel?confirm=True&redirect=True';">
<span class="icon"><i class="fa-solid fa-x"></i></span>
</button>
<button class="button is-success ${(cell.status != 'completed') ? 'is-hidden': ''}" onclick="window.location.href='/api/job/${row.cells[0].data}/download_all';">
<span class="icon"><i class="fa-solid fa-download"></i></span>
<span>${cell.file_count}</span>
</button>
<button class="button is-danger" onclick="window.location.href='/api/job/${row.cells[0].data}/delete?confirm=True&redirect=True'">
<span class="icon"><i class="fa-regular fa-trash-can"></i></span>
</button>
</div>
`),
sort: false
},
{ id: 'owner', name: 'Owner' }
],
autoWidth: true,
// Server-side data source: the endpoint returns {jobs: [...]}.
server: {
url: '/api/jobs',
then: results => results['jobs'],
},
sort: true,
}).render(document.getElementById('table'));

View File

@@ -0,0 +1,44 @@
document.addEventListener('DOMContentLoaded', () => {
  // Bulma modal wiring: open/close helpers plus the standard trigger hooks.
  function openModal($el) {
    $el.classList.add('is-active');
  }

  function closeModal($el) {
    $el.classList.remove('is-active');
  }

  function closeAllModals() {
    (document.querySelectorAll('.modal') || []).forEach(($modal) => {
      closeModal($modal);
    });
  }

  // Add a click event on buttons to open a specific modal
  (document.querySelectorAll('.js-modal-trigger') || []).forEach(($trigger) => {
    const modal = $trigger.dataset.target;
    const $target = document.getElementById(modal);

    $trigger.addEventListener('click', () => {
      openModal($target);
    });
  });

  // Add a click event on various child elements to close the parent modal
  (document.querySelectorAll('.modal-background, .modal-close, .modal-card-head .delete, .modal-card-foot .button') || []).forEach(($close) => {
    const $target = $close.closest('.modal');

    $close.addEventListener('click', () => {
      closeModal($target);
    });
  });

  // Close all open modals on Escape. `event.key` replaces the deprecated
  // `keyCode`; the numeric comparison is kept as a fallback for old engines.
  document.addEventListener('keydown', (event) => {
    const e = event || window.event;
    if (e.key === 'Escape' || e.keyCode === 27) {
      closeAllModals();
    }
  });
});

View File

@@ -0,0 +1,48 @@
{% extends 'layout.html' %}
{% block body %}
<div class="container" style="text-align:center; width: 100%">
<br>
{% if media_url: %}
<video width="1280" height="720" controls>
<source src="{{media_url}}" type="video/mp4">
Your browser does not support the video tag.
</video>
{% elif job_status == 'Running': %}
<div style="width: 100%; height: 720px; position: relative; background: black; text-align: center; color: white;">
<img src="/static/images/gears.png" style="vertical-align: middle; width: auto; height: auto; position:absolute; margin: auto; top: 0; bottom: 0; left: 0; right: 0;">
<span style="height: auto; position:absolute; margin: auto; top: 58%; left: 0; right: 0; color: white; width: 60%">
<progress class="progress is-primary" value="{{job.worker_data()['percent_complete'] * 100}}" max="100" style="margin-top: 6px;" id="progress-bar">Rendering</progress>
Rendering in Progress - <span id="percent-complete">{{(job.worker_data()['percent_complete'] * 100) | int}}%</span>
<br>Time Elapsed: <span id="time-elapsed">{{job.worker_data()['time_elapsed']}}</span>
</span>
<script>
var startingStatus = '{{job.status.value}}';
// Poll the job API once and refresh the progress UI in place.
function update_job() {
$.getJSON('/api/job/{{job.id}}', function(data) {
document.getElementById('progress-bar').value = (data.percent_complete * 100);
document.getElementById('percent-complete').innerHTML = (data.percent_complete * 100).toFixed(0) + '%';
document.getElementById('time-elapsed').innerHTML = data.time_elapsed;
// Any status change away from the server-rendered starting status means the
// job finished/failed/was cancelled: stop polling and reload for the new view.
if (data.status != startingStatus){
clearInterval(renderingTimer);
window.location.reload(true);
};
});
}
if (startingStatus == 'running'){
var renderingTimer = setInterval(update_job, 1000);
};
</script>
</div>
{% else %}
<div style="width: 100%; height: 720px; position: relative; background: black;">
<img src="/static/images/{{job_status}}.png" style="vertical-align: middle; width: auto; height: auto; position:absolute; margin: auto; top: 0; bottom: 0; left: 0; right: 0;">
<span style="height: auto; position:absolute; margin: auto; top: 58%; left: 0; right: 0; color: white;">
{{job_status}}
</span>
</div>
{% endif %}
<br>
{{detail_table|safe}}
</div>
{% endblock %}

View File

@@ -0,0 +1,8 @@
{% extends 'layout.html' %}
{% block body %}
{# Job dashboard: empty container that /static/js/job_table.js fills with a Grid.js table fed by /api/jobs #}
<div class="container is-fluid" style="padding-top: 20px;">
<div id="table" class="table"></div>
</div>
<script src="/static/js/job_table.js"></script>
{% endblock %}

View File

@@ -0,0 +1,236 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Zordon Dashboard</title>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/gridjs/dist/gridjs.umd.js"></script>
<link href="https://unpkg.com/gridjs/dist/theme/mermaid.min.css" rel="stylesheet" />
<script src="https://kit.fontawesome.com/698705d14d.js" crossorigin="anonymous"></script>
<script type="text/javascript" src="/static/js/modals.js"></script>
</head>
<body onload="rendererChanged(document.getElementById('renderer'))">
<nav class="navbar is-dark" role="navigation" aria-label="main navigation">
<div class="navbar-brand">
<a class="navbar-item" href="/">
<img src="/static/images/logo.png">
</a>
</div>
<div id="navbarBasicExample" class="navbar-menu">
<div class="navbar-start">
<a class="navbar-item" href="/">
Home
</a>
</div>
<div class="navbar-end">
<div class="navbar-item">
<button class="button is-primary js-modal-trigger" data-target="add-job-modal">
<span class="icon">
<i class="fa-solid fa-upload"></i>
</span>
<span>Submit Job</span>
</button>
</div>
</div>
</div>
</nav>
{% block body %}
{% endblock %}
<div id="add-job-modal" class="modal">
<!-- Start Add Form -->
<form id="submit_job" action="/api/add_job?redirect=True" method="POST" enctype="multipart/form-data">
<div class="modal-background"></div>
<div class="modal-card">
<header class="modal-card-head">
<p class="modal-card-title">Submit New Job</p>
<button class="delete" aria-label="close" type="button"></button>
</header>
<section class="modal-card-body">
<!-- File Uploader -->
<label class="label">Upload File</label>
<div id="file-uploader" class="file has-name is-fullwidth">
<label class="file-label">
<input class="file-input is-small" type="file" name="file">
<span class="file-cta">
<span class="file-icon">
<i class="fas fa-upload"></i>
</span>
<span class="file-label">
Choose a file…
</span>
</span>
<span class="file-name">
No File Uploaded
</span>
</label>
</div>
<br>
<script>
const fileInput = document.querySelector('#file-uploader input[type=file]');
fileInput.onchange = () => {
if (fileInput.files.length > 0) {
const fileName = document.querySelector('#file-uploader .file-name');
fileName.textContent = fileInput.files[0].name;
}
}
const presets = {
{% for preset in preset_list: %}
{{preset}}: {
name: '{{preset_list[preset]['name']}}',
renderer: '{{preset_list[preset]['renderer']}}',
args: '{{preset_list[preset]['args']}}',
},
{% endfor %}
};
function rendererChanged(ddl1) {
var renderers = {
{% for renderer in renderer_info: %}
{% if renderer_info[renderer]['supported_export_formats']: %}
{{renderer}}: [
{% for format in renderer_info[renderer]['supported_export_formats']: %}
'{{format}}',
{% endfor %}
],
{% endif %}
{% endfor %}
};
var selectedRenderer = ddl1.value;
var ddl3 = document.getElementById('preset_list');
ddl3.options.length = 0;
createOption(ddl3, '-Presets-', '');
for (var preset_name in presets) {
if (presets[preset_name]['renderer'] == selectedRenderer) {
createOption(ddl3, presets[preset_name]['name'], preset_name);
};
};
document.getElementById('raw_args').value = "";
var ddl2 = document.getElementById('export_format');
ddl2.options.length = 0;
var options = renderers[selectedRenderer];
for (i = 0; i < options.length; i++) {
createOption(ddl2, options[i], options[i]);
};
}
// Append a new <option> with the given label and value to a <select> element.
function createOption(ddl, text, value) {
var option = document.createElement('option');
option.text = text;
option.value = value;
ddl.options.add(option);
}
// Copy the selected preset's stored command-line arguments into the text input.
function addPresetTextToInput(presetfield, textfield) {
textfield.value = presets[presetfield.value]['args'];
}
</script>
<!-- Renderer & Priority -->
<div class="field is-grouped">
<p class="control">
<label class="label">Renderer</label>
<span class="select">
<select id="renderer" name="renderer" onchange="rendererChanged(this)">
{% for renderer in renderer_info: %}
<option name="renderer" value="{{renderer}}">{{renderer}}</option>
{% endfor %}
</select>
</span>
</p>
<p class="control">
<label class="label">Client</label>
<span class="select">
<select name="client">
<option name="client" value="">First Available</option>
{% for client in render_clients: %}
<option name="client" value="{{client}}">{{client}}</option>
{% endfor %}
</select>
</span>
</p>
<p class="control">
<label class="label">Priority</label>
<span class="select">
<select name="priority">
<option name="priority" value="1">1</option>
<option name="priority" value="2" selected="selected">2</option>
<option name="priority" value="3">3</option>
</select>
</span>
</p>
</div>
<!-- Output Path -->
<label class="label">Output</label>
<div class="field has-addons">
<div class="control is-expanded">
<input class="input is-small" type="text" placeholder="Output Name" name="output_path" value="output.mp4">
</div>
<p class="control">
<span class="select is-small">
<select id="export_format" name="export_format">
<option value="ar">option</option>
</select>
</span>
</p>
</div>
<!-- Resolution -->
<!-- <label class="label">Resolution</label>-->
<!-- <div class="field is-grouped">-->
<!-- <p class="control">-->
<!-- <input class="input" type="text" placeholder="auto" maxlength="5" size="8" name="AnyRenderer-arg_x_resolution">-->
<!-- </p>-->
<!-- <label class="label"> x </label>-->
<!-- <p class="control">-->
<!-- <input class="input" type="text" placeholder="auto" maxlength="5" size="8" name="AnyRenderer-arg_y_resolution">-->
<!-- </p>-->
<!-- <label class="label"> @ </label>-->
<!-- <p class="control">-->
<!-- <input class="input" type="text" placeholder="auto" maxlength="3" size="5" name="AnyRenderer-arg_frame_rate">-->
<!-- </p>-->
<!-- <label class="label"> fps </label>-->
<!-- </div>-->
<label class="label">Command Line Arguments</label>
<div class="field has-addons">
<p class="control">
<span class="select is-small">
<select id="preset_list" onchange="addPresetTextToInput(this, document.getElementById('raw_args'))">
<option value="preset-placeholder">presets</option>
</select>
</span>
</p>
<p class="control is-expanded">
<input class="input is-small" type="text" placeholder="Args" id="raw_args" name="raw_args">
</p>
</div>
<!-- End Add Form -->
</section>
<footer class="modal-card-foot">
<input class="button is-link" type="submit"/>
<button class="button" type="button">Cancel</button>
</footer>
</div>
</form>
</div>
</body>
</html>

View File

@@ -0,0 +1,62 @@
<html>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script>
// On renderer selection change: hide every per-renderer settings panel
// (.render_settings), then reveal the one whose element id matches the
// selected renderer value.
$(function() {
$('#renderer').change(function() {
$('.render_settings').hide();
$('#' + $(this).val()).show();
});
});
</script>
<body>
<h3>Upload a file</h3>
<div>
<form action="/add_job" method="POST"
enctype="multipart/form-data">
<div>
<input type="file" name="file"/><br>
</div>
<input type="hidden" id="origin" name="origin" value="html">
<div id="client">
Render Client:
<select name="client">
{% for client in render_clients %}
<option value="{{client}}">{{client}}</option>
{% endfor %}
</select>
</div>
<div id="priority">
Priority:
<select name="priority">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>
</div>
<div>
<label for="renderer">Renderer:</label>
<select id="renderer" name="renderer">
{% for renderer in supported_renderers %}
<option value="{{renderer}}">{{renderer}}</option>
{% endfor %}
</select>
</div>
<div id="blender" class="render_settings" style="display:none">
Engine:
<select name="blender+engine">
<option value="CYCLES">Cycles</option>
<option value="BLENDER_EEVEE">Eevee</option>
</select>
</div>
<br>
<input type="submit"/>
</form>
</div>
</body>
</html>