Major file reorganization (#26)
* Major file reorganization * Rearrange imports * Fix default log level
0
src/__init__.py
Normal file
592
src/api_server.py
Executable file
@@ -0,0 +1,592 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import shutil
|
||||
import socket
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from urllib.request import urlretrieve
|
||||
from zipfile import ZipFile
|
||||
|
||||
import json2html
|
||||
import psutil
|
||||
import yaml
|
||||
from flask import Flask, request, render_template, send_file, after_this_request, Response, redirect, url_for, abort
|
||||
from werkzeug.utils import secure_filename
|
||||
|
||||
from src.distributed_job_manager import DistributedJobManager
|
||||
from src.render_queue import RenderQueue, JobNotFoundError
|
||||
from src.server_proxy import RenderServerProxy
|
||||
from src.utilities.server_helper import generate_thumbnail_for_job
|
||||
from src.utilities.zeroconf_server import ZeroconfServer
|
||||
from src.worker_factory import RenderWorkerFactory
|
||||
from src.workers.base_worker import string_to_status, RenderStatus
|
||||
|
||||
logger = logging.getLogger()
|
||||
server = Flask(__name__, template_folder='web/templates', static_folder='web/static')
|
||||
ssl._create_default_https_context = ssl._create_unverified_context # disable SSL for downloads
|
||||
|
||||
categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
|
||||
RenderStatus.COMPLETED, RenderStatus.CANCELLED]
|
||||
|
||||
|
||||
def sorted_jobs(all_jobs, sort_by_date=True):
    """Return jobs newest-first.

    When sort_by_date is False, jobs are grouped by status category
    (in the module-level `categories` order) and sorted newest-first
    within each group.
    """
    newest_first = lambda job: job.date_created
    if sort_by_date:
        return sorted(all_jobs, key=newest_first, reverse=True)

    grouped = []
    if all_jobs:
        for category in categories:
            matches = [job for job in all_jobs if job.status == category.value]
            if matches:
                grouped.extend(sorted(matches, key=newest_first, reverse=True))
    return grouped
|
||||
|
||||
|
||||
@server.route('/')
@server.route('/index')
def index():
    """Render the main dashboard page with the job queue and available presets."""
    with open('config/presets.yaml') as preset_file:
        preset_data = yaml.load(preset_file, Loader=yaml.FullLoader)

    return render_template('index.html',
                           all_jobs=sorted_jobs(RenderQueue.all_jobs()),
                           hostname=server.config['HOSTNAME'],
                           renderer_info=renderer_info(),
                           render_clients=[server.config['HOSTNAME']],
                           preset_list=preset_data)
|
||||
|
||||
|
||||
@server.get('/api/jobs')
def jobs_json():
    """Return every job as JSON plus a cache token.

    The client may echo back the token via ?token=...; when it matches the
    current job snapshot a 204 is returned so the client keeps its cache.
    """
    try:
        client_token = request.args.get('token', None)
        job_payload = [job.json() for job in RenderQueue.all_jobs()]
        current_token = str(hash(json.dumps(job_payload)))

        if client_token and client_token == current_token:
            # Client already holds this exact snapshot - nothing to send.
            return [], 204
        return {'jobs': job_payload, 'token': current_token}
    except Exception as e:
        logger.exception(f"Exception fetching all_jobs_cached: {e}")
        return [], 500
|
||||
|
||||
|
||||
@server.route('/ui/job/<job_id>/full_details')
def job_detail(job_id):
    """Render the full-details page for one job, embedding its JSON as an HTML table."""
    job = RenderQueue.job_with_id(job_id)
    detail_table = json2html.json2html.convert(
        json=job.json(),
        table_attributes='class="table is-narrow is-striped is-fullwidth"')

    media_url = None
    if job.file_list() and job.status == RenderStatus.COMPLETED:
        # Link the first rendered output file so the template can show a preview.
        media_url = f"/api/job/{job_id}/file/{os.path.basename(job.file_list()[0])}"

    return render_template('details.html', detail_table=detail_table, media_url=media_url,
                           hostname=server.config['HOSTNAME'], job_status=job.status.value.title(),
                           job=job, renderer_info=renderer_info())
|
||||
|
||||
|
||||
@server.route('/api/job/<job_id>/thumbnail')
def job_thumbnail(job_id):
    """Serve a thumbnail (image or short video) for a job.

    Query args:
        size=big     -> serve the 800px-wide variant instead of the 240px one.
        video_ok=... -> any truthy value allows an mp4 thumbnail to be returned.

    A sibling "<path>_IN-PROGRESS" marker file flags a thumbnail that is still
    being generated; such thumbnails are not served. When no generated media is
    available, a static status icon matching the job state is returned instead.
    """
    big_thumb = request.args.get('size', False) == "big"
    video_ok = request.args.get('video_ok', False)
    found_job = RenderQueue.job_with_id(job_id, none_ok=True)
    if found_job:

        os.makedirs(server.config['THUMBS_FOLDER'], exist_ok=True)
        thumb_video_path = os.path.join(server.config['THUMBS_FOLDER'], found_job.id + '.mp4')
        thumb_image_path = os.path.join(server.config['THUMBS_FOLDER'], found_job.id + '.jpg')
        big_video_path = os.path.join(server.config['THUMBS_FOLDER'], found_job.id + '_big.mp4')
        big_image_path = os.path.join(server.config['THUMBS_FOLDER'], found_job.id + '_big.jpg')

        # generate regular thumb if it doesn't exist
        if not os.path.exists(thumb_video_path) and not os.path.exists(thumb_video_path + '_IN-PROGRESS') and \
                found_job.status not in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
            generate_thumbnail_for_job(found_job, thumb_video_path, thumb_image_path, max_width=240)

        # generate big thumb if it doesn't exist
        # NOTE(review): this branch checks big_image_path + '_IN-PROGRESS' while the
        # small-thumb branch checks the *video* marker - possibly intentional, but
        # verify the asymmetry against generate_thumbnail_for_job's marker naming.
        if not os.path.exists(big_video_path) and not os.path.exists(big_image_path + '_IN-PROGRESS') and \
                found_job.status not in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
            generate_thumbnail_for_job(found_job, big_video_path, big_image_path, max_width=800)

        # generated videos
        if video_ok:
            if big_thumb and os.path.exists(big_video_path) and not os.path.exists(
                    big_video_path + '_IN-PROGRESS'):
                return send_file(big_video_path, mimetype="video/mp4")
            elif os.path.exists(thumb_video_path) and not os.path.exists(thumb_video_path + '_IN-PROGRESS'):
                return send_file(thumb_video_path, mimetype="video/mp4")

        # Generated thumbs
        if big_thumb and os.path.exists(big_image_path):
            return send_file(big_image_path, mimetype='image/jpeg')
        elif os.path.exists(thumb_image_path):
            return send_file(thumb_image_path, mimetype='image/jpeg')

        # Misc status icons
        if found_job.status == RenderStatus.RUNNING:
            return send_file('web/static/images/gears.png', mimetype="image/png")
        elif found_job.status == RenderStatus.CANCELLED:
            return send_file('web/static/images/cancelled.png', mimetype="image/png")
        elif found_job.status == RenderStatus.SCHEDULED:
            return send_file('web/static/images/scheduled.png', mimetype="image/png")
        elif found_job.status == RenderStatus.NOT_STARTED:
            return send_file('web/static/images/not_started.png', mimetype="image/png")
    # errors
    return send_file('web/static/images/error.png', mimetype="image/png")
|
||||
|
||||
|
||||
# Get job file routing
@server.route('/api/job/<job_id>/file/<filename>', methods=['GET'])
def get_job_file(job_id, filename):
    """Serve a single output file belonging to a job.

    Returns the first job file whose full path contains *filename*.
    Responds 404 when the file is missing on disk OR when no job file
    matches the requested name (previously an unmatched filename fell
    through and implicitly returned None, which Flask turns into a 500).
    """
    found_job = RenderQueue.job_with_id(job_id)
    try:
        for full_path in found_job.file_list():
            if filename in full_path:
                return send_file(path_or_file=full_path)
    except FileNotFoundError:
        abort(404)
    # BUG FIX: no job file matched the requested name - report not-found
    # explicitly instead of falling through and returning None.
    abort(404)
|
||||
|
||||
|
||||
@server.get('/api/jobs/<status_val>')
def filtered_jobs_json(status_val):
    """Return all jobs whose status matches status_val, or 400 when none match."""
    state = string_to_status(status_val)
    matching = [job.json() for job in RenderQueue.jobs_with_status(state)]
    if not matching:
        return f'Cannot find jobs with status {status_val}', 400
    return matching
|
||||
|
||||
|
||||
@server.post('/api/job/<job_id>/notify_parent_of_status_change')
def subjob_status_change(job_id):
    """Forward a child job's status-change notification to the distributed-job manager."""
    try:
        payload = request.json
        logger.info(f"Subjob to job id: {job_id} is now {payload['status']}")
        parent_job = RenderQueue.job_with_id(job_id)
        DistributedJobManager.handle_subjob_status_change(parent_job, subjob_data=payload)
        return Response(status=200)
    except JobNotFoundError:
        return "Job not found", 404
|
||||
|
||||
|
||||
@server.errorhandler(JobNotFoundError)
def handle_job_not_found(job_error):
    """Translate a JobNotFoundError raised by any route into a 400 response."""
    message = f'Cannot find job with ID {job_error.job_id}'
    return message, 400
|
||||
|
||||
|
||||
@server.get('/api/job/<job_id>')
def get_job_status(job_id):
    """Return the JSON representation of a single job."""
    found = RenderQueue.job_with_id(job_id)
    return found.json()
|
||||
|
||||
|
||||
@server.get('/api/job/<job_id>/logs')
def get_job_logs(job_id):
    """Return the job's log file as plain text (empty body when no log exists)."""
    job = RenderQueue.job_with_id(job_id)
    log_path = job.log_path()
    contents = None
    if log_path and os.path.exists(log_path):
        with open(log_path) as log_file:
            contents = log_file.read()
    return Response(contents, mimetype='text/plain')
|
||||
|
||||
|
||||
@server.get('/api/job/<job_id>/file_list')
def get_file_list(job_id):
    """Return the list of files produced by the given job."""
    job = RenderQueue.job_with_id(job_id)
    return job.file_list()
|
||||
|
||||
|
||||
@server.get('/api/job/<job_id>/make_ready')
def make_job_ready(job_id):
    """Move a CONFIGURING/NOT_STARTED job (and its children) into the NOT_STARTED state.

    Asks the host of every child job to make it ready, then marks this job
    NOT_STARTED and persists the queue. Returns the job's JSON on success,
    500 on error, or 405 when the job is in a state that cannot be made ready.
    """
    try:
        found_job = RenderQueue.job_with_id(job_id)
        if found_job.status in [RenderStatus.CONFIGURING, RenderStatus.NOT_STARTED]:
            if found_job.children:
                for child_key in found_job.children.keys():
                    # Child keys look like "<job_id>@<hostname>".
                    child_id = child_key.split('@')[0]
                    hostname = child_key.split('@')[-1]
                    RenderServerProxy(hostname).request_data(f'job/{child_id}/make_ready')
            found_job.status = RenderStatus.NOT_STARTED
            RenderQueue.save_state()
            return found_job.json(), 200
    except Exception as e:
        # BUG FIX: was a plain string literal "...{e}" missing the f-prefix,
        # so the response contained the text "{e}" instead of the error.
        return f"Error making job ready: {e}", 500
    return "Not valid command", 405
|
||||
|
||||
|
||||
@server.route('/api/job/<job_id>/download_all')
def download_all(job_id):
    """Zip every file in the job's output directory and send it as an attachment.

    The temporary archive written to /tmp is removed after the response has
    been sent (via the after_this_request hook, which closes over the
    zip_filename local assigned below).
    """
    zip_filename = None

    @after_this_request
    def clear_zip(response):
        # Clean up the temporary archive once the download has completed.
        if zip_filename and os.path.exists(zip_filename):
            os.remove(zip_filename)
        return response

    found_job = RenderQueue.job_with_id(job_id)
    output_dir = os.path.dirname(found_job.output_path)
    if os.path.exists(output_dir):
        # Name the archive after the project file's stem, e.g. "scene.zip".
        zip_filename = os.path.join('/tmp', pathlib.Path(found_job.input_path).stem + '.zip')
        with ZipFile(zip_filename, 'w') as zipObj:
            for f in os.listdir(output_dir):
                zipObj.write(filename=os.path.join(output_dir, f),
                             arcname=os.path.basename(f))
        return send_file(zip_filename, mimetype="zip", as_attachment=True, )
    else:
        return f'Cannot find project files for job {job_id}', 500
|
||||
|
||||
|
||||
@server.get('/api/presets')
def presets():
    """Return the render presets defined in config/presets.yaml."""
    with open('config/presets.yaml') as preset_file:
        preset_data = yaml.load(preset_file, Loader=yaml.FullLoader)
    return preset_data
|
||||
|
||||
|
||||
@server.get('/api/full_status')
def full_status():
    """Return a timestamped status+jobs snapshot keyed by this server's hostname."""
    results = {'timestamp': datetime.now().isoformat(), 'servers': {}}

    try:
        snap = snapshot()
        results['servers'][server.config['HOSTNAME']] = {
            'status': snap.get('status', {}),
            'jobs': snap.get('jobs', {}),
            'is_online': True,
        }
    except Exception as e:
        logger.error(f"Exception fetching full status: {e}")

    return results
|
||||
|
||||
|
||||
@server.get('/api/snapshot')
def snapshot():
    """Return this server's current status, job list, and a timestamp."""
    return {
        'status': status(),
        'jobs': [job.json() for job in RenderQueue.all_jobs()],
        'timestamp': datetime.now().isoformat(),
    }
|
||||
|
||||
|
||||
@server.get('/api/_detected_clients')
def detected_clients():
    """Return the render clients discovered on the network via zeroconf."""
    # todo: dev/debug only. Should not ship this - probably.
    return ZeroconfServer.found_clients()
|
||||
|
||||
|
||||
@server.post('/api/add_job')
def add_job_handler():
    """Create one or more render jobs from a request.

    Accepts, in order of precedence: a JSON body (single object or list),
    a 'json' form field, or flat HTML form data (which is folded into a
    nested job dict). The project file may arrive as an upload ('file'),
    a URL ('url'), or a path already on this host ('local_path'). Zip
    uploads are extracted and must contain exactly one project file
    matching the renderer's supported extensions.

    Returns the created jobs' JSON (200), the results list with errors
    (400), a download failure (406), or 500 on processing errors. With
    ?redirect=... the response is a redirect to the index instead.
    """
    # initial handling of raw data
    try:
        if request.is_json:
            jobs_list = [request.json] if not isinstance(request.json, list) else request.json
        elif request.form.get('json', None):
            jobs_list = json.loads(request.form['json'])
        else:
            # Cleanup flat form data into nested structure
            form_dict = {k: v for k, v in dict(request.form).items() if v}
            args = {}
            # Renderer-specific form fields are named "<renderer>-arg_<name>".
            arg_keys = [k for k in form_dict.keys() if '-arg_' in k]
            for server_hostname in arg_keys:
                if form_dict['renderer'] in server_hostname or 'AnyRenderer' in server_hostname:
                    cleaned_key = server_hostname.split('-arg_')[-1]
                    args[cleaned_key] = form_dict[server_hostname]
                    form_dict.pop(server_hostname)
            args['raw'] = form_dict.get('raw_args', None)
            form_dict['args'] = args
            jobs_list = [form_dict]
    except Exception as e:
        err_msg = f"Error processing job data: {e}"
        logger.error(err_msg)
        return err_msg, 500

    # start handling project files
    try:
        # handle uploaded files
        logger.debug(f"Incoming new job request: {jobs_list}")
        uploaded_project = request.files.get('file', None)
        project_url = jobs_list[0].get('url', None)
        local_path = jobs_list[0].get('local_path', None)
        renderer = jobs_list[0].get('renderer')

        downloaded_file_url = None
        if uploaded_project and uploaded_project.filename:
            referred_name = os.path.basename(uploaded_project.filename)
        elif project_url:
            # download and save url - have to download first to know filename due to redirects
            logger.info(f"Attempting to download URL: {project_url}")
            try:
                downloaded_file_url, info = urlretrieve(project_url)
                referred_name = info.get_filename() or os.path.basename(project_url)
            except Exception as e:
                err_msg = f"Error downloading file: {e}"
                logger.error(err_msg)
                return err_msg, 406
        elif local_path and os.path.exists(local_path):
            referred_name = os.path.basename(local_path)
        else:
            return "Cannot find any valid project paths", 400

        # prep local filepath: "<timestamp>-<renderer>-<project_name>" under UPLOAD_FOLDER
        cleaned_path_name = os.path.splitext(referred_name)[0].replace(' ', '_')
        job_dir = os.path.join(server.config['UPLOAD_FOLDER'], '-'.join(
            [datetime.now().strftime("%Y.%m.%d_%H.%M.%S"), renderer, cleaned_path_name]))
        os.makedirs(job_dir, exist_ok=True)
        upload_dir = os.path.join(job_dir, 'source')
        os.makedirs(upload_dir, exist_ok=True)

        # move projects to their work directories
        loaded_project_local_path = None
        if uploaded_project and uploaded_project.filename:
            loaded_project_local_path = os.path.join(upload_dir, secure_filename(uploaded_project.filename))
            uploaded_project.save(loaded_project_local_path)
            logger.info(f"Transfer complete for {loaded_project_local_path.split(server.config['UPLOAD_FOLDER'])[-1]}")
        elif project_url:
            loaded_project_local_path = os.path.join(upload_dir, referred_name)
            shutil.move(downloaded_file_url, loaded_project_local_path)
            logger.info(f"Download complete for {loaded_project_local_path.split(server.config['UPLOAD_FOLDER'])[-1]}")
        elif local_path:
            loaded_project_local_path = os.path.join(upload_dir, referred_name)
            shutil.copy(local_path, loaded_project_local_path)
            logger.info(f"Import complete for {loaded_project_local_path.split(server.config['UPLOAD_FOLDER'])[-1]}")

        # process uploaded zip files
        zip_path = loaded_project_local_path if loaded_project_local_path.lower().endswith('.zip') else None
        if zip_path:
            zip_path = loaded_project_local_path
            work_path = os.path.dirname(zip_path)
            try:
                with zipfile.ZipFile(zip_path, 'r') as myzip:
                    myzip.extractall(os.path.dirname(zip_path))

                # Candidate project files: extracted regular files, excluding the archive itself.
                project_files = [x for x in os.listdir(work_path) if os.path.isfile(os.path.join(work_path, x))]
                project_files = [x for x in project_files if '.zip' not in x]
                supported_exts = RenderWorkerFactory.class_for_name(renderer).engine.supported_extensions
                if supported_exts:
                    project_files = [file for file in project_files if
                                     any(file.endswith(ext) for ext in supported_exts)]

                if len(project_files) != 1:  # we have to narrow down to 1 main project file, otherwise error
                    return {'error': f'Cannot find valid project file in {os.path.basename(zip_path)}'}, 400

                extracted_project_path = os.path.join(work_path, project_files[0])
                logger.info(f"Extracted zip file to {extracted_project_path}")
                loaded_project_local_path = extracted_project_path
            except (zipfile.BadZipFile, zipfile.LargeZipFile) as e:
                err_msg = f"Error processing zip file: {e}"
                logger.error(err_msg)
                return err_msg, 500

        # create and add jobs to render queue
        results = []
        for job_data in jobs_list:
            try:
                # prepare output paths (per-job subdir when several jobs share one request)
                output_dir = os.path.join(job_dir, job_data.get('name') if len(jobs_list) > 1 else 'output')
                os.makedirs(output_dir, exist_ok=True)

                # get new output path in output_dir
                job_data['output_path'] = os.path.join(output_dir, os.path.basename(
                    job_data.get('output_path', None) or loaded_project_local_path
                ))

                # create & configure jobs
                worker = RenderWorkerFactory.create_worker(renderer=job_data['renderer'],
                                                           input_path=loaded_project_local_path,
                                                           output_path=job_data["output_path"],
                                                           args=job_data.get('args', {}))
                worker.status = job_data.get("initial_status", worker.status)
                worker.parent = job_data.get("parent", worker.parent)
                worker.name = job_data.get("name", worker.name)
                worker.priority = int(job_data.get('priority', worker.priority))
                worker.start_frame = int(job_data.get("start_frame", worker.start_frame))
                worker.end_frame = int(job_data.get("end_frame", worker.end_frame))

                # determine if we can / should split the job
                if server.config.get('enable_split_jobs', False) and (worker.total_frames > 1) and not worker.parent:
                    DistributedJobManager.split_into_subjobs(worker, job_data, zip_path or loaded_project_local_path)

                RenderQueue.add_to_render_queue(worker, force_start=job_data.get('force_start', False))
                if not worker.parent:
                    # Top-level jobs are made ready immediately; children wait for their parent.
                    make_job_ready(worker.id)
                results.append(worker.json())
            except Exception as e:
                err_msg = f"Exception creating render job: {e}"
                logger.exception(err_msg)
                results.append({'error': err_msg})

        # return any errors from results list
        for response in results:
            if response.get('error', None):
                return results, 400

        # redirect to index if requested
        if request.args.get('redirect', False):
            return redirect(url_for('index'))
        else:
            return results, 200

    except Exception as e:
        logger.exception(f"Unknown error adding job: {e}")
        return 'unknown error', 500
|
||||
|
||||
|
||||
@server.get('/api/job/<job_id>/cancel')
def cancel_job(job_id):
    """Cancel a job. Requires ?confirm=...; with ?redirect=... returns to the index."""
    if not request.args.get('confirm', False):
        return 'Confirmation required to cancel job', 400

    cancelled = RenderQueue.cancel_job(RenderQueue.job_with_id(job_id))
    if not cancelled:
        return "Unknown error", 500
    if request.args.get('redirect', False):
        return redirect(url_for('index'))
    return "Job cancelled"
|
||||
|
||||
|
||||
@server.route('/api/job/<job_id>/delete', methods=['POST', 'GET'])
def delete_job(job_id):
    """Delete a job, its thumbnails, and any server-owned project/output directories.

    Requires ?confirm=...; with ?redirect=... the response redirects to the
    index. Directories are only removed when they live under UPLOAD_FOLDER,
    so externally referenced project files are never touched.
    """
    try:
        if not request.args.get('confirm', False):
            return 'Confirmation required to delete job', 400

        # Check if we can remove the 'output' directory
        found_job = RenderQueue.job_with_id(job_id)
        output_dir = os.path.dirname(found_job.output_path)
        if server.config['UPLOAD_FOLDER'] in output_dir and os.path.exists(output_dir):
            shutil.rmtree(output_dir)

        # Remove any thumbnails
        for filename in os.listdir(server.config['THUMBS_FOLDER']):
            if job_id in filename:
                os.remove(os.path.join(server.config['THUMBS_FOLDER'], filename))

        # Belt-and-braces: remove the video thumb by its canonical name too.
        thumb_path = os.path.join(server.config['THUMBS_FOLDER'], found_job.id + '.mp4')
        if os.path.exists(thumb_path):
            os.remove(thumb_path)

        # See if we own the project_dir (i.e. was it uploaded)
        project_dir = os.path.dirname(os.path.dirname(found_job.input_path))
        if server.config['UPLOAD_FOLDER'] in project_dir and os.path.exists(project_dir):
            # check to see if any other projects are sharing the same project file
            project_dir_files = [f for f in os.listdir(project_dir) if not f.startswith('.')]
            if len(project_dir_files) == 0 or (len(project_dir_files) == 1 and 'source' in project_dir_files[0]):
                logger.info(f"Removing project directory: {project_dir}")
                shutil.rmtree(project_dir)

        RenderQueue.delete_job(found_job)
        if request.args.get('redirect', False):
            return redirect(url_for('index'))
        else:
            return "Job deleted", 200

    except Exception as e:
        logger.error(f"Error deleting job: {e}")
        return f"Error deleting job: {e}", 500
|
||||
|
||||
|
||||
@server.get('/api/clear_history')
def clear_history():
    """Clear the render queue's job history."""
    RenderQueue.clear_history()
    return 'success'
|
||||
|
||||
|
||||
@server.route('/api/status')
def status():
    """Return host metrics, job counts, and the renderers installed on this host."""
    installed_renderers = {}
    for render_class in RenderWorkerFactory.supported_classes():
        engine = render_class.engine
        if engine.renderer_path():  # only return renderers installed on host
            installed_renderers[engine.name()] = {
                'version': engine.version(),
                'is_available': RenderQueue.is_available_for_job(engine.name()),
            }

    return {
        "timestamp": datetime.now().isoformat(),
        "platform": platform.platform(),
        "cpu_percent": psutil.cpu_percent(percpu=False),
        "cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
        "cpu_count": psutil.cpu_count(logical=False),
        "memory_total": psutil.virtual_memory().total,
        "memory_available": psutil.virtual_memory().available,
        "memory_percent": psutil.virtual_memory().percent,
        "job_counts": RenderQueue.job_counts(),
        "renderers": installed_renderers,
        "hostname": server.config['HOSTNAME'],
        "port": server.config['PORT'],
    }
|
||||
|
||||
|
||||
@server.get('/api/renderer_info')
def renderer_info():
    """Return capability details for every renderer installed on this host."""
    info = {}
    for name in RenderWorkerFactory.supported_renderers():
        engine = RenderWorkerFactory.class_for_name(name).engine
        if not engine.renderer_path():
            continue  # skip renderers not installed locally
        info[name] = {
            'is_available': RenderQueue.is_available_for_job(engine.name()),
            'version': engine.version(),
            'supported_extensions': engine.supported_extensions,
            'supported_export_formats': engine.get_output_formats(),
            'path': engine.renderer_path(),
        }
    return info
|
||||
|
||||
|
||||
@server.route('/upload')
def upload_file_page():
    """Render the job-upload form."""
    supported = RenderWorkerFactory.supported_renderers()
    return render_template('upload.html', supported_renderers=supported)
|
||||
|
||||
|
||||
def start_server(background_thread=False):
    """Configure and run the Flask render server.

    Loads config/config.yaml, configures logging, fills in Flask config,
    starts the render queue and distributed-job manager, launches a daemon
    thread that periodically evaluates the queue, registers the host via
    zeroconf, then blocks in Flask's run loop. On exit the queue state is
    persisted and the zeroconf registration is withdrawn.

    Args:
        background_thread: when True, run Flask in a separate thread and
            join it instead of running it on the calling thread.
    """
    def eval_loop(delay_sec=1):
        # Periodically let the queue schedule/advance jobs.
        while True:
            RenderQueue.evaluate_queue()
            time.sleep(delay_sec)

    with open('config/config.yaml') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    logging.basicConfig(format='%(asctime)s: %(levelname)s: %(module)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S',
                        level=config.get('server_log_level', 'INFO').upper())

    # get hostname (always advertise the ".local" mDNS form)
    local_hostname = socket.gethostname()
    local_hostname = local_hostname + (".local" if not local_hostname.endswith(".local") else "")

    # load flask settings
    server.config['HOSTNAME'] = local_hostname
    server.config['PORT'] = int(config.get('port_number', 8080))
    server.config['UPLOAD_FOLDER'] = os.path.expanduser(config['upload_folder'])
    server.config['THUMBS_FOLDER'] = os.path.join(os.path.expanduser(config['upload_folder']), 'thumbs')
    server.config['MAX_CONTENT_PATH'] = config['max_content_path']
    server.config['enable_split_jobs'] = config.get('enable_split_jobs', False)

    # disable most Flask logging
    flask_log = logging.getLogger('werkzeug')
    flask_log.setLevel(config.get('flask_log_level', 'ERROR').upper())

    # Set up the RenderQueue object
    RenderQueue.start_queue()
    DistributedJobManager.start()

    # Daemon thread: dies with the process, no explicit shutdown needed.
    thread = threading.Thread(target=eval_loop, kwargs={'delay_sec': config.get('queue_eval_seconds', 1)}, daemon=True)
    thread.start()

    logging.info(f"Starting Zordon Render Server - Hostname: '{server.config['HOSTNAME']}:'")
    ZeroconfServer.configure("_zordon._tcp.local.", server.config['HOSTNAME'], server.config['PORT'])
    ZeroconfServer.start()

    try:
        if background_thread:
            server_thread = threading.Thread(
                target=lambda: server.run(host='0.0.0.0', port=server.config['PORT'], debug=False, use_reloader=False))
            server_thread.start()
            server_thread.join()
        else:
            server.run(host='0.0.0.0', port=server.config['PORT'], debug=config.get('flask_debug_enable', False),
                       use_reloader=False, threaded=True)
    finally:
        # Persist queue state and withdraw the zeroconf registration on shutdown.
        RenderQueue.save_state()
        ZeroconfServer.stop()
|
||||
0
src/client/__init__.py
Normal file
399
src/client/dashboard_window.py
Normal file
@@ -0,0 +1,399 @@
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
import tkinter as tk
|
||||
from tkinter import ttk, messagebox, simpledialog
|
||||
|
||||
from PIL import Image, ImageTk
|
||||
|
||||
from src.client.new_job_window import NewJobWindow
|
||||
# from src.client.server_details import create_server_popup
|
||||
from src.server_proxy import RenderServerProxy
|
||||
from src.utilities.misc_helper import launch_url, file_exists_in_mounts, get_time_elapsed
|
||||
from src.utilities.zeroconf_server import ZeroconfServer
|
||||
from src.workers.base_worker import RenderStatus
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def sort_column(tree, col, reverse=False):
    """Reorder a Treeview's top-level rows by the text in the given column."""
    rows = [(tree.set(item, col), item) for item in tree.get_children('')]
    rows.sort(reverse=reverse)
    for position, (_, item) in enumerate(rows):
        tree.move(item, '', position)
|
||||
|
||||
|
||||
def make_sortable(tree):
    """Attach a click-to-sort handler to every column heading of a Treeview."""
    for column in tree["columns"]:
        # Bind the column as a default argument so each heading sorts its own column.
        tree.heading(column, text=column, command=lambda c=column: sort_column(tree, c))
|
||||
|
||||
|
||||
class DashboardWindow:
    """Tkinter dashboard listing render servers (via zeroconf) and their jobs."""

    # Root of the source package, derived from this file's location.
    lib_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # Bundled static images used for thumbnails and placeholders.
    image_path = os.path.join(lib_path, 'web', 'static', 'images')
    # Placeholder shown in the preview pane before a job thumbnail is loaded.
    default_image = Image.open(os.path.join(image_path, 'desktop.png'))
|
||||
|
||||
    def __init__(self):
        """Build the dashboard UI, start zeroconf discovery, seed the server/job
        tables with an initial selection, and launch the background refresh thread."""

        # Create a Treeview widget
        self.root = tk.Tk()
        self.root.title("Zordon Dashboard")
        self.current_hostname = None      # hostname of the currently selected server
        self.server_proxies = {}          # hostname -> RenderServerProxy
        self.added_hostnames = []         # servers added manually (removable), vs. discovered

        # Setup zeroconf
        ZeroconfServer.configure("_zordon._tcp.local.", socket.gethostname(), 8080)
        ZeroconfServer.start(listen_only=True)

        # Setup photo preview
        photo_pad = tk.Frame(self.root, background="gray")
        photo_pad.pack(fill=tk.BOTH, pady=5, padx=5)
        self.photo_label = tk.Label(photo_pad, height=500)
        self.photo_label.pack(fill=tk.BOTH, expand=True)
        self.set_image(self.default_image)

        server_frame = tk.LabelFrame(self.root, text="Server")
        server_frame.pack(fill=tk.BOTH, pady=5, padx=5, expand=True)

        # Create server tree
        left_frame = tk.Frame(server_frame)
        left_frame.pack(side=tk.LEFT, expand=True, fill=tk.BOTH)
        self.server_tree = ttk.Treeview(left_frame, show="headings")
        self.server_tree.pack(expand=True, fill=tk.BOTH)
        self.server_tree["columns"] = ("Server", "Status")
        self.server_tree.bind("<<TreeviewSelect>>", self.server_picked)
        self.server_tree.column("Server", width=200)
        self.server_tree.column("Status", width=80)

        left_button_frame = tk.Frame(left_frame)
        left_button_frame.pack(side=tk.BOTTOM, fill=tk.X, padx=5, pady=5, expand=False)

        # Create buttons
        # NOTE(review): this assignment shadows the remove_server_button *method*
        # on the instance; the command= captured the bound method just before the
        # overwrite, so it still works - but renaming one of the two would be safer.
        self.remove_server_button = tk.Button(left_button_frame, text="-", command=self.remove_server_button)
        self.remove_server_button.pack(side=tk.RIGHT)
        self.remove_server_button.config(state='disabled')
        add_server_button = tk.Button(left_button_frame, text="+", command=self.add_server_button)
        add_server_button.pack(side=tk.RIGHT)

        # Create separator
        separator = ttk.Separator(server_frame, orient=tk.VERTICAL)
        separator.pack(side=tk.LEFT, padx=5, pady=5, fill=tk.Y)

        # Setup the Tree
        self.job_tree = ttk.Treeview(server_frame, show="headings")
        self.job_tree.tag_configure(RenderStatus.RUNNING.value, background='lawn green', font=('', 0, 'bold'))
        self.job_tree.bind("<<TreeviewSelect>>", self.job_picked)
        self.job_tree["columns"] = ("id", "Name", "Renderer", "Priority", "Status", "Time Elapsed", "Frames",
                                    "Date Added", "Parent", "")

        # Format the columns (the hidden "id" column carries the job id)
        self.job_tree.column("id", width=0, stretch=False)
        self.job_tree.column("Name", width=300)
        self.job_tree.column("Renderer", width=100, stretch=False)
        self.job_tree.column("Priority", width=50, stretch=False)
        self.job_tree.column("Status", width=100, stretch=False)
        self.job_tree.column("Time Elapsed", width=100, stretch=False)
        self.job_tree.column("Frames", width=50, stretch=False)
        self.job_tree.column("Date Added", width=150, stretch=True)
        self.job_tree.column("Parent", width=250, stretch=True)

        # Create the column headings
        for name in self.job_tree['columns']:
            self.job_tree.heading(name, text=name)

        # Pack the Treeview widget
        self.job_tree.pack(fill=tk.BOTH, expand=True)

        button_frame = tk.Frame(server_frame)
        button_frame.pack(pady=5, fill=tk.X, expand=False)

        # Create buttons
        self.logs_button = tk.Button(button_frame, text="Logs", command=self.open_logs)
        self.show_files_button = tk.Button(button_frame, text="Show Files", command=self.show_files)
        self.stop_button = tk.Button(button_frame, text="Stop", command=self.stop_job)
        self.delete_button = tk.Button(button_frame, text="Delete", command=self.delete_job)
        add_job_button = tk.Button(button_frame, text="Add Job", command=self.show_new_job_window)

        # Pack the buttons in the frame (job-specific buttons start disabled)
        self.stop_button.pack(side=tk.LEFT)
        self.stop_button.config(state='disabled')
        self.delete_button.pack(side=tk.LEFT)
        self.delete_button.config(state='disabled')
        self.show_files_button.pack(side=tk.LEFT)
        self.show_files_button.config(state='disabled')
        self.logs_button.pack(side=tk.LEFT)
        self.logs_button.config(state='disabled')
        add_job_button.pack(side=tk.RIGHT)

        # Start the Tkinter event loop
        self.root.geometry("500x600+300+300")
        self.root.maxsize(width=2000, height=1200)
        self.root.minsize(width=900, height=800)
        make_sortable(self.job_tree)
        make_sortable(self.server_tree)

        # update servers, pre-selecting the first one when any exist
        self.update_servers()
        try:
            selected_server = self.server_tree.get_children()[0]
            self.server_tree.selection_set(selected_server)
            self.server_picked()
        except IndexError:
            pass

        # update jobs, pre-selecting the first one when any exist
        self.update_jobs()
        try:
            selected_job = self.job_tree.get_children()[0]
            self.job_tree.selection_set(selected_job)
            self.job_picked()
        except IndexError:
            pass

        # start background update
        x = threading.Thread(target=self.__background_update)
        x.daemon = True
        x.start()
|
||||
|
||||
@property
def current_server_proxy(self):
    """RenderServerProxy for the currently selected hostname, or None if unknown."""
    return self.server_proxies.get(self.current_hostname)
|
||||
|
||||
def remove_server_button(self):
    """Handler for the 'remove server' control: drop the selected manually-added host.

    Only hostnames the user added by hand (tracked in self.added_hostnames) can be
    removed; zeroconf-discovered servers would simply reappear on the next
    update_servers() pass. After removal the first remaining server (if any) is
    re-selected so the UI keeps a valid selection.
    """
    # NOTE(review): server_picked() calls self.remove_server_button.config(...),
    # which implies an attribute elsewhere shadows this method name — confirm in __init__.
    new_hostname = self.server_tree.selection()[0]
    if new_hostname in self.added_hostnames:
        self.added_hostnames.remove(new_hostname)
        self.update_servers()
        if self.server_tree.get_children():
            self.server_tree.selection_set(self.server_tree.get_children()[0])
            self.server_picked(event=None)
|
||||
|
||||
def add_server_button(self):
    """Handler for the 'add server' control: prompt for a hostname and register it.

    The hostname is only kept if a RenderServerProxy can actually connect to it;
    otherwise an error dialog is shown. Duplicates are silently ignored.
    """
    hostname = simpledialog.askstring("Server Hostname", "Enter the server hostname to add:")
    if hostname:
        hostname = hostname.strip()
        if hostname not in self.added_hostnames:
            # Probe the server before adding so dead hostnames never enter the list.
            if RenderServerProxy(hostname=hostname).connect():
                self.added_hostnames.append(hostname)
                self.update_servers()
            else:
                messagebox.showerror("Cannot Connect", f"Cannot connect to server at hostname: '{hostname}'")
|
||||
|
||||
def server_picked(self, event=None):
    """Handler for selecting a row in the server tree.

    Enables the remove-server control only for manually-added hosts, then switches
    the job table to the newly selected server (clearing stale rows). A click with
    no selection raises IndexError, which is deliberately swallowed.
    """
    try:
        new_hostname = self.server_tree.selection()[0]
        # Only user-added servers may be removed; discovered ones are managed by zeroconf.
        self.remove_server_button.config(state="normal" if new_hostname in self.added_hostnames else "disabled")
        if self.current_hostname == new_hostname:
            return  # same server re-selected; nothing to refresh
        self.current_hostname = new_hostname
        self.update_jobs(clear_table=True)
    except IndexError:
        pass  # nothing selected
|
||||
|
||||
def selected_job_ids(self):
    """Return the job IDs (first value column) of all rows selected in the job tree."""
    selected_rows = (self.job_tree.item(item) for item in self.job_tree.selection())
    return [row['values'][0] for row in selected_rows]
|
||||
|
||||
def stop_job(self):
    """Cancel every selected job on the current server, then refresh the job table.

    Fix: bail out when no server is selected (current_server_proxy is None)
    instead of raising AttributeError on proxy.cancel_job.
    """
    proxy = self.current_server_proxy
    if not proxy:
        return
    for job_id in self.selected_job_ids():
        proxy.cancel_job(job_id, confirm=True)
    self.update_jobs(clear_table=True)
|
||||
|
||||
def delete_job(self):
    """Delete the selected job(s) on the current server after user confirmation.

    Fixes:
    - returns early when nothing is selected or no server is active (previously
      an empty selection still opened the confirmation dialog, and a missing
      proxy raised AttributeError);
    - handles the single-selection case where the job has vanished from the
      server between selection and deletion (previously TypeError on job=None).
    """
    job_ids = self.selected_job_ids()
    proxy = self.current_server_proxy
    if not job_ids or not proxy:
        return
    if len(job_ids) == 1:
        job = next((d for d in proxy.get_all_jobs() if d.get('id') == job_ids[0]), None)
        if job:
            display_name = job['name'] or os.path.basename(job['input_path'])
            message = f"Are you sure you want to delete the job:\n{display_name}?"
        else:
            # Job already gone from the server; fall back to the generic prompt.
            message = f"Are you sure you want to delete these {len(job_ids)} jobs?"
    else:
        message = f"Are you sure you want to delete these {len(job_ids)} jobs?"

    result = messagebox.askyesno("Confirmation", message)
    if result:
        for job_id in job_ids:
            proxy.request_data(f'job/{job_id}/delete?confirm=true')
        self.update_jobs(clear_table=True)
|
||||
|
||||
def set_image(self, image):
    """Display *image* (a PIL Image) in the preview label.

    A reference to the PhotoImage is stored on the label (photo_label.image)
    because Tkinter keeps only a weak reference to widget images — without it
    the image would be garbage-collected and the preview would go blank.
    """
    thumb_image = ImageTk.PhotoImage(image)
    if thumb_image:
        self.photo_label.configure(image=thumb_image)
        self.photo_label.image = thumb_image
|
||||
|
||||
def job_picked(self, event=None):
    """Handler for selecting a row in the job tree.

    Fetches the job's big thumbnail on a background thread (dropping the result
    if the selection or server changed meanwhile) and enables/disables the
    Stop/Delete/Show Files/Logs buttons based on the job's status.
    """
    job_id = self.selected_job_ids()[0] if self.selected_job_ids() else None
    if job_id:
        # update thumb
        def fetch_preview():
            try:
                # Remember which server we asked, so a late response for a
                # different server/selection is discarded instead of shown.
                before_fetch_hostname = self.current_server_proxy.hostname
                response = self.current_server_proxy.request(f'job/{job_id}/thumbnail?size=big')
                if response.ok:
                    import io
                    image_data = response.content
                    image = Image.open(io.BytesIO(image_data))
                    # NOTE(review): selected_job_ids()[0] can raise IndexError if the
                    # selection was cleared mid-fetch; caught by the broad handler below.
                    if self.current_server_proxy.hostname == before_fetch_hostname and job_id == self.selected_job_ids()[0]:
                        self.set_image(image)
            except ConnectionError as e:
                logger.error(f"Connection error fetching image: {e}")
            except Exception as e:
                logger.error(f"Error fetching image: {e}")

        fetch_thread = threading.Thread(target=fetch_preview)
        fetch_thread.daemon = True
        fetch_thread.start()
    else:
        self.set_image(self.default_image)

    # update button status
    # NOTE(review): current_server_proxy may be None here (no server selected) —
    # confirm callers guarantee a selection, otherwise this raises AttributeError.
    current_jobs = self.current_server_proxy.get_all_jobs() or []
    job = next((d for d in current_jobs if d.get('id') == job_id), None)
    # Stop only makes sense for a running job.
    stop_button_state = 'normal' if job and job['status'] == RenderStatus.RUNNING.value else 'disabled'
    self.stop_button.config(state=stop_button_state)

    # The remaining buttons only need the job to exist.
    generic_button_state = 'normal' if job else 'disabled'
    self.show_files_button.config(state=generic_button_state)
    self.delete_button.config(state=generic_button_state)
    self.logs_button.config(state=generic_button_state)
|
||||
|
||||
def show_files(self):
    """Open the selected job's output folder in the system file browser.

    Checks the local filesystem first, then any attached network mounts.
    Fix: guards against a stale selection whose job no longer exists on the
    server (previously raised TypeError on job=None).
    """
    if not self.selected_job_ids():
        return

    proxy = self.current_server_proxy
    if not proxy:
        return
    job = next((d for d in proxy.get_all_jobs() if d.get('id') == self.selected_job_ids()[0]), None)
    if not job:
        return messagebox.showerror("File Not Found", "The file could not be found. Check your network mounts.")
    output_path = os.path.dirname(job['output_path'])  # check local filesystem
    if not os.path.exists(output_path):
        output_path = file_exists_in_mounts(output_path)  # check any attached network shares
        if not output_path:
            return messagebox.showerror("File Not Found", "The file could not be found. Check your network mounts.")
    launch_url(output_path)
|
||||
|
||||
def open_logs(self):
    """Open the selected job's server-side log page in the default browser."""
    selection = self.selected_job_ids()
    if not selection:
        return
    proxy = self.current_server_proxy
    launch_url(f'http://{proxy.hostname}:{proxy.port}/api/job/{selection[0]}/logs')
|
||||
|
||||
def mainloop(self):
    """Enter the Tk event loop (blocks until the dashboard window is closed)."""
    self.root.mainloop()
|
||||
|
||||
def __background_update(self):
    """Daemon-thread loop: refresh the server and job tables once per second.

    Runs forever; the owning thread is created with daemon=True so it dies with
    the process. NOTE(review): Tk widgets are updated from this non-main thread,
    which Tkinter does not guarantee to be safe — confirm this is acceptable.
    """
    while True:
        self.update_servers()
        self.update_jobs()
        time.sleep(1)
|
||||
|
||||
def update_servers(self):
    """Synchronise the server tree with zeroconf-discovered plus manually-added hosts.

    Creates a background-updating RenderServerProxy for each newly seen host,
    refreshes the status column of known hosts, and removes rows (stopping and
    dropping their proxies) for hosts that disappeared.
    """

    def update_row(tree, id, new_values, tags=None):
        # Replace, in place, the values of the row whose first column equals `id`.
        for item in tree.get_children():
            values = tree.item(item, "values")
            if values[0] == id:
                if tags:
                    tree.item(item, values=new_values, tags=tags)
                else:
                    tree.item(item, values=new_values)
                break

    current_servers = list(set(ZeroconfServer.found_clients() + self.added_hostnames))
    for hostname in current_servers:
        if not self.server_proxies.get(hostname, None):
            new_proxy = RenderServerProxy(hostname=hostname)
            new_proxy.start_background_update()
            self.server_proxies[hostname] = new_proxy

    try:
        for hostname, proxy in self.server_proxies.items():
            if hostname not in self.server_tree.get_children():
                # The row iid doubles as the hostname so later lookups stay trivial.
                self.server_tree.insert("", tk.END, iid=hostname, values=(hostname, proxy.status(), ))
            else:
                update_row(self.server_tree, hostname, new_values=(hostname, proxy.status()))
    except RuntimeError:
        # self.server_proxies may be mutated by another thread while iterating;
        # skip this refresh cycle and catch up on the next one.
        pass

    # remove any servers that don't belong
    for row in self.server_tree.get_children():
        if row not in current_servers:
            self.server_tree.delete(row)
            proxy = self.server_proxies.get(row, None)
            if proxy:
                proxy.stop_background_update()
                self.server_proxies.pop(row)
|
||||
|
||||
def update_jobs(self, clear_table=False):
    """Refresh the job table from the currently selected server.

    Parameters:
        clear_table (bool): when True, wipe all rows first (used when switching
            servers) and bypass the proxy's change-token cache so a full job
            list is fetched.
    """

    if not self.current_server_proxy:
        return  # no server selected yet

    def update_row(tree, id, new_values, tags=None):
        # Replace, in place, the values of the row whose first column equals `id`.
        for item in tree.get_children():
            values = tree.item(item, "values")
            if values[0] == id:
                tree.item(item, values=new_values, tags=tags)
                break

    if clear_table:
        self.job_tree.delete(*self.job_tree.get_children())

    job_fetch = self.current_server_proxy.get_all_jobs(ignore_token=clear_table)
    if job_fetch:
        for job in job_fetch:
            display_status = job['status'] if job['status'] != RenderStatus.RUNNING.value else \
                ('%.0f%%' % (job['percent_complete'] * 100))  # if running, show percent, otherwise just show status
            tags = (job['status'],)  # the status string doubles as the row's style tag
            # NOTE(review): datetime.datetime.fromisoformat requires `import datetime`
            # (the module) in this file — confirm, since sibling files import the class.
            start_time = datetime.datetime.fromisoformat(job['start_time']) if job['start_time'] else None
            end_time = datetime.datetime.fromisoformat(job['end_time']) if job['end_time'] else None

            # Elapsed time is blank unless the job is running or actually finished.
            time_elapsed = "" if (job['status'] != RenderStatus.RUNNING.value and not end_time) else \
                get_time_elapsed(start_time, end_time)

            values = (job['id'],
                      job['name'] or os.path.basename(job['input_path']),
                      job['renderer'] + "-" + job['renderer_version'],
                      job['priority'],
                      display_status,
                      time_elapsed,
                      job['total_frames'],
                      job['date_created'],
                      job['parent'])
            try:
                if self.job_tree.exists(job['id']):
                    update_row(self.job_tree, job['id'], new_values=values, tags=tags)
                else:
                    self.job_tree.insert("", tk.END, iid=job['id'], values=values, tags=tags)
            except tk.TclError:
                pass  # widget may be mid-teardown; ignore this cycle

        # remove any jobs that don't belong
        all_job_ids = [job['id'] for job in job_fetch]
        for row in self.job_tree.get_children():
            if row not in all_job_ids:
                self.job_tree.delete(row)
|
||||
|
||||
def show_new_job_window(self):
    """Open the job-submission form (NewJobWindow) in its own Toplevel window."""
    new_window = tk.Toplevel(self.root)
    new_window.title("New Window")
    new_window.geometry("500x600+300+300")
    new_window.resizable(False, height=True)
    # Offer every known server (tree row iids are hostnames) as submission targets.
    x = NewJobWindow(parent=new_window, clients=list(self.server_tree.get_children()))
    x.pack()
|
||||
|
||||
|
||||
def start_client():
    """Configure logging and launch the dashboard GUI (blocks in the Tk mainloop)."""
    log_format = '%(asctime)s: %(levelname)s: %(module)s: %(message)s'
    logging.basicConfig(format=log_format, datefmt='%d-%b-%y %H:%M:%S', level='INFO')

    dashboard = DashboardWindow()
    dashboard.mainloop()
|
||||
|
||||
|
||||
# Allow running the dashboard directly as a script.
if __name__ == '__main__':
    start_client()
|
||||
452
src/client/new_job_window.py
Executable file
@@ -0,0 +1,452 @@
|
||||
#!/usr/bin/env python3
|
||||
import copy
|
||||
import logging
|
||||
import os.path
|
||||
import pathlib
|
||||
import socket
|
||||
import threading
|
||||
from tkinter import *
|
||||
from tkinter import filedialog, messagebox
|
||||
from tkinter.ttk import Frame, Label, Entry, Combobox, Progressbar
|
||||
|
||||
import psutil
|
||||
|
||||
from src.server_proxy import RenderServerProxy
|
||||
from src.workers.blender_worker import Blender
|
||||
from src.workers.ffmpeg_worker import FFMPEG
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
label_width = 9
|
||||
header_padding = 6
|
||||
|
||||
|
||||
# CheckListBox source - https://stackoverflow.com/questions/50398649/python-tkinter-tk-support-checklist-box
class ChecklistBox(Frame):
    """A vertical list of checkboxes; each checked item reports its label text."""

    def __init__(self, parent, choices, **kwargs):
        super().__init__(parent, **kwargs)

        # One StringVar per choice: holds the label when checked, "" when unchecked.
        self.vars = []
        for choice in choices:
            var = StringVar(value="")
            self.vars.append(var)
            box = Checkbutton(self, text=choice, onvalue=choice, offvalue="", anchor="w",
                              width=20, relief="flat", highlightthickness=0, variable=var)
            box.pack(side="top", fill="x", anchor="w")

    def getCheckedItems(self):
        """Return the labels of all currently checked items, in display order."""
        return [var.get() for var in self.vars if var.get()]

    def resetCheckedItems(self):
        """Uncheck every item. Returns an empty list (kept for API compatibility)."""
        for var in self.vars:
            var.set(value='')
        return []
|
||||
|
||||
|
||||
class NewJobWindow(Frame):
    """Tk frame that collects render-job settings and submits them to a render server.

    Fix included: choose_file_button previously derived the default output name
    with os.path.splitext(...)[-1], which yields the file *extension* (e.g.
    'blend') instead of the file name stem; it now uses index [0].
    """

    def __init__(self, parent=None, clients=None):
        """Build the job-submission form.

        Parameters:
            parent: containing Tk widget; also kept as self.root for .after() callbacks.
            clients (list[str] | None): server hostnames offered as submission targets.
        """
        super().__init__(parent)

        self.root = parent
        self.clients = clients or []
        self.server_proxy = RenderServerProxy(hostname=clients[0] if clients else None)
        self.chosen_file = None    # path picked in the file dialog
        self.project_info = {}     # scene metadata extracted from the chosen file
        self.presets = {}          # server-side argument presets
        self.renderer_info = {}    # renderer capabilities reported by the server
        self.priority = IntVar(value=2)

        self.master.title("New Job")
        self.pack(fill=BOTH, expand=True)

        # job settings frame (container for all the rows below)
        job_frame = LabelFrame(self, text="Job Settings")
        job_frame.pack(fill=X, padx=5, pady=5)

        # project frame
        project_frame = Frame(job_frame)
        project_frame.pack(fill=X)

        project_label = Label(project_frame, text="Project", width=label_width)
        project_label.pack(side=LEFT, padx=5, pady=5)

        self.project_button = Button(project_frame, text="no file selected", width=6, command=self.choose_file_button)
        self.project_button.pack(fill=X, padx=5, expand=True)

        # client frame
        client_frame = Frame(job_frame)
        client_frame.pack(fill=X)

        Label(client_frame, text="Client", width=label_width).pack(side=LEFT, padx=5, pady=5)

        self.client_combo = Combobox(client_frame, state="readonly")
        self.client_combo.pack(fill=X, padx=5, expand=True)
        self.client_combo.bind('<<ComboboxSelected>>', self.client_picked)
        self.client_combo['values'] = self.clients
        if self.clients:
            self.client_combo.current(0)

        # renderer frame
        renderer_frame = Frame(job_frame)
        renderer_frame.pack(fill=X)

        Label(renderer_frame, text="Renderer", width=label_width).pack(side=LEFT, padx=5, pady=5)
        self.renderer_combo = Combobox(renderer_frame, state="readonly")
        self.renderer_combo.pack(fill=X, padx=5, expand=True)
        self.renderer_combo.bind('<<ComboboxSelected>>', self.refresh_renderer_settings)

        # priority frame
        priority_frame = Frame(job_frame)
        priority_frame.pack(fill=X)

        Label(priority_frame, text="Priority", width=label_width).pack(side=LEFT, padx=5, pady=5)
        Radiobutton(priority_frame, text="1", value=1, variable=self.priority).pack(anchor=W, side=LEFT, padx=5)
        Radiobutton(priority_frame, text="2", value=2, variable=self.priority).pack(anchor=W, side=LEFT, padx=5)
        Radiobutton(priority_frame, text="3", value=3, variable=self.priority).pack(anchor=W, side=LEFT, padx=5)

        # presets
        presets_frame = Frame(job_frame)
        presets_frame.pack(fill=X)

        Label(presets_frame, text="Presets", width=label_width).pack(side=LEFT, padx=5, pady=5)
        self.presets_combo = Combobox(presets_frame, state="readonly")
        self.presets_combo.pack(side=LEFT, padx=5, pady=5, expand=True, fill=X)
        self.presets_combo.bind('<<ComboboxSelected>>', self.chose_preset)

        # output frame
        output_frame = Frame(job_frame)
        output_frame.pack(fill=X)

        Label(output_frame, text="Output", width=label_width).pack(side=LEFT, padx=5, pady=5)

        self.output_entry = Entry(output_frame)
        self.output_entry.pack(side=LEFT, padx=5, expand=True, fill=X)

        self.output_format = Combobox(output_frame, state="readonly", values=['JPG', 'MOV', 'PNG'], width=9)
        self.output_format.pack(side=LEFT, padx=5, pady=5)
        self.output_format['state'] = DISABLED

        # frame_range frame
        frame_range_frame = Frame(job_frame)
        frame_range_frame.pack(fill=X)

        Label(frame_range_frame, text="Frames", width=label_width).pack(side=LEFT, padx=5, pady=5, expand=False)

        self.start_frame_spinbox = Spinbox(frame_range_frame, from_=0, to=50000, width=5)
        self.start_frame_spinbox.pack(side=LEFT, expand=False, padx=5, pady=5)

        Label(frame_range_frame, text="to").pack(side=LEFT, pady=5, expand=False)
        self.end_frame_spinbox = Spinbox(frame_range_frame, from_=0, to=50000, width=5)
        self.end_frame_spinbox.pack(side=LEFT, expand=False, padx=5, pady=5)

        # Blender-specific widgets/state (created lazily in draw_blender_settings)
        self.blender_frame = None
        self.blender_cameras_frame = None
        self.blender_engine = StringVar(value='CYCLES')
        self.blender_pack_textures = BooleanVar(value=False)
        self.blender_multiple_cameras = BooleanVar(value=False)
        self.blender_cameras_list = None

        # Custom Args / Submit Button
        self.custom_args_frame = None
        self.custom_args_entry = None
        self.submit_frame = None

        self.progress_frame = None
        self.progress_label = None
        self.progress_bar = None
        self.upload_status = None

        self.fetch_server_data()

    def client_picked(self, event=None):
        """Handler for choosing a client host: repoint the proxy and re-fetch capabilities."""
        self.server_proxy.hostname = self.client_combo.get()
        self.fetch_server_data()

    def fetch_server_data(self):
        """Fetch renderer capabilities and presets from the selected server and refresh the UI."""
        self.renderer_info = self.server_proxy.request_data('renderer_info', timeout=3) or {}
        self.presets = self.server_proxy.request_data('presets', timeout=3) or {}

        # update available renders
        self.renderer_combo['values'] = list(self.renderer_info.keys())
        if self.renderer_info.keys():
            self.renderer_combo.current(0)

        self.refresh_renderer_settings()

    def choose_file_button(self):
        """Prompt for a project file, set a default output name, and auto-pick a renderer."""
        self.chosen_file = filedialog.askopenfilename()
        button_text = os.path.basename(self.chosen_file) if self.chosen_file else "no file selected"
        self.project_button.configure(text=button_text)

        # Update the output label
        self.output_entry.delete(0, END)
        if self.chosen_file:
            # Generate a default output name from the project file's stem.
            # (Fixed: splitext()[-1] returned the *extension*, not the name.)
            output_name = os.path.splitext(os.path.basename(self.chosen_file))[0]
            self.output_entry.insert(0, output_name)

            # Try to determine file type
            extension = os.path.splitext(self.chosen_file)[-1].strip('.')  # not the best way to do this
            for renderer, renderer_info in self.renderer_info.items():
                supported = [x.lower().strip('.') for x in renderer_info.get('supported_extensions', [])]
                if extension.lower().strip('.') in supported:
                    if renderer in self.renderer_combo['values']:
                        self.renderer_combo.set(renderer)

        self.refresh_renderer_settings()

    def chose_preset(self, event=None):
        """Handler for choosing a preset: copy its stored args into the custom-args entry."""
        preset_name = self.presets_combo.get()
        renderer = self.renderer_combo.get()

        # Presets are stored per-renderer; match by the displayed name.
        presets_to_show = {key: value for key, value in self.presets.items() if value.get("renderer") == renderer}
        matching_dict = next((value for value in presets_to_show.values() if value.get("name") == preset_name), None)
        if matching_dict:
            self.custom_args_entry.delete(0, END)
            self.custom_args_entry.insert(0, matching_dict['args'])

    def refresh_renderer_settings(self, event=None):
        """Rebuild the renderer-dependent portion of the form for the selected renderer."""
        renderer = self.renderer_combo.get()

        # clear old settings
        if self.blender_frame:
            self.blender_frame.pack_forget()

        if not self.chosen_file:
            return

        if renderer == 'blender':
            self.project_info = Blender.get_scene_info(self.chosen_file)
            self.draw_blender_settings()
        elif renderer == 'ffmpeg':
            f = FFMPEG.get_frame_count(self.chosen_file)
            self.project_info['frame_end'] = f

        # set frame start / end numbers fetched from file
        if self.project_info.get('frame_start'):
            self.start_frame_spinbox.delete(0, 'end')
            self.start_frame_spinbox.insert(0, self.project_info['frame_start'])
        if self.project_info.get('frame_end'):
            self.end_frame_spinbox.delete(0, 'end')
            self.end_frame_spinbox.insert(0, self.project_info['frame_end'])

        # redraw lower ui
        self.draw_custom_args()
        self.draw_submit_button()

        # check supported export formats
        if self.renderer_info.get(renderer, {}).get('supported_export_formats', None):
            formats = self.renderer_info[renderer]['supported_export_formats']
            # Formats may arrive as plain strings or as dicts with a 'name' key.
            if formats and isinstance(formats[0], dict):
                formats = [x.get('name', str(x)) for x in formats]
            formats.sort()
            self.output_format['values'] = formats
            self.output_format['state'] = NORMAL
            self.output_format.current(0)
        else:
            self.output_format['values'] = []
            self.output_format['state'] = DISABLED

        # update presets
        presets_to_show = {key: value for key, value in self.presets.items() if value.get("renderer") == renderer}
        self.presets_combo['values'] = [value['name'] for value in presets_to_show.values()]

    def draw_custom_args(self):
        """(Re)create the 'Advanced' frame holding the free-form arguments entry."""
        if hasattr(self, 'custom_args_frame') and self.custom_args_frame:
            self.custom_args_frame.forget()
        self.custom_args_frame = LabelFrame(self, text="Advanced")
        self.custom_args_frame.pack(side=TOP, fill=X, expand=False, padx=5, pady=5)
        Label(self.custom_args_frame, text="Custom Args", width=label_width).pack(side=LEFT, padx=5, pady=5)
        self.custom_args_entry = Entry(self.custom_args_frame)
        self.custom_args_entry.pack(side=TOP, padx=5, expand=True, fill=X)

    def draw_submit_button(self):
        """(Re)create the frame holding the Submit button."""
        if hasattr(self, 'submit_frame') and self.submit_frame:
            self.submit_frame.forget()
        self.submit_frame = Frame(self)
        self.submit_frame.pack(fill=BOTH, expand=True)
        submit_button = Button(self.submit_frame, text="Submit", command=self.submit_job)
        submit_button.pack(fill=Y, anchor="s", pady=header_padding)

    def draw_progress_frame(self):
        """(Re)create the progress bar/label frame shown during job submission."""
        if hasattr(self, 'progress_frame') and self.progress_frame:
            self.progress_frame.forget()
        self.progress_frame = LabelFrame(self, text="Job Submission")
        self.progress_frame.pack(side=TOP, fill=X, expand=False, padx=5, pady=5)
        self.progress_bar = Progressbar(self.progress_frame, length=300, mode="determinate")
        self.progress_bar.pack()
        self.progress_label = Label(self.progress_frame, text="Starting Up")
        self.progress_label.pack(pady=5, padx=5)

    def draw_blender_settings(self):
        """Create the Blender-specific settings frame (engine, packing, multi-camera)."""

        # blender settings
        self.blender_frame = LabelFrame(self, text="Blender Settings")
        self.blender_frame.pack(fill=X, padx=5)

        blender_engine_frame = Frame(self.blender_frame)
        blender_engine_frame.pack(fill=X)

        Label(blender_engine_frame, text="Engine", width=label_width).pack(side=LEFT, padx=5, pady=5)

        Radiobutton(blender_engine_frame, text="Cycles", value="CYCLES", variable=self.blender_engine).pack(
            anchor=W, side=LEFT, padx=5)
        Radiobutton(blender_engine_frame, text="Eevee", value="BLENDER_EEVEE", variable=self.blender_engine).pack(
            anchor=W, side=LEFT, padx=5)

        # options
        pack_frame = Frame(self.blender_frame)
        pack_frame.pack(fill=X)

        Label(pack_frame, text="Options", width=label_width).pack(side=LEFT, padx=5, pady=5)

        Checkbutton(pack_frame, text="Pack Textures", variable=self.blender_pack_textures, onvalue=True, offvalue=False
                    ).pack(anchor=W, side=LEFT, padx=5)

        # multi cams: toggles a checklist of the scene's cameras
        def draw_scene_cams(event=None):
            if self.project_info:
                show_cams_checkbutton['state'] = NORMAL
                if self.blender_multiple_cameras.get():
                    self.blender_cameras_frame = Frame(self.blender_frame)
                    self.blender_cameras_frame.pack(fill=X)

                    Label(self.blender_cameras_frame, text="Cameras", width=label_width).pack(side=LEFT, padx=5, pady=5)

                    choices = [f"{x['name']} - {int(float(x['lens']))}mm" for x in self.project_info['cameras']]
                    choices.sort()
                    self.blender_cameras_list = ChecklistBox(self.blender_cameras_frame, choices, relief="sunken")
                    self.blender_cameras_list.pack(padx=5, fill=X)
                elif self.blender_cameras_frame:
                    self.blender_cameras_frame.pack_forget()
            else:
                show_cams_checkbutton['state'] = DISABLED
                if self.blender_cameras_frame:
                    self.blender_cameras_frame.pack_forget()

        # multiple cameras checkbox (defined after draw_scene_cams, which closes over it)
        camera_count = len(self.project_info.get('cameras', [])) if self.project_info else 0
        show_cams_checkbutton = Checkbutton(pack_frame, text=f'Multiple Cameras ({camera_count})', offvalue=False,
                                            onvalue=True,
                                            variable=self.blender_multiple_cameras, command=draw_scene_cams)
        show_cams_checkbutton.pack(side=LEFT, padx=5)
        show_cams_checkbutton['state'] = NORMAL if camera_count > 1 else DISABLED

    def submit_job(self):
        """Package the form's settings into job JSON and upload it on a background thread."""

        def submit_job_worker():

            self.draw_progress_frame()
            self.progress_bar['value'] = 0
            self.progress_bar.configure(mode='determinate')
            self.progress_bar.start()
            self.progress_label.configure(text="Preparing files...")

            # start the progress UI
            client = self.client_combo.get()

            renderer = self.renderer_combo.get()
            job_json = {'owner': psutil.Process().username() + '@' + socket.gethostname(),
                        'renderer': renderer,
                        'client': client,
                        'output_path': os.path.join(os.path.dirname(self.chosen_file), self.output_entry.get()),
                        'args': {'raw': self.custom_args_entry.get()},
                        'start_frame': self.start_frame_spinbox.get(),
                        'end_frame': self.end_frame_spinbox.get(),
                        'name': None}
            job_list = []

            input_path = self.chosen_file

            temp_files = []  # files created here that must be deleted after upload
            if renderer == 'blender':
                if self.blender_pack_textures.get():
                    self.progress_label.configure(text="Packing Blender file...")
                    new_path = Blender.pack_project_file(project_path=input_path, timeout=300)
                    if new_path:
                        logger.info(f"New Path is now {new_path}")
                        input_path = new_path
                        temp_files.append(new_path)
                    else:
                        err_msg = f'Failed to pack Blender file: {input_path}'
                        messagebox.showinfo("Error", err_msg)
                        return
                # add all Blender args
                job_json['args']['engine'] = self.blender_engine.get()
                job_json['args']['export_format'] = self.output_format.get()

                # multiple camera rendering: one job per checked camera
                if self.blender_cameras_list and self.blender_multiple_cameras.get():
                    selected_cameras = self.blender_cameras_list.getCheckedItems()
                    for cam in selected_cameras:
                        job_copy = copy.deepcopy(job_json)
                        # Checklist labels are "<name> - <lens>mm"; recover the camera name.
                        job_copy['args']['camera'] = cam.rsplit('-', 1)[0].strip()
                        job_copy['name'] = pathlib.Path(input_path).stem.replace(' ', '_') + "-" + cam.replace(' ', '')
                        job_list.append(job_copy)

            # Submit to server
            job_list = job_list or [job_json]
            self.progress_label.configure(text="Posting to server...")
            self.progress_bar.stop()
            self.progress_bar.configure(mode='determinate')
            self.progress_bar.start()

            def create_callback(encoder):
                # Upload-progress callback factory for the multipart encoder.
                encoder_len = encoder.len

                def callback(monitor):
                    percent = f"{monitor.bytes_read / encoder_len * 100:.0f}"
                    self.progress_label.configure(text=f"Transferring to {client} - {percent}%")
                    self.progress_bar['value'] = int(percent)
                return callback

            result = self.server_proxy.post_job_to_server(file_path=input_path, job_list=job_list,
                                                          callback=create_callback)

            self.progress_bar.stop()
            # clean up
            for temp in temp_files:
                os.remove(temp)

            def finish_on_main():
                # Report the outcome on the Tk main thread.
                if result.ok:
                    message = "Job successfully submitted to server."
                    self.progress_label.configure(text=message)
                    messagebox.showinfo("Success", message)
                    logger.info(message)
                else:
                    message = result.text or "Unknown error"
                    self.progress_label.configure(text=message)
                    logger.warning(message)
                    messagebox.showinfo("Error", message)
                self.progress_label.configure(text="")
                self.progress_frame.forget()

            self.root.after(0, finish_on_main)

        # Start the job submit task as a bg thread
        bg_thread = threading.Thread(target=submit_job_worker)
        bg_thread.start()
|
||||
|
||||
|
||||
def main():
    """Stand-alone entry point: open the New Job window in its own Tk root."""

    root = Tk()
    root.geometry("500x600+300+300")
    root.maxsize(width=1000, height=2000)
    root.minsize(width=600, height=600)
    app = NewJobWindow(root)  # reference kept so the frame survives until mainloop exits
    root.mainloop()


# Allow running this window directly as a script.
if __name__ == '__main__':
    main()
|
||||
339
src/distributed_job_manager.py
Normal file
@@ -0,0 +1,339 @@
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
from pubsub import pub
|
||||
|
||||
from src.render_queue import RenderQueue
|
||||
from src.server_proxy import RenderServerProxy
|
||||
from src.utilities.misc_helper import get_file_size_human
|
||||
from src.utilities.status_utils import RenderStatus, string_to_status
|
||||
from src.utilities.zeroconf_server import ZeroconfServer
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class DistributedJobManager:
|
||||
|
||||
def __init__(self):
    # The manager is used purely through classmethods; instances hold no state.
    pass
|
||||
|
||||
@classmethod
def start(cls):
    """
    Subscribes the private class method '__local_job_status_changed' to the 'status_change' pubsub message.
    This should be called once, typically during the initialization phase.
    """
    # Name mangling makes this _DistributedJobManager__local_job_status_changed.
    pub.subscribe(cls.__local_job_status_changed, 'status_change')
|
||||
|
||||
@classmethod
def __local_job_status_changed(cls, job_id, old_status, new_status):
    """
    Responds to the 'status_change' pubsub message for local jobs.
    If it's a child job, it notifies the parent job about the status change.

    Parameters:
        job_id (str): The ID of the job that has changed status.
        old_status (str): The previous status of the job.
        new_status (str): The new (current) status of the job.

    Note: Do not call directly. Instead, call via the 'status_change' pubsub message.
    """

    render_job = RenderQueue.job_with_id(job_id, none_ok=True)
    if not render_job:
        # Jobs created but not yet added to the queue have nothing to notify.
        return

    logger.debug(f"Job {job_id} status change: {old_status} -> {new_status}")
    if render_job.parent:
        # Local job is a subjob spawned by a remote parent ('<id>@<hostname>'):
        # report the change back to the parent's server.
        parent_parts = render_job.parent.split('@')
        RenderServerProxy(parent_parts[-1]).notify_parent_of_status_change(parent_id=parent_parts[0],
                                                                           subjob=render_job)

    elif render_job.children and new_status == RenderStatus.CANCELLED:
        # todo: handle cancelling all the children
        pass
|
||||
|
||||
@classmethod
def handle_subjob_status_change(cls, local_job, subjob_data):
    """
    Responds to a status change from a remote subjob and triggers the creation or modification of subjobs as needed.

    Parameters:
        local_job (BaseRenderWorker): The local parent job worker.
        subjob_data (dict): subjob data sent from remote server.

    Returns:
        None
    """

    subjob_status = string_to_status(subjob_data['status'])
    subjob_id = subjob_data['id']
    # Children are keyed '<id>@<hostname>'; recover the hostname for this subjob id.
    subjob_hostname = next((hostname.split('@')[1] for hostname in local_job.children if
                            hostname.split('@')[0] == subjob_id), None)
    # NOTE(review): if the id is not among the known children this stores a
    # '<id>@None' key — confirm that is intended.
    local_job.children[f'{subjob_id}@{subjob_hostname}'] = subjob_data

    logname = f"{local_job.id}:{subjob_id}@{subjob_hostname}"
    logger.debug(f"Subjob status changed: {logname} -> {subjob_status.value}")

    # Download complete or partial render jobs
    if subjob_status in [RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.ERROR] and \
            subjob_data['file_count']:
        download_result = cls.download_from_subjob(local_job, subjob_id, subjob_hostname)
        if not download_result:
            # todo: handle error
            logger.error(f"Unable to download subjob files from {logname} with status {subjob_status.value}")

    if subjob_status == RenderStatus.CANCELLED or subjob_status == RenderStatus.ERROR:
        # todo: determine missing frames and schedule new job
        pass
|
||||
|
||||
@staticmethod
def download_from_subjob(local_job, subjob_id, subjob_hostname):
    """
    Downloads and extracts files from a completed subjob on a remote server.

    The child's 'download_status' cache entry is driven through
    'working' -> 'complete' (or 'failed') as a side effect.

    Parameters:
        local_job (BaseRenderWorker): The local parent job worker.
        subjob_id (str or int): The ID of the subjob.
        subjob_hostname (str): The hostname of the remote server where the subjob is located.

    Returns:
        bool: True if the files have been downloaded and extracted successfully, False otherwise.
    """

    child_key = f'{subjob_id}@{subjob_hostname}'
    logname = f"{local_job.id}:{child_key}"
    # Temporary archive lives alongside the job's output so extraction lands in place.
    zip_file_path = local_job.output_path + f'_{subjob_hostname}_{subjob_id}.zip'

    # download zip file from server
    try:
        local_job.children[child_key]['download_status'] = 'working'
        logger.info(f"Downloading completed subjob files from {subjob_hostname} to localhost")
        RenderServerProxy(subjob_hostname).get_job_files(subjob_id, zip_file_path)
        logger.info(f"File transfer complete for {logname} - Transferred {get_file_size_human(zip_file_path)}")
    except Exception as e:
        logger.exception(f"Exception downloading files from remote server: {e}")
        local_job.children[child_key]['download_status'] = 'failed'
        return False

    # extract zip
    try:
        logger.debug(f"Extracting zip file: {zip_file_path}")
        extract_path = os.path.dirname(zip_file_path)
        with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
            zip_ref.extractall(extract_path)
        logger.info(f"Successfully extracted zip to: {extract_path}")
        # The archive is only an intermediate transfer container; drop it once extracted.
        os.remove(zip_file_path)
        local_job.children[child_key]['download_status'] = 'complete'
    except Exception as e:
        logger.exception(f"Exception extracting zip file: {e}")
        local_job.children[child_key]['download_status'] = 'failed'

    # Success is defined purely by the final cache state set above.
    return local_job.children[child_key].get('download_status', None) == 'complete'
|
||||
|
||||
@classmethod
def wait_for_subjobs(cls, local_job):
    """
    Block until every remote subjob of local_job has been downloaded or explicitly skipped.

    Polls each child's server every 5 seconds, refreshing the parent's cached
    subjob data, triggering downloads for finished subjobs that produced files,
    and marking finished-but-undownloadable subjobs as 'skipped' so the wait
    loop can terminate.

    Parameters:
        local_job (BaseRenderWorker): The local parent job whose children are awaited.

    Returns:
        None
    """
    logger.debug(f"Waiting for subjobs for job {local_job}")
    local_job.status = RenderStatus.WAITING_FOR_SUBJOBS
    statuses_to_download = [RenderStatus.CANCELLED, RenderStatus.ERROR, RenderStatus.COMPLETED]

    # Children whose output has not yet been fetched (no status, in-flight, or None).
    def subjobs_not_downloaded():
        return {k: v for k, v in local_job.children.items() if 'download_status' not in v or
                v['download_status'] == 'working' or v['download_status'] is None}

    logger.debug(f'subjobs_not_downloaded: {subjobs_not_downloaded()}')

    while len(subjobs_not_downloaded()):
        for child_key, subjob_cached_data in subjobs_not_downloaded().items():

            # Child keys are "<id>@<hostname>".
            subjob_id = child_key.split('@')[0]
            subjob_hostname = child_key.split('@')[-1]

            # Fetch info from server and handle failing case
            subjob_data = RenderServerProxy(subjob_hostname).get_job_info(subjob_id)
            if not subjob_data:
                logger.warning(f"No response from: {subjob_hostname}")
                # todo: handle timeout / missing server situations
                continue

            # Update parent job cache but keep the download status
            download_status = local_job.children[child_key].get('download_status', None)
            local_job.children[child_key] = subjob_data
            local_job.children[child_key]['download_status'] = download_status

            status = string_to_status(subjob_data.get('status', ''))
            status_msg = f"Subjob {child_key} | {status} | " \
                         f"{float(subjob_data.get('percent_complete')) * 100.0}%"
            logger.debug(status_msg)

            # Still working in another thread - keep waiting
            if download_status == 'working':
                continue

            # Check if job is finished, but has not had files copied yet over yet
            if download_status is None and subjob_data['file_count'] and status in statuses_to_download:
                download_result = cls.download_from_subjob(local_job, subjob_id, subjob_hostname)
                if not download_result:
                    logger.error("Failed to download from subjob")
                    # todo: error handling here

            # Any finished jobs not successfully downloaded at this point are skipped
            # (marking them 'skipped' removes them from subjobs_not_downloaded()).
            if local_job.children[child_key].get('download_status', None) is None and \
                    status in statuses_to_download:
                logger.warning(f"Skipping waiting on downloading from subjob: {child_key}")
                local_job.children[child_key]['download_status'] = 'skipped'

        if subjobs_not_downloaded():
            logger.debug(f"Waiting on {len(subjobs_not_downloaded())} subjobs on "
                         f"{', '.join(list(subjobs_not_downloaded().keys()))}")
            time.sleep(5)
|
||||
|
||||
@classmethod
def split_into_subjobs(cls, worker, job_data, project_path):
    """
    Split a render job across the available servers on the network.

    Computes a frame-range split plan, posts remote subjobs, truncates the
    local worker to its own share of frames, and records each remote subjob
    under worker.children. If any subjob fails to post, the whole split is
    abandoned.

    Parameters:
        worker (BaseRenderWorker): The local job being split; mutated in place
            (start_frame/end_frame/name/children).
        job_data (dict): The original job submission payload used as a template
            for subjob payloads.
        project_path (str): Path to the project file uploaded to remote servers.
    """

    # Check availability
    available_servers = cls.find_available_servers(worker.renderer)
    subjob_servers = cls.distribute_server_work(worker.start_frame, worker.end_frame, available_servers)
    local_hostname = socket.gethostname()

    # Prep and submit these sub-jobs
    logger.info(f"Job {worker.id} split plan: {subjob_servers}")
    try:
        for server_data in subjob_servers:
            server_hostname = server_data['hostname']
            if server_hostname != local_hostname:
                post_results = cls.__create_subjob(job_data, local_hostname, project_path, server_data,
                                                   server_hostname, worker)
                if post_results.ok:
                    server_data['submission_results'] = post_results.json()[0]
                else:
                    # Leaving submission_results unset makes the all() check below raise.
                    logger.error(f"Failed to create subjob on {server_hostname}")
                    break
            else:
                # truncate parent render_job to only its own share of the frame range
                worker.start_frame = max(server_data['frame_range'][0], worker.start_frame)
                worker.end_frame = min(server_data['frame_range'][-1], worker.end_frame)
                logger.info(f"Local job now rendering from {worker.start_frame} to {worker.end_frame}")
                server_data['submission_results'] = worker.json()

        # check that job posts were all successful.
        if not all(d.get('submission_results') is not None for d in subjob_servers):
            raise ValueError("Failed to create all subjobs")  # look into recalculating job #s and use exising jobs

        # start subjobs: record each remote subjob on the parent, keyed "<id>@<hostname>"
        logger.debug(f"Starting {len(subjob_servers) - 1} attempted subjobs")
        for server_data in subjob_servers:
            if server_data['hostname'] != local_hostname:
                child_key = f"{server_data['submission_results']['id']}@{server_data['hostname']}"
                worker.children[child_key] = server_data['submission_results']
        worker.name = f"{worker.name}[{worker.start_frame}-{worker.end_frame}]"

    except Exception as e:
        # cancel all the subjobs
        # NOTE(review): cancellation is currently a no-op (code commented out below),
        # so remote subjobs created before the failure keep running -- confirm intended.
        logger.error(f"Failed to split job into subjobs: {e}")
        logger.debug(f"Cancelling {len(subjob_servers) - 1} attempted subjobs")
        # [RenderServerProxy(hostname).cancel_job(results['id'], confirm=True) for hostname, results in
        # submission_results.items()]  # todo: fix this
|
||||
|
||||
@staticmethod
def __create_subjob(job_data, local_hostname, project_path, server_data, server_hostname, worker):
    """Build a subjob payload for one server's frame share and post it there.

    The payload is a copy of the original job data with the name, parent key
    ("<id>@<hostname>") and frame range overridden for this server.

    Returns the requests.Response from the remote job submission.
    """
    first_frame = server_data['frame_range'][0]
    last_frame = server_data['frame_range'][-1]

    subjob = dict(job_data)
    subjob.update({
        'name': f"{worker.name}[{first_frame}-{last_frame}]",
        'parent': f"{worker.id}@{local_hostname}",
        'start_frame': first_frame,
        'end_frame': last_frame,
    })

    logger.debug(f"Posting subjob with frames {subjob['start_frame']}-"
                 f"{subjob['end_frame']} to {server_hostname}")
    return RenderServerProxy(server_hostname).post_job_to_server(file_path=project_path, job_list=[subjob])
|
||||
|
||||
@staticmethod
|
||||
def distribute_server_work(start_frame, end_frame, available_servers, method='cpu_count'):
|
||||
"""
|
||||
Splits the frame range among available servers proportionally based on their performance (CPU count).
|
||||
|
||||
:param start_frame: int, The start frame number of the animation to be rendered.
|
||||
:param end_frame: int, The end frame number of the animation to be rendered.
|
||||
:param available_servers: list, A list of available server dictionaries. Each server dictionary should include
|
||||
'hostname' and 'cpu_count' keys (see find_available_servers)
|
||||
:param method: str, Optional. Specifies the distribution method. Possible values are 'cpu_count' and 'equally'
|
||||
|
||||
|
||||
:return: A list of server dictionaries where each dictionary includes the frame range and total number of frames
|
||||
to be rendered by the server.
|
||||
"""
|
||||
|
||||
# Calculate respective frames for each server
|
||||
def divide_frames_by_cpu_count(frame_start, frame_end, servers):
|
||||
total_frames = frame_end - frame_start + 1
|
||||
total_performance = sum(server['cpu_count'] for server in servers)
|
||||
|
||||
frame_ranges = {}
|
||||
current_frame = frame_start
|
||||
allocated_frames = 0
|
||||
|
||||
for i, server in enumerate(servers):
|
||||
if i == len(servers) - 1: # if it's the last server
|
||||
# Give all remaining frames to the last server
|
||||
num_frames = total_frames - allocated_frames
|
||||
else:
|
||||
num_frames = round((server['cpu_count'] / total_performance) * total_frames)
|
||||
allocated_frames += num_frames
|
||||
|
||||
frame_end_for_server = current_frame + num_frames - 1
|
||||
|
||||
if current_frame <= frame_end_for_server:
|
||||
frame_ranges[server['hostname']] = (current_frame, frame_end_for_server)
|
||||
current_frame = frame_end_for_server + 1
|
||||
|
||||
return frame_ranges
|
||||
|
||||
def divide_frames_equally(frame_start, frame_end, servers):
|
||||
frame_range = frame_end - frame_start + 1
|
||||
frames_per_server = frame_range // len(servers)
|
||||
leftover_frames = frame_range % len(servers)
|
||||
|
||||
frame_ranges = {}
|
||||
current_start = frame_start
|
||||
for i, server in enumerate(servers):
|
||||
current_end = current_start + frames_per_server - 1
|
||||
if leftover_frames > 0:
|
||||
current_end += 1
|
||||
leftover_frames -= 1
|
||||
if current_start <= current_end:
|
||||
frame_ranges[server['hostname']] = (current_start, current_end)
|
||||
current_start = current_end + 1
|
||||
|
||||
return frame_ranges
|
||||
|
||||
if method == 'equally':
|
||||
breakdown = divide_frames_equally(start_frame, end_frame, available_servers)
|
||||
# elif method == 'benchmark_score': # todo: implement benchmark score
|
||||
# pass
|
||||
else:
|
||||
breakdown = divide_frames_by_cpu_count(start_frame, end_frame, available_servers)
|
||||
|
||||
server_breakdown = [server for server in available_servers if breakdown.get(server['hostname']) is not None]
|
||||
for server in server_breakdown:
|
||||
server['frame_range'] = breakdown[server['hostname']]
|
||||
server['total_frames'] = breakdown[server['hostname']][-1] - breakdown[server['hostname']][0] + 1
|
||||
return server_breakdown
|
||||
|
||||
@staticmethod
def find_available_servers(renderer):
    """
    Scan the Zeroconf network for render servers that can currently take
    a job for the given renderer.

    :param renderer: str, the renderer type to search for.
    :return: list of dicts, each with 'hostname' and 'cpu_count' keys.
    """
    matches = []
    for host in ZeroconfServer.found_clients():
        info = RenderServerProxy(host).get_status()
        if not info:
            # Host did not answer; treat as unavailable.
            continue
        renderer_info = info.get('renderers', {}).get(renderer, {})
        if renderer_info.get('is_available', False):
            matches.append({'hostname': host, 'cpu_count': int(info['cpu_count'])})
    return matches
|
||||
160
src/render_queue.py
Executable file
@@ -0,0 +1,160 @@
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from src.utilities.status_utils import RenderStatus
|
||||
from src.worker_factory import RenderWorkerFactory
|
||||
from src.workers.base_worker import Base
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class JobNotFoundError(Exception):
    """Raised when a job ID cannot be located in the render queue.

    Attributes:
        job_id: The ID that failed to resolve.
    """

    def __init__(self, job_id, *args):
        # Unpack the extra positional args into the Exception. The previous
        # super().__init__(args) passed the tuple itself as a single argument,
        # which made str(exc) render as a nested tuple like "(('msg',),)".
        super().__init__(*args)
        self.job_id = job_id
|
||||
|
||||
|
||||
class RenderQueue:
    """Class-level render job queue backed by a SQLite database via SQLAlchemy.

    All state (engine, session, in-memory job list) lives on the class; the
    queue is used as a process-wide singleton through classmethods.
    """

    # Database setup happens at class-definition (import) time.
    engine = create_engine('sqlite:///database.db')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
    # In-memory list of worker objects; reloaded from the DB by load_state().
    job_queue = []
    # Maximum concurrently-running jobs allowed per renderer type.
    maximum_renderer_instances = {'blender': 1, 'aerender': 1, 'ffmpeg': 4}
    # NOTE(review): last_saved_counts is never reassigned anywhere in this class,
    # so the comparison in evaluate_queue() is true whenever any jobs exist and
    # save_state() runs on every pass -- confirm whether it should be updated there.
    last_saved_counts = {}

    def __init__(self):
        # All functionality is class-level; instances carry no state.
        pass

    @classmethod
    def start_queue(cls):
        """Initialize the queue by loading persisted jobs from the database."""
        cls.load_state()

    @classmethod
    def job_status_change(cls, job_id, status):
        """Callback hook invoked when a job's status changes; currently log-only."""
        logger.debug(f"Job status changed: {job_id} -> {status}")

    @classmethod
    def add_to_render_queue(cls, render_job, force_start=False):
        """Add a job to the queue (and DB), optionally starting it immediately."""
        logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job))
        cls.job_queue.append(render_job)
        if force_start:
            cls.start_job(render_job)
        cls.session.add(render_job)
        cls.save_state()

    @classmethod
    def all_jobs(cls):
        """Return the live in-memory job list (not a copy)."""
        return cls.job_queue

    @classmethod
    def running_jobs(cls):
        """Return jobs currently in the RUNNING state."""
        return cls.jobs_with_status(RenderStatus.RUNNING)

    @classmethod
    def pending_jobs(cls):
        """Return jobs waiting to run: NOT_STARTED first, then SCHEDULED."""
        pending_jobs = cls.jobs_with_status(RenderStatus.NOT_STARTED)
        pending_jobs.extend(cls.jobs_with_status(RenderStatus.SCHEDULED))
        return pending_jobs

    @classmethod
    def jobs_with_status(cls, status, priority_sorted=False):
        """Return jobs matching status, optionally sorted lowest-priority-number first."""
        found_jobs = [x for x in cls.all_jobs() if x.status == status]
        if priority_sorted:
            found_jobs = sorted(found_jobs, key=lambda a: a.priority, reverse=False)
        return found_jobs

    @classmethod
    def job_with_id(cls, job_id, none_ok=False):
        """Look up a job by ID.

        Raises JobNotFoundError when the ID is unknown, unless none_ok is True
        (in which case None is returned instead).
        """
        found_job = next((x for x in cls.all_jobs() if x.id == job_id), None)
        if not found_job and not none_ok:
            raise JobNotFoundError(job_id)
        return found_job

    @classmethod
    def clear_history(cls):
        """Delete all jobs that have reached a terminal state."""
        to_remove = [x for x in cls.all_jobs() if x.status in [RenderStatus.CANCELLED,
                                                              RenderStatus.COMPLETED, RenderStatus.ERROR]]
        for job_to_remove in to_remove:
            cls.delete_job(job_to_remove)
        cls.save_state()

    @classmethod
    def load_state(cls):
        """Replace the in-memory queue with all workers persisted in the database."""
        # Imported locally to avoid a circular import at module load time.
        from src.workers.base_worker import BaseRenderWorker
        cls.job_queue = cls.session.query(BaseRenderWorker).all()

    @classmethod
    def save_state(cls):
        """Commit any pending job changes to the database."""
        cls.session.commit()

    @classmethod
    def prepare_for_shutdown(cls):
        """Cancel running jobs and persist state before the process exits."""
        running_jobs = cls.jobs_with_status(RenderStatus.RUNNING)  # cancel all running jobs
        for job in running_jobs:
            cls.cancel_job(job)
        cls.save_state()

    @classmethod
    def is_available_for_job(cls, renderer, priority=2):
        """Return True if a job for `renderer` at `priority` may start now.

        A job may start only if the renderer binary exists, the per-renderer
        instance cap is not reached, and no strictly-higher-priority
        (lower number) job is currently running.
        """
        if not RenderWorkerFactory.class_for_name(renderer).engine.renderer_path():
            return False

        instances = cls.renderer_instances()
        higher_priority_jobs = [x for x in cls.running_jobs() if x.priority < priority]
        max_allowed_instances = cls.maximum_renderer_instances.get(renderer, 1)
        maxed_out_instances = renderer in instances.keys() and instances[renderer] >= max_allowed_instances
        return not maxed_out_instances and not higher_priority_jobs

    @classmethod
    def evaluate_queue(cls):
        """Periodic tick: start eligible queued jobs and due scheduled jobs, then persist."""
        not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
        for job in not_started:
            if cls.is_available_for_job(job.renderer, job.priority):
                cls.start_job(job)

        scheduled = cls.jobs_with_status(RenderStatus.SCHEDULED, priority_sorted=True)
        for job in scheduled:
            # NOTE(review): scheduled jobs bypass is_available_for_job -- confirm intended.
            if job.scheduled_start <= datetime.now():
                logger.debug(f"Starting scheduled job: {job}")
                cls.start_job(job)

        if cls.last_saved_counts != cls.job_counts():
            cls.save_state()

    @classmethod
    def start_job(cls, job):
        """Start a job immediately and persist the state change."""
        logger.info(f'Starting render: {job.name} - Priority {job.priority}')
        job.start()
        cls.save_state()

    @classmethod
    def cancel_job(cls, job):
        """Stop a job; returns True if it ended in the CANCELLED state."""
        logger.info(f'Cancelling job ID: {job.id}')
        job.stop()
        return job.status == RenderStatus.CANCELLED

    @classmethod
    def delete_job(cls, job):
        """Stop a job and remove it from both the queue and the database."""
        logger.info(f"Deleting job ID: {job.id}")
        job.stop()
        cls.job_queue.remove(job)
        cls.session.delete(job)
        cls.save_state()
        return True

    @classmethod
    def renderer_instances(cls):
        """Return a Counter mapping renderer name -> number of running jobs."""
        from collections import Counter
        all_instances = [x.renderer for x in cls.running_jobs()]
        return Counter(all_instances)

    @classmethod
    def job_counts(cls):
        """Return a dict mapping each status value string to its job count."""
        job_counts = {}
        for job_status in RenderStatus:
            job_counts[job_status.value] = len(cls.jobs_with_status(job_status))
        return job_counts
|
||||
159
src/server_proxy.py
Normal file
@@ -0,0 +1,159 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
|
||||
import requests
|
||||
from requests_toolbelt.multipart import MultipartEncoder, MultipartEncoderMonitor
|
||||
|
||||
from src.utilities.status_utils import RenderStatus
|
||||
|
||||
# Display color name associated with each job status (used by UI/terminal output).
status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', RenderStatus.COMPLETED: 'green',
                 RenderStatus.NOT_STARTED: "yellow", RenderStatus.SCHEDULED: 'purple',
                 RenderStatus.RUNNING: 'cyan', RenderStatus.WAITING_FOR_SUBJOBS: 'blue'}

# Display/sort order applied to job lists fetched from a remote server.
categories = [RenderStatus.RUNNING, RenderStatus.WAITING_FOR_SUBJOBS, RenderStatus.ERROR, RenderStatus.NOT_STARTED,
              RenderStatus.SCHEDULED, RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.UNDEFINED]

logger = logging.getLogger()

# Number of consecutive failed requests after which a background-updating proxy
# reports its server as offline.
OFFLINE_MAX = 2
|
||||
|
||||
|
||||
class RenderServerProxy:
    """HTTP client wrapper for talking to a remote render server's REST API.

    Optionally polls the remote job list on a background daemon thread; while
    background updates are active, online/offline status is derived from a
    consecutive-failure counter instead of a fresh request per check.
    """

    def __init__(self, hostname, server_port="8080"):
        self.hostname = hostname
        self.port = server_port
        # NOTE(review): fetched_status_data is never read or written elsewhere
        # in this class -- possibly dead state.
        self.fetched_status_data = None
        # Change token returned by the server; sent back to skip unchanged job lists.
        self.__jobs_cache_token = None
        self.__jobs_cache = []
        self.__update_in_background = False
        self.__background_thread = None
        # Count of consecutive failed requests (reset to 0 on any success).
        self.__offline_flags = 0
        # Seconds between background cache refreshes.
        self.update_cadence = 5

    def connect(self):
        """Fetch the server's status payload; returns None if unreachable."""
        status = self.request_data('status')
        return status

    def is_online(self):
        """Return True if the server is considered reachable.

        Uses the failure counter when background polling is active; otherwise
        performs a live status request.
        """
        if self.__update_in_background:
            return self.__offline_flags < OFFLINE_MAX
        else:
            return self.connect() is not None

    def status(self):
        """Return a short human-readable status: 'Offline', 'Available', or 'N running'."""
        if not self.is_online():
            return "Offline"
        running_jobs = [x for x in self.__jobs_cache if x['status'] == 'running'] if self.__jobs_cache else []
        return f"{len(running_jobs)} running" if running_jobs else "Available"

    def request_data(self, payload, timeout=5):
        """GET an API endpoint and return its decoded JSON, or None on any failure.

        Network failures (timeout / connection error) increment the offline
        counter; a successful response resets it.
        """
        try:
            req = self.request(payload, timeout)
            if req.ok and req.status_code == 200:
                self.__offline_flags = 0
                return req.json()
        except json.JSONDecodeError as e:
            logger.debug(f"JSON decode error: {e}")
        except requests.ReadTimeout as e:
            logger.warning(f"Timed out: {e}")
            self.__offline_flags = self.__offline_flags + 1
        except requests.ConnectionError as e:
            logger.warning(f"Connection error: {e}")
            self.__offline_flags = self.__offline_flags + 1
        except Exception as e:
            logger.exception(f"Uncaught exception: {e}")
        return None

    def request(self, payload, timeout=5):
        """Issue a raw GET to the server's /api/<payload> endpoint."""
        return requests.get(f'http://{self.hostname}:{self.port}/api/{payload}', timeout=timeout)

    def start_background_update(self):
        """Begin refreshing the job cache on a daemon thread every update_cadence seconds."""
        self.__update_in_background = True

        def thread_worker():
            while self.__update_in_background:
                self.__update_job_cache()
                time.sleep(self.update_cadence)

        self.__background_thread = threading.Thread(target=thread_worker)
        self.__background_thread.daemon = True
        self.__background_thread.start()

    def stop_background_update(self):
        """Signal the background thread to exit after its current sleep."""
        self.__update_in_background = False

    def get_job_info(self, job_id, timeout=5):
        """Return the JSON description of a single remote job, or None."""
        return self.request_data(f'job/{job_id}', timeout=timeout)

    def get_all_jobs(self, timeout=5, ignore_token=False):
        """Return a copy of the (possibly cached) remote job list, or None if empty.

        Refreshes synchronously unless background polling is active (or
        ignore_token forces a full refresh).
        """
        if not self.__update_in_background or ignore_token:
            self.__update_job_cache(timeout, ignore_token)
        return self.__jobs_cache.copy() if self.__jobs_cache else None

    def __update_job_cache(self, timeout=5, ignore_token=False):
        """Fetch the remote job list and store it sorted by status category.

        Sends the last change token so the server can skip unchanged results.
        """
        url = f'jobs?token={self.__jobs_cache_token}' if self.__jobs_cache_token and not ignore_token else 'jobs'
        status_result = self.request_data(url, timeout=timeout)
        if status_result is not None:
            # Re-sort jobs by the display order defined in `categories`.
            sorted_jobs = []
            for status_category in categories:
                found_jobs = [x for x in status_result['jobs'] if x['status'] == status_category.value]
                if found_jobs:
                    sorted_jobs.extend(found_jobs)
            self.__jobs_cache = sorted_jobs
            self.__jobs_cache_token = status_result['token']

    def get_data(self, timeout=5):
        """Return the server's full status payload, or None."""
        all_data = self.request_data('full_status', timeout=timeout)
        return all_data

    def cancel_job(self, job_id, confirm=False):
        """Request cancellation of a remote job; confirm=True actually cancels."""
        return self.request_data(f'job/{job_id}/cancel?confirm={confirm}')

    def get_status(self):
        """Return the server's basic status payload, or None."""
        return self.request_data('status')

    def notify_parent_of_status_change(self, parent_id, subjob):
        """POST a subjob's current state to its parent job on this server."""
        return requests.post(f'http://{self.hostname}:{self.port}/api/job/{parent_id}/notify_parent_of_status_change',
                             json=subjob.json())

    def post_job_to_server(self, file_path, job_list, callback=None):
        """Submit one or more jobs (with their project file) to this server.

        Parameters:
            file_path (str): Path to the project file to upload.
            job_list (list[dict]): Job submission payloads.
            callback: Optional factory called with the encoder; its return value
                is used as the MultipartEncoderMonitor progress callback.

        Returns:
            requests.Response from the /api/add_job endpoint.
        """

        # bypass uploading file if posting to localhost
        if self.hostname == socket.gethostname():
            jobs_with_path = [{**item, "local_path": file_path} for item in job_list]
            return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=json.dumps(jobs_with_path),
                                 headers={'Content-Type': 'application/json'})

        # Prepare the form data
        # NOTE(review): the file handle opened here is never explicitly closed;
        # it is released only when the encoder is garbage-collected.
        encoder = MultipartEncoder({
            'file': (os.path.basename(file_path), open(file_path, 'rb'), 'application/octet-stream'),
            'json': (None, json.dumps(job_list), 'application/json'),
        })

        # Create a monitor that will track the upload progress
        if callback:
            monitor = MultipartEncoderMonitor(encoder, callback(encoder))
        else:
            monitor = MultipartEncoderMonitor(encoder)

        # Send the request
        headers = {'Content-Type': monitor.content_type}
        return requests.post(f'http://{self.hostname}:{self.port}/api/add_job', data=monitor, headers=headers)

    def get_job_files(self, job_id, save_path):
        """Download a job's complete output archive to save_path."""
        url = f"http://{self.hostname}:{self.port}/api/job/{job_id}/download_all"
        return self.download_file(url, filename=save_path)

    @staticmethod
    def download_file(url, filename):
        """Stream a URL to a local file in 8 KiB chunks; returns the filename."""
        with requests.get(url, stream=True) as r:
            r.raise_for_status()
            with open(filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
            return filename
|
||||
|
||||
0
src/utilities/__init__.py
Normal file
20
src/utilities/ffmpeg_helper.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import subprocess
|
||||
from src.workers.engines.ffmpeg_engine import FFMPEG
|
||||
|
||||
|
||||
def image_sequence_to_video(source_glob_pattern, output_path, framerate=24, encoder="prores_ks", profile=4,
                            start_frame=1):
    """Encode a numbered image sequence into a video file with ffmpeg.

    Raises subprocess.CalledProcessError if ffmpeg exits non-zero.
    """
    command = [
        FFMPEG.renderer_path(),
        "-framerate", str(framerate),
        "-start_number", str(start_frame),
        "-i", f"{source_glob_pattern}",
        "-c:v", encoder,
        "-profile:v", str(profile),
        '-pix_fmt', 'yuva444p10le',  # 10-bit with alpha, suitable for ProRes 4444
        output_path,
    ]
    subprocess.run(command, check=True)
|
||||
|
||||
|
||||
def save_first_frame(source_path, dest_path, max_width=1280):
    """Extract the first frame of a video to an image, scaled to max_width.

    ffmpeg output is suppressed; raises subprocess.CalledProcessError on failure.
    """
    command = [
        FFMPEG.renderer_path(),
        '-i', source_path,
        '-vf', f'scale={max_width}:-1',  # -1 preserves the aspect ratio
        '-vframes', '1',
        dest_path,
    ]
    subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
|
||||
|
||||
|
||||
def generate_thumbnail(source_path, dest_path, max_width=240, fps=12):
    """Create a small, silent h.264 preview clip from a video file.

    ffmpeg output is suppressed; raises subprocess.CalledProcessError on failure.
    """
    # trunc(ow/a/2)*2 forces an even height, which libx264/yuv420p requires.
    scale_filter = f"scale={max_width}:trunc(ow/a/2)*2,format=yuv420p"
    command = [
        FFMPEG.renderer_path(),
        '-i', source_path,
        '-vf', scale_filter,
        '-r', str(fps),
        '-c:v', 'libx264',
        '-preset', 'ultrafast',
        '-an',  # strip audio
        dest_path,
    ]
    subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
|
||||
105
src/utilities/misc_helper.py
Normal file
@@ -0,0 +1,105 @@
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def launch_url(url):
    """Open *url* (or a file path) with the platform's default opener.

    Tries xdg-open (Linux), open (macOS), then start (Windows), logging an
    error if none is present.

    Parameters:
        url (str): URL or path to hand to the system opener.
    """
    import shutil  # local import keeps module import-time dependencies unchanged

    # shutil.which probes the PATH directly; the previous implementation ran
    # `which` as a subprocess, which raised FileNotFoundError on systems
    # without a `which` binary (e.g. Windows) instead of falling through.
    for launcher in ('xdg-open', 'open', 'start'):
        if shutil.which(launcher):
            subprocess.run([launcher, url])
            return
    logger.error(f"No valid launchers found to launch url: {url}")
|
||||
|
||||
|
||||
def file_exists_in_mounts(filepath):
    """
    Check if a file exists in any mounted directory.
    It searches for the file in common mount points like '/Volumes', '/mnt', and '/media'.
    Returns the path to the file in the mount if found, otherwise returns None.

    Example:
        Before: filepath = '/path/to/file.txt'
        After: '/Volumes/ExternalDrive/path/to/file.txt'
    """

    def get_path_components(path):
        # Split a path into its components, e.g. 'a/b/c' -> ['a', 'b', 'c'].
        path = os.path.normpath(path)
        components = []
        while True:
            path, component = os.path.split(path)
            if component:
                components.append(component)
            else:
                if path:
                    components.append(path)
                break
        components.reverse()
        return components

    # Get path components of the directory of the file
    path_components = get_path_components(os.path.dirname(filepath).strip('/'))

    # Iterate over possible root paths - this may need to be rethought for Windows support
    for root in ['/Volumes', '/mnt', '/media']:
        if os.path.exists(root):
            # Iterate over mounts in the root path
            for mount in os.listdir(root):
                # Since we don't know the home directory, we iterate until we find matching parts of the path
                # NOTE(review): `s in mount` is a substring test, so a component like 'data'
                # also matches a mount named 'data2' -- confirm this looseness is intended.
                matching_components = [s for s in path_components if s in mount]
                for component in matching_components:
                    # Rebuild the path relative to the matching component, rooted at this mount.
                    # NOTE(review): filepath.split(component) splits on every occurrence of the
                    # component string anywhere in the path -- verify behavior for repeated names.
                    possible_mount_path = os.path.join(root, mount, filepath.split(component)[-1].lstrip('/'))
                    if os.path.exists(possible_mount_path):
                        return possible_mount_path
    # Falls through (returning None implicitly) when no candidate path exists.
|
||||
|
||||
|
||||
def get_time_elapsed(start_time=None, end_time=None):
    """Return the elapsed time between two datetimes as 'HH:MM:SS'.

    If end_time is omitted, elapsed time runs up to datetime.now().
    Returns None when start_time is missing, or when the elapsed delta is
    zero/empty. Whole days are not folded into the hour field (only the
    sub-day remainder is formatted).
    """
    if start_time:
        delta = (end_time - start_time) if end_time else (datetime.now() - start_time)
    else:
        delta = None

    # A zero-length timedelta is falsy and is reported as None, matching
    # the "no elapsed time" case.
    if not delta:
        return None

    hours, remainder = divmod(delta.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}"
|
||||
|
||||
|
||||
def get_file_size_human(file_path):
    """Return the size of *file_path* as a human-readable string.

    Sizes below 1024 bytes are reported as an integer byte count ('N B');
    larger sizes use two decimal places with KB/MB/GB/TB units (1024-based).
    """
    size = os.path.getsize(file_path)

    if size < 1024:
        return f"{size} B"

    # Walk up the unit ladder; each unit covers [1024**power, 1024**(power+1)).
    for power, unit in ((1, "KB"), (2, "MB"), (3, "GB")):
        if size < 1024 ** (power + 1):
            return f"{size / 1024 ** power:.2f} {unit}"
    return f"{size / 1024 ** 4:.2f} TB"
|
||||
|
||||
47
src/utilities/server_helper.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import threading
|
||||
|
||||
from src.utilities.ffmpeg_helper import generate_thumbnail, save_first_frame
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
def generate_thumbnail_for_job(job, thumb_video_path, thumb_image_path, max_width=320):
    """Create still and video thumbnails for a render job's output media.

    The still thumbnail is generated synchronously from the best available
    source (preferring image files over video); the video thumbnail is
    generated on a background thread, with an '<thumb_video_path>_IN-PROGRESS'
    marker file present while it runs. Existing thumbnails are never
    regenerated.

    Parameters:
        job: worker whose file_list() / input_path supply the source media.
        thumb_video_path (str): destination path for the animated thumbnail.
        thumb_image_path (str): destination path for the still thumbnail.
        max_width (int): maximum pixel width for both thumbnails.
    """

    # Simple thread to generate thumbs in background
    def generate_thumb_thread(source):
        # Marker file lets other code detect that generation is underway.
        in_progress_path = thumb_video_path + '_IN-PROGRESS'
        subprocess.run(['touch', in_progress_path])
        try:
            logger.debug(f"Generating video thumbnail for {source}")
            generate_thumbnail(source_path=source, dest_path=thumb_video_path, max_width=max_width)
        except subprocess.CalledProcessError as err:
            logger.error(f"Error generating video thumbnail for {source}: {err}")

        try:
            os.remove(in_progress_path)
        except FileNotFoundError:
            pass

    # Determine best source file to use for thumbs
    source_files = job.file_list() or [job.input_path]
    if source_files:
        # Fixed: '.mkv' previously lacked its leading dot, so os.path.splitext
        # results ('.mkv') never matched and Matroska files got no thumbnail.
        video_formats = ['.mp4', '.mov', '.avi', '.mpg', '.mpeg', '.mxf', '.m4v', '.mkv']
        image_formats = ['.jpg', '.png', '.exr']

        image_files = [f for f in source_files if os.path.splitext(f)[-1].lower() in image_formats]
        video_files = [f for f in source_files if os.path.splitext(f)[-1].lower() in video_formats]

        if (video_files or image_files) and not os.path.exists(thumb_image_path):
            try:
                # Prefer a still image as the source for the static thumbnail.
                path_of_source = image_files[0] if image_files else video_files[0]
                logger.debug(f"Generating image thumbnail for {path_of_source}")
                save_first_frame(source_path=path_of_source, dest_path=thumb_image_path, max_width=max_width)
            except Exception as e:
                logger.error(f"Exception saving first frame: {e}")

        if video_files and not os.path.exists(thumb_video_path):
            x = threading.Thread(target=generate_thumb_thread, args=(video_files[0],))
            x.start()
|
||||
20
src/utilities/status_utils.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class RenderStatus(Enum):
    """Lifecycle states for a render job.

    Member values are the lowercase strings exchanged with remote servers
    and stored in the database.
    """
    NOT_STARTED = "not_started"
    RUNNING = "running"
    COMPLETED = "completed"
    CANCELLED = "cancelled"
    ERROR = "error"
    SCHEDULED = "scheduled"
    WAITING_FOR_SUBJOBS = "waiting_for_subjobs"
    CONFIGURING = "configuring"
    UNDEFINED = "undefined"


def string_to_status(string):
    """Map a status string to its RenderStatus member.

    Returns RenderStatus.UNDEFINED for any unknown input instead of raising,
    so callers can tolerate stale or foreign status strings.
    """
    try:
        # Enum value lookup replaces the previous hand-rolled linear scan.
        return RenderStatus(string)
    except ValueError:
        return RenderStatus.UNDEFINED
|
||||
86
src/utilities/zeroconf_server.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import logging
|
||||
import socket
|
||||
|
||||
from zeroconf import Zeroconf, ServiceInfo, ServiceBrowser, ServiceStateChange
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class ZeroconfServer:
    """Class-level wrapper around python-zeroconf.

    Optionally registers this host's service and browses the LAN for peers
    advertising the same service type; discovered peers are kept in
    ``client_cache``.
    """

    service_type = None
    server_name = None
    server_port = None
    server_ip = None
    # Single shared Zeroconf instance used for both registration and browsing.
    zeroconf = Zeroconf()
    service_info = None
    # Maps full service name -> ServiceInfo (value may be None if the
    # info lookup in the discovery callback failed).
    client_cache = {}
    properties = {}

    @classmethod
    def configure(cls, service_type, server_name, server_port):
        """Set the service identity; must be called before start()."""
        cls.service_type = service_type
        cls.server_name = server_name
        cls.server_port = server_port

    @classmethod
    def start(cls, listen_only=False):
        """Begin browsing for peers; also advertise unless listen_only."""
        if not listen_only:
            cls._register_service()
        cls._browse_services()

    @classmethod
    def stop(cls):
        """Unregister our service (if registered) and shut down zeroconf."""
        cls._unregister_service()
        cls.zeroconf.close()

    @classmethod
    def _register_service(cls):
        # NOTE(review): gethostbyname(gethostname()) can resolve to a loopback
        # address on some hosts — confirm it yields a routable IP in deployment.
        cls.server_ip = socket.gethostbyname(socket.gethostname())

        info = ServiceInfo(
            cls.service_type,
            f"{cls.server_name}.{cls.service_type}",
            addresses=[socket.inet_aton(cls.server_ip)],
            port=cls.server_port,
            properties=cls.properties,
        )

        cls.service_info = info
        cls.zeroconf.register_service(info)
        logger.info(f"Registered zeroconf service: {cls.service_info.name}")

    @classmethod
    def _unregister_service(cls):
        if cls.service_info:
            cls.zeroconf.unregister_service(cls.service_info)
            logger.info(f"Unregistered zeroconf service: {cls.service_info.name}")
            cls.service_info = None

    @classmethod
    def _browse_services(cls):
        # Keep a reference to the browser so it is not garbage collected
        # while discovery is running (previously it was dropped immediately).
        cls.browser = ServiceBrowser(cls.zeroconf, cls.service_type, [cls._on_service_discovered])

    @classmethod
    def _on_service_discovered(cls, zeroconf, service_type, name, state_change):
        """Zeroconf callback: track peers as they appear and disappear."""
        info = zeroconf.get_service_info(service_type, name)
        logger.debug(f"Zeroconf: {name} {state_change}")
        if service_type == cls.service_type:
            if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated:
                cls.client_cache[name] = info
            else:
                # Default of None prevents a KeyError (which would kill the
                # zeroconf callback thread) if a removal arrives for a name
                # we never cached.
                cls.client_cache.pop(name, None)

    @classmethod
    def found_clients(cls):
        """Return the short names (service-type suffix stripped) of peers."""
        return [x.split(f'.{cls.service_type}')[0] for x in cls.client_cache.keys()]
|
||||
|
||||
|
||||
# Example usage: advertise a demo service until the user presses enter, then
# unregister it — stop() runs via finally even if start() or input() raises.
if __name__ == "__main__":
    ZeroconfServer.configure("_zordon._tcp.local.", "foobar.local", 8080)
    try:
        ZeroconfServer.start()
        input("Server running - Press enter to end")
    finally:
        ZeroconfServer.stop()
|
||||
BIN
src/web/static/images/cancelled.png
Normal file
|
After Width: | Height: | Size: 1.7 KiB |
BIN
src/web/static/images/desktop.png
Normal file
|
After Width: | Height: | Size: 1.1 KiB |
BIN
src/web/static/images/error.png
Normal file
|
After Width: | Height: | Size: 995 B |
BIN
src/web/static/images/gears.png
Normal file
|
After Width: | Height: | Size: 81 KiB |
BIN
src/web/static/images/logo.png
Normal file
|
After Width: | Height: | Size: 2.6 KiB |
BIN
src/web/static/images/not_started.png
Normal file
|
After Width: | Height: | Size: 2.6 KiB |
BIN
src/web/static/images/scheduled.png
Normal file
|
After Width: | Height: | Size: 2.1 KiB |
BIN
src/web/static/images/spinner.gif
Normal file
|
After Width: | Height: | Size: 66 KiB |
64
src/web/static/js/job_table.js
Normal file
@@ -0,0 +1,64 @@
|
||||
// Grid.js job table: pulls rows from /api/jobs and renders one row per render
// job (thumbnail, metadata, live status/progress, and per-job action buttons).
const grid = new gridjs.Grid({
    columns: [
        // Thumbnail image served by the API for this job id (not sortable).
        { data: (row) => row.id,
            name: 'Thumbnail',
            formatter: (cell) => gridjs.html(`<img src="/api/job/${cell}/thumbnail?video_ok" style='width: 200px; min-width: 120px;'>`),
            sort: {enabled: false}
        },
        // Job name, linking to the full-details page.
        { id: 'name',
            name: 'Name',
            data: (row) => row.name,
            formatter: (name, row) => gridjs.html(`<a href="/ui/job/${row.cells[0].data}/full_details">${name}</a>`)
        },
        { id: 'renderer', data: (row) => `${row.renderer}-${row.renderer_version}`, name: 'Renderer' },
        { id: 'priority', name: 'Priority' },
        // Status tag colored by state, swapped for a progress bar while running.
        { id: 'status',
            name: 'Status',
            data: (row) => row,
            formatter: (cell, row) => gridjs.html(`
                <span class="tag ${(cell.status == 'running') ? 'is-hidden' : ''} ${(cell.status == 'cancelled') ?
                'is-warning' : (cell.status == 'error') ? 'is-danger' : (cell.status == 'not_started') ?
                'is-light' : 'is-primary'}">${cell.status}</span>
                <progress class="progress is-primary ${(cell.status != 'running') ? 'is-hidden': ''}"
                value="${(parseFloat(cell.percent_complete) * 100.0)}" max="100">${cell.status}</progress>
        `)},
        { id: 'time_elapsed', name: 'Time Elapsed' },
        { data: (row) => row.total_frames ?? 'N/A', name: 'Frame Count' },
        { id: 'client', name: 'Client'},
        // Most recent renderer output line, linking to the full log.
        { data: (row) => row.last_output ?? 'N/A',
            name: 'Last Output',
            formatter: (output, row) => gridjs.html(`<a href="/api/job/${row.cells[0].data}/logs">${output}</a>`)
        },
        // Action buttons: details, logs, cancel (running jobs only),
        // download outputs (completed jobs only), delete.
        { data: (row) => row,
            name: 'Commands',
            formatter: (cell, row) => gridjs.html(`
                <div class="field has-addons" style='white-space: nowrap; display: inline-block;'>
                <button class="button is-info" onclick="window.location.href='/ui/job/${row.cells[0].data}/full_details';">
                    <span class="icon"><i class="fa-solid fa-info"></i></span>
                </button>
                <button class="button is-link" onclick="window.location.href='/api/job/${row.cells[0].data}/logs';">
                    <span class="icon"><i class="fa-regular fa-file-lines"></i></span>
                </button>
                <button class="button is-warning is-active ${(cell.status != 'running') ? 'is-hidden': ''}" onclick="window.location.href='/api/job/${row.cells[0].data}/cancel?confirm=True&redirect=True';">
                    <span class="icon"><i class="fa-solid fa-x"></i></span>
                </button>
                <button class="button is-success ${(cell.status != 'completed') ? 'is-hidden': ''}" onclick="window.location.href='/api/job/${row.cells[0].data}/download_all';">
                    <span class="icon"><i class="fa-solid fa-download"></i></span>
                    <span>${cell.file_count}</span>
                </button>
                <button class="button is-danger" onclick="window.location.href='/api/job/${row.cells[0].data}/delete?confirm=True&redirect=True'">
                    <span class="icon"><i class="fa-regular fa-trash-can"></i></span>
                </button>
                </div>
        `),
            sort: false
        },
        { id: 'owner', name: 'Owner' }
    ],
    autoWidth: true,
    // Server-side data source; the API wraps the row list in a 'jobs' key.
    server: {
        url: '/api/jobs',
        then: results => results['jobs'],
    },
    sort: true,
}).render(document.getElementById('table'));
|
||||
44
src/web/static/js/modals.js
Normal file
@@ -0,0 +1,44 @@
|
||||
// Bulma modal wiring: open a modal from any .js-modal-trigger element,
// close it from the background / close buttons, and close all on Escape.
document.addEventListener('DOMContentLoaded', () => {
  const openModal = (modal) => {
    modal.classList.add('is-active');
  };

  const closeModal = (modal) => {
    modal.classList.remove('is-active');
  };

  const closeAllModals = () => {
    (document.querySelectorAll('.modal') || []).forEach((modal) => {
      closeModal(modal);
    });
  };

  // Each trigger names its modal via data-target="<element id>".
  (document.querySelectorAll('.js-modal-trigger') || []).forEach((trigger) => {
    const target = document.getElementById(trigger.dataset.target);

    trigger.addEventListener('click', () => {
      openModal(target);
    });
  });

  // Clicking the backdrop or any of the standard close controls closes
  // the modal that contains the clicked element.
  (document.querySelectorAll('.modal-background, .modal-close, .modal-card-head .delete, .modal-card-foot .button') || []).forEach((closer) => {
    const enclosingModal = closer.closest('.modal');

    closer.addEventListener('click', () => {
      closeModal(enclosingModal);
    });
  });

  document.addEventListener('keydown', (event) => {
    const e = event || window.event;

    if (e.keyCode === 27) { // Escape key
      closeAllModals();
    }
  });
});
|
||||
48
src/web/templates/details.html
Normal file
@@ -0,0 +1,48 @@
|
||||
{% extends 'layout.html' %}
|
||||
|
||||
{% block body %}
|
||||
<div class="container" style="text-align:center; width: 100%">
|
||||
<br>
|
||||
{% if media_url: %}
|
||||
<video width="1280" height="720" controls>
|
||||
<source src="{{media_url}}" type="video/mp4">
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
{% elif job_status == 'Running': %}
|
||||
<div style="width: 100%; height: 720px; position: relative; background: black; text-align: center; color: white;">
|
||||
<img src="/static/images/gears.png" style="vertical-align: middle; width: auto; height: auto; position:absolute; margin: auto; top: 0; bottom: 0; left: 0; right: 0;">
|
||||
<span style="height: auto; position:absolute; margin: auto; top: 58%; left: 0; right: 0; color: white; width: 60%">
|
||||
<progress class="progress is-primary" value="{{job.worker_data()['percent_complete'] * 100}}" max="100" style="margin-top: 6px;" id="progress-bar">Rendering</progress>
|
||||
Rendering in Progress - <span id="percent-complete">{{(job.worker_data()['percent_complete'] * 100) | int}}%</span>
|
||||
<br>Time Elapsed: <span id="time-elapsed">{{job.worker_data()['time_elapsed']}}</span>
|
||||
</span>
|
||||
<script>
|
||||
var startingStatus = '{{job.status.value}}';
|
||||
function update_job() {
|
||||
$.getJSON('/api/job/{{job.id}}', function(data) {
|
||||
document.getElementById('progress-bar').value = (data.percent_complete * 100);
|
||||
document.getElementById('percent-complete').innerHTML = (data.percent_complete * 100).toFixed(0) + '%';
|
||||
document.getElementById('time-elapsed').innerHTML = data.time_elapsed;
|
||||
if (data.status != startingStatus){
|
||||
clearInterval(renderingTimer);
|
||||
window.location.reload(true);
|
||||
};
|
||||
});
|
||||
}
|
||||
if (startingStatus == 'running'){
|
||||
var renderingTimer = setInterval(update_job, 1000);
|
||||
};
|
||||
</script>
|
||||
</div>
|
||||
{% else %}
|
||||
<div style="width: 100%; height: 720px; position: relative; background: black;">
|
||||
<img src="/static/images/{{job_status}}.png" style="vertical-align: middle; width: auto; height: auto; position:absolute; margin: auto; top: 0; bottom: 0; left: 0; right: 0;">
|
||||
<span style="height: auto; position:absolute; margin: auto; top: 58%; left: 0; right: 0; color: white;">
|
||||
{{job_status}}
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
<br>
|
||||
{{detail_table|safe}}
|
||||
</div>
|
||||
{% endblock %}
|
||||
8
src/web/templates/index.html
Normal file
@@ -0,0 +1,8 @@
|
||||
{% extends 'layout.html' %}
|
||||
|
||||
{% block body %}
|
||||
<div class="container is-fluid" style="padding-top: 20px;">
|
||||
<div id="table" class="table"></div>
|
||||
</div>
|
||||
<script src="/static/js/job_table.js"></script>
|
||||
{% endblock %}
|
||||
236
src/web/templates/layout.html
Normal file
@@ -0,0 +1,236 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Zordon Dashboard</title>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.4/css/bulma.min.css">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/gridjs/dist/gridjs.umd.js"></script>
|
||||
<link href="https://unpkg.com/gridjs/dist/theme/mermaid.min.css" rel="stylesheet" />
|
||||
<script src="https://kit.fontawesome.com/698705d14d.js" crossorigin="anonymous"></script>
|
||||
<script type="text/javascript" src="/static/js/modals.js"></script>
|
||||
</head>
|
||||
<body onload="rendererChanged(document.getElementById('renderer'))">
|
||||
|
||||
<nav class="navbar is-dark" role="navigation" aria-label="main navigation">
|
||||
<div class="navbar-brand">
|
||||
<a class="navbar-item" href="/">
|
||||
<img src="/static/images/logo.png">
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div id="navbarBasicExample" class="navbar-menu">
|
||||
<div class="navbar-start">
|
||||
<a class="navbar-item" href="/">
|
||||
Home
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div class="navbar-end">
|
||||
<div class="navbar-item">
|
||||
<button class="button is-primary js-modal-trigger" data-target="add-job-modal">
|
||||
<span class="icon">
|
||||
<i class="fa-solid fa-upload"></i>
|
||||
</span>
|
||||
<span>Submit Job</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
{% block body %}
|
||||
{% endblock %}
|
||||
|
||||
<div id="add-job-modal" class="modal">
|
||||
<!-- Start Add Form -->
|
||||
<form id="submit_job" action="/api/add_job?redirect=True" method="POST" enctype="multipart/form-data">
|
||||
<div class="modal-background"></div>
|
||||
<div class="modal-card">
|
||||
<header class="modal-card-head">
|
||||
<p class="modal-card-title">Submit New Job</p>
|
||||
<button class="delete" aria-label="close" type="button"></button>
|
||||
</header>
|
||||
<section class="modal-card-body">
|
||||
<!-- File Uploader -->
|
||||
|
||||
<label class="label">Upload File</label>
|
||||
<div id="file-uploader" class="file has-name is-fullwidth">
|
||||
<label class="file-label">
|
||||
<input class="file-input is-small" type="file" name="file">
|
||||
<span class="file-cta">
|
||||
<span class="file-icon">
|
||||
<i class="fas fa-upload"></i>
|
||||
</span>
|
||||
<span class="file-label">
|
||||
Choose a file…
|
||||
</span>
|
||||
</span>
|
||||
<span class="file-name">
|
||||
No File Uploaded
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
<br>
|
||||
<script>
|
||||
const fileInput = document.querySelector('#file-uploader input[type=file]');
|
||||
fileInput.onchange = () => {
|
||||
if (fileInput.files.length > 0) {
|
||||
const fileName = document.querySelector('#file-uploader .file-name');
|
||||
fileName.textContent = fileInput.files[0].name;
|
||||
}
|
||||
}
|
||||
|
||||
const presets = {
|
||||
{% for preset in preset_list: %}
|
||||
{{preset}}: {
|
||||
name: '{{preset_list[preset]['name']}}',
|
||||
renderer: '{{preset_list[preset]['renderer']}}',
|
||||
args: '{{preset_list[preset]['args']}}',
|
||||
},
|
||||
{% endfor %}
|
||||
};
|
||||
|
||||
function rendererChanged(ddl1) {
|
||||
|
||||
var renderers = {
|
||||
{% for renderer in renderer_info: %}
|
||||
{% if renderer_info[renderer]['supported_export_formats']: %}
|
||||
{{renderer}}: [
|
||||
{% for format in renderer_info[renderer]['supported_export_formats']: %}
|
||||
'{{format}}',
|
||||
{% endfor %}
|
||||
],
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
};
|
||||
|
||||
var selectedRenderer = ddl1.value;
|
||||
|
||||
var ddl3 = document.getElementById('preset_list');
|
||||
ddl3.options.length = 0;
|
||||
createOption(ddl3, '-Presets-', '');
|
||||
for (var preset_name in presets) {
|
||||
if (presets[preset_name]['renderer'] == selectedRenderer) {
|
||||
createOption(ddl3, presets[preset_name]['name'], preset_name);
|
||||
};
|
||||
};
|
||||
document.getElementById('raw_args').value = "";
|
||||
|
||||
var ddl2 = document.getElementById('export_format');
|
||||
ddl2.options.length = 0;
|
||||
var options = renderers[selectedRenderer];
|
||||
for (i = 0; i < options.length; i++) {
|
||||
createOption(ddl2, options[i], options[i]);
|
||||
};
|
||||
}
|
||||
|
||||
function createOption(ddl, text, value) {
|
||||
var opt = document.createElement('option');
|
||||
opt.value = value;
|
||||
opt.text = text;
|
||||
ddl.options.add(opt);
|
||||
}
|
||||
|
||||
function addPresetTextToInput(presetfield, textfield) {
|
||||
var p = presets[presetfield.value];
|
||||
textfield.value = p['args'];
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<!-- Renderer & Priority -->
|
||||
<div class="field is-grouped">
|
||||
<p class="control">
|
||||
<label class="label">Renderer</label>
|
||||
<span class="select">
|
||||
<select id="renderer" name="renderer" onchange="rendererChanged(this)">
|
||||
{% for renderer in renderer_info: %}
|
||||
<option name="renderer" value="{{renderer}}">{{renderer}}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</span>
|
||||
</p>
|
||||
<p class="control">
|
||||
<label class="label">Client</label>
|
||||
<span class="select">
|
||||
<select name="client">
|
||||
<option name="client" value="">First Available</option>
|
||||
{% for client in render_clients: %}
|
||||
<option name="client" value="{{client}}">{{client}}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</span>
|
||||
</p>
|
||||
<p class="control">
|
||||
<label class="label">Priority</label>
|
||||
<span class="select">
|
||||
<select name="priority">
|
||||
<option name="priority" value="1">1</option>
|
||||
<option name="priority" value="2" selected="selected">2</option>
|
||||
<option name="priority" value="3">3</option>
|
||||
</select>
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Output Path -->
|
||||
<label class="label">Output</label>
|
||||
<div class="field has-addons">
|
||||
<div class="control is-expanded">
|
||||
<input class="input is-small" type="text" placeholder="Output Name" name="output_path" value="output.mp4">
|
||||
</div>
|
||||
<p class="control">
|
||||
<span class="select is-small">
|
||||
<select id="export_format" name="export_format">
|
||||
<option value="ar">option</option>
|
||||
</select>
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Resolution -->
|
||||
<!-- <label class="label">Resolution</label>-->
|
||||
<!-- <div class="field is-grouped">-->
|
||||
<!-- <p class="control">-->
|
||||
<!-- <input class="input" type="text" placeholder="auto" maxlength="5" size="8" name="AnyRenderer-arg_x_resolution">-->
|
||||
<!-- </p>-->
|
||||
<!-- <label class="label"> x </label>-->
|
||||
<!-- <p class="control">-->
|
||||
<!-- <input class="input" type="text" placeholder="auto" maxlength="5" size="8" name="AnyRenderer-arg_y_resolution">-->
|
||||
<!-- </p>-->
|
||||
<!-- <label class="label"> @ </label>-->
|
||||
<!-- <p class="control">-->
|
||||
<!-- <input class="input" type="text" placeholder="auto" maxlength="3" size="5" name="AnyRenderer-arg_frame_rate">-->
|
||||
<!-- </p>-->
|
||||
<!-- <label class="label"> fps </label>-->
|
||||
<!-- </div>-->
|
||||
|
||||
<label class="label">Command Line Arguments</label>
|
||||
<div class="field has-addons">
|
||||
<p class="control">
|
||||
<span class="select is-small">
|
||||
<select id="preset_list" onchange="addPresetTextToInput(this, document.getElementById('raw_args'))">
|
||||
<option value="preset-placeholder">presets</option>
|
||||
</select>
|
||||
</span>
|
||||
</p>
|
||||
<p class="control is-expanded">
|
||||
<input class="input is-small" type="text" placeholder="Args" id="raw_args" name="raw_args">
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- End Add Form -->
|
||||
</section>
|
||||
<footer class="modal-card-foot">
|
||||
<input class="button is-link" type="submit"/>
|
||||
<button class="button" type="button">Cancel</button>
|
||||
</footer>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
62
src/web/templates/upload.html
Normal file
@@ -0,0 +1,62 @@
|
||||
<html>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
|
||||
<script>
|
||||
$(function() {
|
||||
$('#renderer').change(function() {
|
||||
$('.render_settings').hide();
|
||||
$('#' + $(this).val()).show();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
</script>
|
||||
<body>
|
||||
<h3>Upload a file</h3>
|
||||
|
||||
<div>
|
||||
<form action="/add_job" method="POST"
|
||||
enctype="multipart/form-data">
|
||||
<div>
|
||||
<input type="file" name="file"/><br>
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="origin" name="origin" value="html">
|
||||
|
||||
<div id="client">
|
||||
Render Client:
|
||||
<select name="client">
|
||||
{% for client in render_clients %}
|
||||
<option value="{{client}}">{{client}}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="priority">
|
||||
Priority:
|
||||
<select name="priority">
|
||||
<option value="1">1</option>
|
||||
<option value="2" selected>2</option>
|
||||
<option value="3">3</option>
|
||||
</select>
|
||||
</div>
|
||||
<div>
|
||||
<label for="renderer">Renderer:</label>
|
||||
<select id="renderer" name="renderer">
|
||||
{% for renderer in supported_renderers %}
|
||||
<option value="{{renderer}}">{{renderer}}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="blender" class="render_settings" style="display:none">
|
||||
Engine:
|
||||
<select name="blender+engine">
|
||||
<option value="CYCLES">Cycles</option>
|
||||
<option value="BLENDER_EEVEE">Eevee</option>
|
||||
</select>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
<input type="submit"/>
|
||||
</form>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
27
src/worker_factory.py
Normal file
@@ -0,0 +1,27 @@
|
||||
class RenderWorkerFactory:
    """Factory mapping renderer engine names to RenderWorker classes."""

    @staticmethod
    def supported_classes():
        """Return the list of registered RenderWorker classes.

        To add support for any additional RenderWorker classes, import
        them here and append them to the returned list. Imports are local
        to avoid a circular import at module load time.
        """
        from src.workers.blender_worker import BlenderRenderWorker
        from src.workers.aerender_worker import AERenderWorker
        from src.workers.ffmpeg_worker import FFMPEGRenderWorker
        return [BlenderRenderWorker, AERenderWorker, FFMPEGRenderWorker]

    @staticmethod
    def create_worker(renderer, input_path, output_path, args=None, parent=None, name=None):
        """Instantiate the worker class registered for ``renderer``."""
        worker_class = RenderWorkerFactory.class_for_name(renderer)
        return worker_class(input_path=input_path, output_path=output_path, args=args, parent=parent, name=name)

    @staticmethod
    def supported_renderers():
        """Return the engine name of every supported worker class."""
        return [worker_cls.engine.name() for worker_cls in RenderWorkerFactory.supported_classes()]

    @staticmethod
    def class_for_name(name):
        """Case-insensitive lookup of a worker class by engine name.

        Raises LookupError when no registered engine matches.
        """
        name = name.lower()
        found = next(
            (cls for cls in RenderWorkerFactory.supported_classes() if cls.engine.name() == name),
            None,
        )
        if found is None:
            raise LookupError(f'Cannot find class for name: {name}')
        return found
|
||||
0
src/workers/__init__.py
Normal file
135
src/workers/aerender_worker.py
Normal file
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env python3
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
|
||||
from src.workers.base_worker import BaseRenderWorker, timecode_to_frames
|
||||
from src.workers.engines.aerender_engine import AERender
|
||||
|
||||
|
||||
def aerender_path():
    """Locate Adobe's ``aerender`` binary on macOS.

    Searches every After Effects install under /Applications. Returns the
    first match (sorted, so the choice is deterministic), or None when no
    installation is found.
    """
    paths = sorted(glob.glob('/Applications/*After Effects*/aerender'))
    if not paths:
        logging.error('After Effects installation not found')
        return None
    if len(paths) > 1:
        # Bug fix: this branch previously fell through and returned None
        # even though usable installations existed. Warn, then use one.
        logging.warning('Multiple After Effects installations detected')
    return paths[0]
|
||||
|
||||
|
||||
class AERenderWorker(BaseRenderWorker):
    """Render worker driving Adobe After Effects, via the nexrender CLI
    when it is present in the working directory, otherwise via the stock
    ``aerender`` command-line tool."""

    supported_extensions = ['.aep']
    engine = AERender

    def __init__(self, input_path, output_path, args=None, parent=None, name=None):
        """Create the worker.

        Recognized ``args`` keys (all optional): ``comp``,
        ``render_settings``, ``omsettings``.
        """
        super(AERenderWorker, self).__init__(input_path=input_path, output_path=output_path, args=args,
                                             parent=parent, name=name)

        # Bug fix: args defaults to None, so guard before calling .get()
        # (the base class applies the same `args or {}` normalization).
        args = args or {}
        self.comp = args.get('comp', None)
        self.render_settings = args.get('render_settings', None)
        self.omsettings = args.get('omsettings', None)

        self.progress = 0
        self.progress_history = []  # raw 'PROGRESS:' lines, kept for timing stats
        self.attributes = {}  # key/value pairs parsed from aerender header output

    def generate_worker_subprocess(self):
        """Build the argv list for the render subprocess.

        Prefers the bundled nexrender CLI; falls back to ``aerender``.
        """
        if os.path.exists('nexrender-cli-macos'):
            logging.info('nexrender found')
            # nexrender job schema (subset used here):
            #   template.src, template.composition, template.settingsTemplate,
            #   template.outputModule, template.outputExt
            job = {'template':
                   {
                       'src': 'file://' + self.input_path, 'composition': self.comp.replace('"', ''),
                       'settingsTemplate': self.render_settings.replace('"', ''),
                       'outputModule': self.omsettings.replace('"', ''), 'outputExt': 'mov'}
                   }
            x = ['./nexrender-cli-macos', "'{}'".format(json.dumps(job))]
        else:
            logging.info('nexrender not found')
            x = [aerender_path(), '-project', self.input_path, '-comp', self.comp, '-RStemplate', self.render_settings,
                 '-OMtemplate', self.omsettings, '-output', self.output_path]
        return x

    def _parse_stdout(self, line):
        """Parse one line of renderer output: progress, project attributes,
        warnings, and errors."""
        if line.startswith('PROGRESS:'):
            trimmed = line.replace('PROGRESS:', '').strip()
            if len(trimmed):
                self.progress_history.append(line)
                if 'Seconds' in trimmed:
                    # Per-frame timing line, e.g. "PROGRESS: ... (12): 3 Seconds"
                    self._update_progress(line)
                elif ': ' in trimmed:
                    # Header attribute line, e.g. "PROGRESS: Duration: 0:00:10:00"
                    tmp = trimmed.split(': ')
                    self.attributes[tmp[0].strip()] = tmp[1].strip()
        elif line.startswith('WARNING:'):
            trimmed = line.replace('WARNING:', '').strip()
            self.warnings.append(trimmed)
            logging.warning(trimmed)
        elif line.startswith('aerender ERROR') or 'ERROR:' in line:
            self.log_error(line)

    def _update_progress(self, line):
        """Record the frame number from a per-frame progress line."""
        if not self.total_frames:
            # Lazily derive the total frame count from the parsed header
            # attributes the first time frame progress arrives.
            duration_string = self.attributes.get('Duration', None)
            if duration_string is not None:  # bug fix: was dereferenced unguarded
                frame_rate = self.attributes.get('Frame Rate', '0').split(' ')[0]
                # NOTE(review): BaseRenderWorker declares total_frames as a
                # property; confirm this assignment is accepted at runtime.
                self.total_frames = timecode_to_frames(duration_string.split('Duration:')[-1], float(frame_rate))

        match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line).groupdict()
        # Bug fix: store as int (was the raw regex-group string, which made
        # the division in average_frame_duration raise TypeError).
        self.last_frame = int(match['frame'])

    def average_frame_duration(self):
        """Return the mean seconds spent per rendered frame so far.

        Returns 0 when no frame has completed yet.
        """
        total_durations = 0

        for line in self.progress_history:
            match = re.match(r'PROGRESS:.*\((?P<frame>\d+)\): (?P<time>\d+)', line)
            if match:
                total_durations += int(match.group(2))

        if not self.last_frame:  # guard against division by zero
            return 0
        return float(total_durations) / self.last_frame

    def percent_complete(self):
        """Return render completion as a percentage (0-100)."""
        if self.total_frames:
            return (float(self.last_frame) / float(self.total_frames)) * 100
        else:
            return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
    logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.DEBUG)

    # Manual smoke test. Bug fix: the previous call passed the comp,
    # render-settings template, and output-module template as extra
    # positional arguments, but __init__ takes them via the `args` dict
    # (the strings landed in args/parent/name and args.get() crashed).
    r = AERenderWorker('/Users/brett/Desktop/Youtube_Vids/Film_Formats/Frame_Animations.aep',
                       '/Users/brett/Desktop/test_render',
                       args={'comp': '"Film Pan"',
                             'render_settings': '"Draft Settings"',
                             'omsettings': '"ProRes"'})
    r.start()
    while r.is_running():
        time.sleep(0.1)
|
||||
316
src/workers/base_worker.py
Normal file
@@ -0,0 +1,316 @@
|
||||
#!/usr/bin/env python3
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import threading
|
||||
from datetime import datetime
|
||||
|
||||
import psutil
|
||||
from pubsub import pub
|
||||
from sqlalchemy import Column, Integer, String, DateTime, JSON
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from src.utilities.misc_helper import get_time_elapsed
|
||||
from src.utilities.status_utils import RenderStatus, string_to_status
|
||||
|
||||
logger = logging.getLogger()
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
class BaseRenderWorker(Base):
|
||||
__tablename__ = 'render_workers'
|
||||
|
||||
id = Column(String, primary_key=True)
|
||||
input_path = Column(String)
|
||||
output_path = Column(String)
|
||||
date_created = Column(DateTime)
|
||||
start_time = Column(DateTime, nullable=True)
|
||||
end_time = Column(DateTime, nullable=True)
|
||||
renderer = Column(String)
|
||||
renderer_version = Column(String)
|
||||
priority = Column(Integer)
|
||||
project_length = Column(Integer)
|
||||
start_frame = Column(Integer)
|
||||
end_frame = Column(Integer, nullable=True)
|
||||
parent = Column(String, nullable=True)
|
||||
children = Column(JSON)
|
||||
name = Column(String)
|
||||
file_hash = Column(String)
|
||||
_status = Column(String)
|
||||
|
||||
engine = None
|
||||
|
||||
def __init__(self, input_path, output_path, priority=2, args=None, ignore_extensions=True, parent=None,
|
||||
name=None):
|
||||
|
||||
if not ignore_extensions:
|
||||
if not any(ext in input_path for ext in self.engine.supported_extensions):
|
||||
err_meg = f'Cannot find valid project with supported file extension for {self.engine.name()} renderer'
|
||||
logger.error(err_meg)
|
||||
raise ValueError(err_meg)
|
||||
if not self.engine:
|
||||
raise NotImplementedError("Engine not defined")
|
||||
|
||||
def generate_id():
|
||||
import uuid
|
||||
return str(uuid.uuid4()).split('-')[0]
|
||||
|
||||
# Essential Info
|
||||
self.id = generate_id()
|
||||
self.input_path = input_path
|
||||
self.output_path = output_path
|
||||
self.args = args or {}
|
||||
self.date_created = datetime.now()
|
||||
self.renderer = self.engine.name()
|
||||
self.renderer_version = self.engine.version()
|
||||
self.priority = priority
|
||||
self.parent = parent
|
||||
self.children = {}
|
||||
self.name = name or os.path.basename(input_path)
|
||||
|
||||
# Frame Ranges
|
||||
self.project_length = -1
|
||||
self.current_frame = 0 # should this be a 1 ?
|
||||
self.start_frame = 0 # should this be a 1 ?
|
||||
self.end_frame = None
|
||||
|
||||
# Logging
|
||||
self.start_time = None
|
||||
self.end_time = None
|
||||
|
||||
# History
|
||||
self.status = RenderStatus.CONFIGURING
|
||||
self.warnings = []
|
||||
self.errors = []
|
||||
|
||||
# Threads and processes
|
||||
self.__thread = threading.Thread(target=self.run, args=())
|
||||
self.__thread.daemon = True
|
||||
self.__process = None
|
||||
self.last_output = None
|
||||
|
||||
@property
|
||||
def total_frames(self):
|
||||
return (self.end_frame or self.project_length) - self.start_frame + 1
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, new_status):
|
||||
if self._status != new_status.value:
|
||||
old_status = self.status
|
||||
self._status = new_status.value
|
||||
pub.sendMessage('status_change', job_id=self.id, old_status=old_status, new_status=new_status)
|
||||
|
||||
@status.getter
|
||||
def status(self):
|
||||
if self._status in [RenderStatus.RUNNING.value, RenderStatus.NOT_STARTED.value]:
|
||||
if not hasattr(self, 'errors'):
|
||||
self._status = RenderStatus.CANCELLED.value
|
||||
return string_to_status(self._status)
|
||||
|
||||
def validate(self):
|
||||
if not os.path.exists(self.input_path):
|
||||
raise FileNotFoundError(f"Cannot find input path: {self.input_path}")
|
||||
self.generate_subprocess()
|
||||
|
||||
def generate_subprocess(self):
|
||||
# Convert raw args from string if available and catch conflicts
|
||||
generated_args = [str(x) for x in self.generate_worker_subprocess()]
|
||||
generated_args_flags = [x for x in generated_args if x.startswith('-')]
|
||||
if len(generated_args_flags) != len(set(generated_args_flags)):
|
||||
msg = "Cannot generate subprocess - Multiple arg conflicts detected"
|
||||
logger.error(msg)
|
||||
logger.debug(f"Generated args for subprocess: {generated_args}")
|
||||
raise ValueError(msg)
|
||||
return generated_args
|
||||
|
||||
def get_raw_args(self):
|
||||
raw_args_string = self.args.get('raw', None)
|
||||
raw_args = None
|
||||
if raw_args_string:
|
||||
import shlex
|
||||
raw_args = shlex.split(raw_args_string)
|
||||
return raw_args
|
||||
|
||||
def generate_worker_subprocess(self):
|
||||
raise NotImplementedError("generate_worker_subprocess not implemented")
|
||||
|
||||
def log_path(self):
    """Path of this job's log file, stored next to the input file.

    The filename is '<job name or input basename>_<creation timestamp>.log'.
    """
    base = self.name or os.path.basename(self.input_path)
    stamp = self.date_created.strftime("%Y.%m.%d_%H.%M.%S")
    return os.path.join(os.path.dirname(self.input_path), f"{base}_{stamp}.log")
|
||||
|
||||
def start(self):
    """Validate preconditions and launch the render on the worker thread.

    Flips status to RUNNING and records the start time on success.  No-ops
    (with an error log) when the job is not in a startable state; flips the
    job to ERROR when the input file or the render engine binary is missing.
    """

    if self.status not in [RenderStatus.SCHEDULED, RenderStatus.NOT_STARTED]:
        logger.error(f"Trying to start job with status: {self.status}")
        return

    if not os.path.exists(self.input_path):
        self.status = RenderStatus.ERROR
        msg = 'Cannot find input path: {}'.format(self.input_path)
        logger.error(msg)
        self.errors.append(msg)
        return

    if not self.engine.renderer_path():
        self.status = RenderStatus.ERROR
        msg = 'Cannot find render engine path for {}'.format(self.engine.name())
        logger.error(msg)
        self.errors.append(msg)
        return

    self.status = RenderStatus.RUNNING
    self.start_time = datetime.now()
    logger.info(f'Starting {self.engine.name()} {self.engine.version()} Render for {self.input_path} | '
                f'Frame Count: {self.total_frames}')
    # run() executes on the private daemon thread created in __init__.
    self.__thread.start()
|
||||
|
||||
def run(self):
    """Thread target: run the render subprocess and finalize job state.

    Streams the renderer's merged stdout/stderr into the log file while
    feeding each line to the subclass parser, then handles the exit code,
    optional distributed sub-jobs, and post-processing.  Do not call
    directly; start() launches this on the worker thread.
    """
    # Setup logging
    log_dir = os.path.dirname(self.log_path())
    os.makedirs(log_dir, exist_ok=True)

    # Start process and get updates
    subprocess_cmds = self.generate_subprocess()
    logger.debug("Renderer commands generated - {}".format(" ".join(subprocess_cmds)))
    # stderr is merged into stdout so a single reader loop sees everything.
    self.__process = subprocess.Popen(subprocess_cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                      universal_newlines=False)

    with open(self.log_path(), "a") as f:

        f.write(f"{self.start_time.isoformat()} - Starting {self.engine.name()} {self.engine.version()} "
                f"Render for {self.input_path}")
        f.write(f"Running command: {' '.join(subprocess_cmds)}\n")
        # Stream output line by line: log it, remember the latest line for
        # status reporting, and let the subclass parser update progress.
        for c in io.TextIOWrapper(self.__process.stdout, encoding="utf-8"):  # or another encoding
            f.write(c)
            # logger.debug(f"{self.engine.name()}Worker: {c.strip()}")
            self.last_output = c.strip()
            self._parse_stdout(c.strip())
        f.write('\n')

        # Check return codes
        return_code = self.__process.wait()
        self.end_time = datetime.now()
        # Return early if job was cancelled
        if self.status in [RenderStatus.CANCELLED, RenderStatus.ERROR]:
            return

        if return_code:
            message = f"{self.engine.name()} render failed with code {return_code} after {self.time_elapsed()}"
            logger.error(message)
            f.write(message)
            self.status = RenderStatus.ERROR
            if not self.errors:
                self.errors = [message]
            return

        message = f"{self.engine.name()} render completed successfully in {self.time_elapsed()}"
        logger.info(message)
        f.write(message)

    if self.children:
        # Local import avoids a circular dependency at module load time.
        from src.distributed_job_manager import DistributedJobManager
        DistributedJobManager.wait_for_subjobs(local_job=self)

    # Post Render Work
    logger.debug("Starting post-processing work")
    self.post_processing()
    self.status = RenderStatus.COMPLETED
    logger.info(f"Render {self.id}-{self.name} completed successfully after {self.time_elapsed()}")
|
||||
|
||||
def post_processing(self):
    """Hook for subclass work after a successful render; base is a no-op."""
    return None
|
||||
|
||||
def is_running(self):
    """Return True while the worker thread exists and is alive."""
    thread = self.__thread
    if not thread:
        return False
    return thread.is_alive()
|
||||
|
||||
def log_error(self, error_line, halt_render=False):
    """Record an error line on the job and in the log.

    Args:
        error_line: the raw error text from the renderer.
        halt_render: when True, stop the render and mark the job as ERROR.
    """
    logger.error(error_line)
    self.errors.append(error_line)
    if halt_render:
        self.stop(is_error=True)
|
||||
|
||||
def stop(self, is_error=False):
    """Kill the render subprocess tree and mark the job cancelled/errored.

    Args:
        is_error: when True the job becomes ERROR (logging the most recent
            recorded error message); otherwise it becomes CANCELLED.
    """
    # BUG FIX: the previous guard was `hasattr(self, '__process')`, which can
    # never be true -- double-underscore attributes are name-mangled to
    # _ClassName__process, so the subprocess tree was never actually killed.
    # Referencing the attribute directly mangles consistently with __init__.
    if self.__process is not None:
        try:
            process = psutil.Process(self.__process.pid)
            # Kill children first so orphaned render helpers don't linger.
            for proc in process.children(recursive=True):
                proc.kill()
            process.kill()
        except Exception as e:
            logger.debug(f"Error stopping the process: {e}")
    if self.status in [RenderStatus.RUNNING, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED]:
        if is_error:
            err_message = self.errors[-1] if self.errors else 'Unknown error'
            logger.error(f"Halting render due to error: {err_message}")
            self.status = RenderStatus.ERROR
        else:
            self.status = RenderStatus.CANCELLED
|
||||
|
||||
def percent_complete(self):
    """Fraction of the render finished; the base class cannot estimate it."""
    return 0
|
||||
|
||||
def _parse_stdout(self, line):
|
||||
raise NotImplementedError("_parse_stdout not implemented")
|
||||
|
||||
def time_elapsed(self):
    """Human-readable elapsed time between the job's start and end times."""
    return get_time_elapsed(self.start_time, self.end_time)
|
||||
|
||||
def file_list(self):
    """Sorted paths of everything in the output directory.

    Returns:
        list[str]: full paths, sorted; empty when the directory is missing.
    """
    output_dir = os.path.dirname(self.output_path)
    try:
        entries = os.listdir(output_dir)
    except FileNotFoundError:
        return []
    return sorted(os.path.join(output_dir, entry) for entry in entries)
|
||||
|
||||
def json(self):
    """Build a JSON-safe snapshot of this job for API responses.

    Returns:
        dict: job state with all datetime values converted to ISO-8601
        strings via a dumps/loads round trip.
    """
    job_dict = {
        'id': self.id,
        'name': self.name,
        'input_path': self.input_path,
        'output_path': self.output_path,
        'priority': self.priority,
        'parent': self.parent,
        'children': self.children,
        'date_created': self.date_created,
        'start_time': self.start_time,
        'end_time': self.end_time,
        'status': self.status.value,
        'file_hash': self.file_hash,
        'percent_complete': self.percent_complete(),
        'file_count': len(self.file_list()),
        'renderer': self.renderer,
        'renderer_version': self.renderer_version,
        # 'errors'/'last_output' may not exist on jobs restored from disk.
        'errors': getattr(self, 'errors', None),
        'start_frame': self.start_frame,
        'end_frame': self.end_frame,
        'total_frames': self.total_frames,
        'last_output': getattr(self, 'last_output', None),
        'log_path': self.log_path()
    }

    # convert to json and back to auto-convert dates to iso format
    def date_serializer(o):
        # json.dumps calls this only for objects it cannot serialize itself.
        if isinstance(o, datetime):
            return o.isoformat()

    json_convert = json.dumps(job_dict, default=date_serializer)
    worker_json = json.loads(json_convert)
    return worker_json
|
||||
|
||||
|
||||
def timecode_to_frames(timecode, frame_rate):
    """Convert an 'HH:MM:SS:FF' timecode string to a 1-based frame number.

    Args:
        timecode: colon-separated timecode; the last field is the frame
            index within the second.
        frame_rate: frames per second.

    Returns:
        int: 1-based absolute frame number.
    """
    fields = [int(part) for part in timecode.split(':')]
    # BUG FIX: hours were previously weighted as minutes
    # (h*60 + m*60 + s); an hour contributes 3600 seconds.
    total_seconds = (fields[0] * 3600) + (fields[1] * 60) + fields[2]
    return (total_seconds * frame_rate) + fields[-1] + 1
|
||||
146
src/workers/blender_worker.py
Normal file
@@ -0,0 +1,146 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
from collections import Counter
|
||||
|
||||
from src.workers.engines.blender_engine import Blender
|
||||
from src.utilities.ffmpeg_helper import image_sequence_to_video
|
||||
from src.workers.base_worker import *
|
||||
|
||||
|
||||
class BlenderRenderWorker(BaseRenderWorker):
    """Render worker that drives Blender's command-line renderer."""

    engine = Blender

    def __init__(self, input_path, output_path, args=None, parent=None, name=None):
        """Create a Blender job and read scene metadata from the .blend file.

        Args:
            input_path: path to the .blend project file.
            output_path: base path for rendered frames.
            args: optional dict of render options ('engine', 'export_format',
                'camera', 'background', 'raw', ...).
            parent: parent job reference when this is a distributed sub-job.
            name: optional human-readable job name.
        """
        super(BlenderRenderWorker, self).__init__(input_path=input_path, output_path=output_path, args=args,
                                                  parent=parent, name=name)

        # Args
        self.blender_engine = self.args.get('engine', 'BLENDER_EEVEE').upper()
        self.export_format = self.args.get('export_format', None) or 'JPEG'
        self.camera = self.args.get('camera', None)

        # Stats
        # Within-frame progress (0..1), updated from sample lines in stdout.
        self.__frame_percent_complete = 0.0

        # Scene Info
        # NOTE(review): get_file_info.py emits 'frame_start'/'frame_end', but
        # these lookups use 'start_frame'/'end_frame' -- confirm the key
        # names; otherwise the defaults below always win.
        self.scene_info = Blender.get_scene_info(input_path)
        self.start_frame = int(self.scene_info.get('start_frame', 1))
        self.end_frame = int(self.scene_info.get('end_frame', self.start_frame))
        self.project_length = (self.end_frame - self.start_frame) + 1
        self.current_frame = -1

    def generate_worker_subprocess(self):
        """Build the Blender argv list for this job."""

        cmd = [self.engine.renderer_path()]
        if self.args.get('background', True):  # optionally run render not in background
            cmd.append('-b')
        cmd.append(self.input_path)

        if self.camera:
            # Switch the active camera before rendering.
            cmd.extend(['--python-expr', f"import bpy;bpy.context.scene.camera = bpy.data.objects['{self.camera}'];"])

        # Blender appends frame numbers after this trailing underscore.
        path_without_ext = os.path.splitext(self.output_path)[0] + "_"
        cmd.extend(['-E', self.blender_engine, '-o', path_without_ext, '-F', self.export_format])

        # set frame range
        cmd.extend(['-s', self.start_frame, '-e', self.end_frame, '-a'])

        # Convert raw args from string if available
        raw_args = self.get_raw_args()
        if raw_args:
            cmd.extend(raw_args)

        return cmd

    def _parse_stdout(self, line):
        """Parse one line of Blender output to update progress and errors."""

        # Progress lines look like:
        # 'Fra:12 Mem:... Time:... | ... Remaining:... | ... Sample 5/64'
        pattern = re.compile(
            r'Fra:(?P<frame>\d*).*Mem:(?P<memory>\S+).*Time:(?P<time>\S+)(?:.*Remaining:)?(?P<remaining>\S*)')
        found = pattern.search(line)
        if found:
            stats = found.groupdict()
            memory_use = stats['memory']
            time_elapsed = stats['time']
            time_remaining = stats['remaining'] or 'Unknown'

            # Within-frame progress from the trailing 'Sample x/y' section.
            sample_string = line.split('|')[-1].strip()
            if "sample" in sample_string.lower():
                samples = re.sub(r'[^\d/]', '', sample_string)
                self.__frame_percent_complete = int(samples.split('/')[0]) / int(samples.split('/')[-1])

            if int(stats['frame']) > self.current_frame:
                self.current_frame = int(stats['frame'])
                logger.debug(
                    'Frame:{0} | Mem:{1} | Time:{2} | Remaining:{3}'.format(self.current_frame, memory_use,
                                                                            time_elapsed, time_remaining))
        elif "file doesn't exist" in line.lower():
            # A missing file is fatal; halt the render immediately.
            self.log_error(line, halt_render=True)
        elif line.lower().startswith('error'):
            self.log_error(line)
        elif 'Saved' in line or 'Saving' in line or 'quit' in line:
            match = re.match(r'Time: (.*) \(Saving', line)
            if match:
                time_completed = match.groups()[0]
                # NOTE(review): frames completed relative to this job's frame
                # window -- verify the formula for distributed sub-jobs.
                frame_count = self.current_frame - self.end_frame + self.total_frames
                logger.info(f'Frame #{self.current_frame} - '
                            f'{frame_count} of {self.total_frames} completed in {time_completed} | '
                            f'Total Elapsed Time: {datetime.now() - self.start_time}')
            else:
                logger.debug(line)
        else:
            pass
            # if len(line.strip()):
            #     logger.debug(line.strip())

    def percent_complete(self):
        """Overall 0-1 progress: whole frames done plus current-frame progress."""
        if self.total_frames <= 1:
            return self.__frame_percent_complete
        else:
            whole_frame_percent = (self.current_frame - self.start_frame) / self.total_frames
            adjusted_frame_percent = self.__frame_percent_complete / self.total_frames
            total_percent = whole_frame_percent + adjusted_frame_percent
            # current_frame starts at -1, so clamp to zero before reporting.
            return max(total_percent, 0)

    def post_processing(self):
        """After a multi-frame render, stitch the frames into a preview .mov.

        Skipped for single frames and for distributed sub-jobs (parent set).
        """

        def most_common_extension(file_paths):
            # The output dir may contain stray files; use the dominant extension.
            extensions = [os.path.splitext(path)[1] for path in file_paths]
            counter = Counter(extensions)
            most_common_ext, _ = counter.most_common(1)[0]
            return most_common_ext

        output_dir_files = os.listdir(os.path.dirname(self.output_path))
        if self.total_frames > 1 and len(output_dir_files) > 1 and not self.parent:
            logger.info("Generating preview for image sequence")

            # Calculate what the real start frame # is if we have child objects
            start_frame = self.start_frame
            if self.children:
                min_child_frame = min(int(child["start_frame"]) for child in self.children.values())
                start_frame = min(min_child_frame, self.start_frame)
            logger.debug(f"Post processing start frame #{start_frame}")

            try:
                # Frames were written as <output>_%04d.<ext> (see the '-o' arg).
                pattern = os.path.splitext(self.output_path)[0] + "_%04d" + most_common_extension(output_dir_files)
                image_sequence_to_video(source_glob_pattern=pattern,
                                        output_path=self.output_path + '.mov',
                                        framerate=self.scene_info['fps'],
                                        start_frame=start_frame)
                logger.info('Successfully generated preview video from image sequence')
            except Exception as e:
                logger.error(f'Error generating video from image sequence: {e}')
|
||||
|
||||
|
||||
if __name__ == '__main__':

    # Manual smoke test: dump scene info for a local .blend file.
    import pprint
    x = Blender.get_scene_info('/Users/brett/Desktop/TC Previz/nallie_farm.blend')
    pprint.pprint(x)

    # logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.DEBUG)
    # r = BlenderRenderWorker('/Users/brett/Blender Files/temple_animatic.blend', '/Users/brett/testing1234')
    # # r.engine = 'CYCLES'
    # r.start()
    # while r.is_running():
    #     time.sleep(1)
|
||||
0
src/workers/engines/__init__.py
Normal file
26
src/workers/engines/aerender_engine.py
Normal file
@@ -0,0 +1,26 @@
|
||||
try:
|
||||
from .base_engine import *
|
||||
except ImportError:
|
||||
from base_engine import *
|
||||
|
||||
|
||||
class AERender(BaseRenderEngine):
    """Adobe After Effects command-line renderer ('aerender') wrapper."""

    supported_extensions = ['.aep']

    @classmethod
    def version(cls):
        """Return the aerender version string, or None when unavailable."""
        version = None
        try:
            render_path = cls.renderer_path()
            if render_path:
                # The version is the last whitespace-separated token.
                raw = subprocess.check_output([render_path, '-version'], timeout=SUBPROCESS_TIMEOUT)
                version = raw.decode('utf-8').split(" ")[-1].strip()
        except Exception as e:
            logger.error(f'Failed to get {cls.name()} version: {e}')
        return version

    @classmethod
    def get_output_formats(cls):
        # todo: create implementation
        return []
|
||||
46
src/workers/engines/base_engine.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger()
|
||||
SUBPROCESS_TIMEOUT = 5
|
||||
|
||||
|
||||
class BaseRenderEngine(object):
    """Abstract base for external render engines (Blender, aerender, ffmpeg...)."""

    # Fallback absolute locations checked when the binary is not on PATH.
    install_paths = []
    # Project-file extensions this engine can render.
    supported_extensions = []

    @classmethod
    def name(cls):
        """Lower-cased class name; doubles as the executable name for `which`."""
        return cls.__name__.lower()

    @classmethod
    def renderer_path(cls):
        """Locate the renderer executable.

        Tries `which <name>` first, then falls back to install_paths
        (keeping the LAST existing candidate, matching historical behavior).
        Returns None when nothing is found.
        """
        located = None
        try:
            raw = subprocess.check_output(['which', cls.name()], timeout=SUBPROCESS_TIMEOUT)
            located = raw.decode('utf-8').strip()
        except subprocess.CalledProcessError:
            for candidate in cls.install_paths:
                if os.path.exists(candidate):
                    located = candidate
        except Exception as e:
            logger.exception(e)
        return located

    @classmethod
    def version(cls):
        """Engine-specific version lookup; subclasses must override."""
        raise NotImplementedError("version not implemented")

    @classmethod
    def get_help(cls):
        """Return the renderer's '-h' output (stderr merged into stdout).

        Raises:
            FileNotFoundError: when no renderer executable can be located.
        """
        renderer = cls.renderer_path()
        if not renderer:
            raise FileNotFoundError("renderer path not found")
        raw = subprocess.check_output([renderer, '-h'], stderr=subprocess.STDOUT,
                                      timeout=SUBPROCESS_TIMEOUT)
        return raw.decode('utf-8')

    @classmethod
    def get_output_formats(cls):
        """List output formats; subclasses must override."""
        raise NotImplementedError(f"get_output_formats not implemented for {cls.__name__}")
|
||||
104
src/workers/engines/blender_engine.py
Normal file
@@ -0,0 +1,104 @@
|
||||
try:
|
||||
from .base_engine import *
|
||||
except ImportError:
|
||||
from base_engine import *
|
||||
import json
|
||||
import re
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class Blender(BaseRenderEngine):
    """Blender engine wrapper: version/format discovery plus headless python
    execution helpers used for scene inspection and project packing."""

    install_paths = ['/Applications/Blender.app/Contents/MacOS/Blender']
    supported_extensions = ['.blend']

    @classmethod
    def version(cls):
        """Return the installed Blender version string, or None on failure."""
        version = None
        try:
            render_path = cls.renderer_path()
            if render_path:
                # First output line looks like 'Blender <version>'.
                ver_out = subprocess.check_output([render_path, '-v'], timeout=SUBPROCESS_TIMEOUT)
                version = ver_out.decode('utf-8').splitlines()[0].replace('Blender', '').strip()
        except Exception as e:
            logger.error(f'Failed to get Blender version: {e}')
        return version

    @classmethod
    def get_output_formats(cls):
        """Scrape the supported output format names from `blender -h`."""
        # Formats are quoted upper-case tokens in the 'Format Options' section.
        format_string = cls.get_help().split('Format Options')[-1].split('Animation Playback Options')[0]
        formats = re.findall(r"'([A-Z_0-9]+)'", format_string)
        return formats

    @classmethod
    def run_python_expression(cls, project_path, python_expression, timeout=None):
        """Run a python expression inside the project file headlessly.

        Returns the CompletedProcess, or None when the subprocess fails
        (a warning is logged).  Raises FileNotFoundError when the project
        file is missing.
        """
        if os.path.exists(project_path):
            try:
                return subprocess.run([cls.renderer_path(), '-b', project_path, '--python-expr', python_expression],
                                      capture_output=True, timeout=timeout)
            except Exception as e:
                logger.warning(f"Error running python expression in blender: {e}")
                pass
        else:
            raise FileNotFoundError(f'Project file not found: {project_path}')

    @classmethod
    def run_python_script(cls, project_path, script_path, timeout=None):
        """Run a python script file inside the project file headlessly.

        Returns the CompletedProcess on success.  Raises FileNotFoundError
        when either path is missing, or a generic Exception when the
        subprocess itself failed (after logging a warning).
        """
        if os.path.exists(project_path) and os.path.exists(script_path):
            try:
                return subprocess.run([cls.renderer_path(), '-b', project_path, '--python', script_path],
                                      capture_output=True, timeout=timeout)
            except Exception as e:
                logger.warning(f"Error running python expression in blender: {e}")
                pass
        elif not os.path.exists(project_path):
            raise FileNotFoundError(f'Project file not found: {project_path}')
        elif not os.path.exists(script_path):
            raise FileNotFoundError(f'Python script not found: {script_path}')
        # Reached only when subprocess.run raised and was logged above.
        raise Exception("Uncaught exception")

    @classmethod
    def get_scene_info(cls, project_path, timeout=10):
        """Extract scene metadata (cameras, frame range, resolution, fps).

        Runs scripts/blender/get_file_info.py inside Blender and parses the
        'SCENE_DATA:' JSON marker from stdout.  Returns {} on any failure.
        """
        scene_info = {}
        try:
            results = cls.run_python_script(project_path, os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                                       'scripts', 'blender', 'get_file_info.py'), timeout=timeout)
            result_text = results.stdout.decode()
            for line in result_text.splitlines():
                if line.startswith('SCENE_DATA:'):
                    raw_data = line.split('SCENE_DATA:')[-1]
                    scene_info = json.loads(raw_data)
                    break
                elif line.startswith('Error'):
                    logger.error(f"get_scene_info error: {line.strip()}")
        except Exception as e:
            logger.error(f'Error getting file details for .blend file: {e}')
        return scene_info

    @classmethod
    def pack_project_file(cls, project_path, timeout=30):
        """Pack external assets into the .blend and zip the result.

        Returns the path of the created zip, or None on failure.
        Credit to L0Lock for pack script - https://blender.stackexchange.com/a/243935
        """
        try:
            logger.info(f"Starting to pack Blender file: {project_path}")
            results = cls.run_python_script(project_path, os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                                       'scripts', 'blender', 'pack_project.py'), timeout=timeout)

            result_text = results.stdout.decode()
            dir_name = os.path.dirname(project_path)

            # report any missing textures
            not_found = re.findall("(Unable to pack file, source path .*)\n", result_text)
            for err in not_found:
                logger.error(err)

            # pack_project.py prints 'Saved to: <zip path>' on success.
            p = re.compile('Saved to: (.*)\n')
            match = p.search(result_text)
            if match:
                new_path = os.path.join(dir_name, match.group(1).strip())
                logger.info(f'Blender file packed successfully to {new_path}')
                return new_path
        except Exception as e:
            logger.error(f'Error packing .blend file: {e}')
        return None
|
||||
55
src/workers/engines/ffmpeg_engine.py
Normal file
@@ -0,0 +1,55 @@
|
||||
try:
|
||||
from .base_engine import *
|
||||
except ImportError:
|
||||
from base_engine import *
|
||||
import re
|
||||
|
||||
|
||||
class FFMPEG(BaseRenderEngine):
    """ffmpeg engine wrapper: version probing and format/encoder discovery."""

    @classmethod
    def version(cls):
        """Return the installed ffmpeg version string, or None on failure."""
        version = None
        try:
            raw = subprocess.check_output([cls.renderer_path(), '-version'],
                                          timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
            # Banner looks like 'ffmpeg version <ver> Copyright ...'.
            match = re.match(".*version\s*(\S+)\s*Copyright", raw)
            if match:
                version = match.groups()[0]
        except Exception as e:
            logger.error("Failed to get FFMPEG version: {}".format(e))
        return version

    @classmethod
    def get_encoders(cls):
        """Parse `ffmpeg -encoders` into dicts of type/name/description."""
        raw_stdout = subprocess.check_output([cls.renderer_path(), '-encoders'], stderr=subprocess.DEVNULL,
                                             timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
        pattern = '(?P<type>[VASFXBD.]{6})\s+(?P<name>\S{2,})\s+(?P<description>.*)'
        return [found.groupdict() for found in re.finditer(pattern, raw_stdout)]

    @classmethod
    def get_all_formats(cls):
        """Parse `ffmpeg -formats` (muxers and demuxers) into dicts."""
        raw_stdout = subprocess.check_output([cls.renderer_path(), '-formats'], stderr=subprocess.DEVNULL,
                                             timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
        pattern = '(?P<type>[DE]{1,2})\s+(?P<name>\S{2,})\s+(?P<description>.*)'
        return [found.groupdict() for found in re.finditer(pattern, raw_stdout)]

    @classmethod
    def get_output_formats(cls):
        """Formats ffmpeg can encode to ('E' capability = muxer)."""
        return [fmt for fmt in cls.get_all_formats() if 'E' in fmt['type'].upper()]

    @classmethod
    def get_frame_count(cls, path_to_file):
        """Count frames by decoding the first video stream to the null muxer.

        Returns the last reported frame count as an int, or None when ffmpeg
        reported no frame statistics.
        """
        raw_stdout = subprocess.check_output([cls.renderer_path(), '-i', path_to_file, '-map', '0:v:0', '-c', 'copy',
                                              '-f', 'null', '-'], stderr=subprocess.STDOUT,
                                             timeout=SUBPROCESS_TIMEOUT).decode('utf-8')
        frame_reports = re.findall(r'frame=\s*(\d+)', raw_stdout)
        if frame_reports:
            return int(frame_reports[-1])
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test against a local movie file.
    print(FFMPEG.get_frame_count('/Users/brett/Desktop/Big_Fire_02.mov'))
|
||||
29
src/workers/engines/scripts/blender/get_file_info.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Runs inside Blender (via `blender -b <file> --python ...`); prints scene
# metadata as a JSON line prefixed with 'SCENE_DATA:' for the caller to parse.
import json
import bpy

# Get all cameras
# A camera datablock can be linked to multiple scene objects; record one
# entry per (object, camera-data) pairing.
cameras = []
for cam_obj in bpy.data.cameras:
    user_map = bpy.data.user_map(subset={cam_obj}, value_types={'OBJECT'})
    for data_obj in user_map[cam_obj]:
        cam = {'name': data_obj.name,
               'cam_name': cam_obj.name,
               'cam_name_full': cam_obj.name_full,
               'lens': cam_obj.lens,
               'lens_unit': cam_obj.lens_unit,
               'sensor_height': cam_obj.sensor_height,
               'sensor_width': cam_obj.sensor_width}
        cameras.append(cam)

# NOTE(review): reports the first scene only -- multi-scene files are not
# fully covered.
scene = bpy.data.scenes[0]
data = {'cameras': cameras,
        'engine': scene.render.engine,
        'frame_start': scene.frame_start,
        'frame_end': scene.frame_end,
        'resolution_x': scene.render.resolution_x,
        'resolution_y': scene.render.resolution_y,
        'resolution_percentage': scene.render.resolution_percentage,
        'fps': scene.render.fps}

data_string = json.dumps(data)
# Marker line parsed by Blender.get_scene_info on the caller side.
print("SCENE_DATA:" + data_string)
|
||||
53
src/workers/engines/scripts/blender/pack_project.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import bpy
|
||||
import os
|
||||
import shutil
|
||||
import zipfile
|
||||
|
||||
|
||||
def zip_files(paths, output_zip_path):
    """Zip the given files and directories into output_zip_path.

    Plain files are stored under their basename; a directory's contents are
    stored under the directory's basename (relative structure preserved).
    """
    with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
        for source in paths:
            if os.path.isfile(source):
                # Single file: flatten to its basename inside the archive.
                archive.write(source, arcname=os.path.basename(source))
            elif os.path.isdir(source):
                # Directory: walk it and keep paths relative to the directory,
                # prefixed with the directory's basename.
                for current_root, _dirs, filenames in os.walk(source):
                    for filename in filenames:
                        absolute = os.path.join(current_root, filename)
                        relative = os.path.relpath(absolute, source)
                        archive.write(absolute, arcname=os.path.join(os.path.basename(source), relative))
|
||||
|
||||
|
||||
# Get File path
project_path = str(bpy.data.filepath)

# Pack Files
# Embed every packable external datablock (textures etc.) into the .blend.
bpy.ops.file.pack_all()
bpy.ops.file.make_paths_absolute()

# Temp dir
tmp_dir = os.path.join(os.path.dirname(project_path), 'tmp')
asset_dir = os.path.join(tmp_dir, 'assets')
os.makedirs(tmp_dir, exist_ok=True)

# Find images we could not pack - usually videos
# These get copied into tmp/assets and re-pointed via a blend-relative path.
for img in bpy.data.images:
    if not img.packed_file and img.filepath and img.users:
        os.makedirs(asset_dir, exist_ok=True)
        shutil.copy2(img.filepath, os.path.join(asset_dir, os.path.basename(img.filepath)))
        print(f"Copied {os.path.basename(img.filepath)} to tmp directory")
        img.filepath = '//' + os.path.join('assets', os.path.basename(img.filepath))

# Save Output
bpy.ops.wm.save_as_mainfile(filepath=os.path.join(tmp_dir, os.path.basename(project_path)), compress=True, relative_remap=False)

# Save Zip
zip_path = os.path.join(os.path.dirname(project_path), f"{os.path.basename(project_path).split('.')[0]}.zip")
zip_files([os.path.join(tmp_dir, os.path.basename(project_path)), asset_dir], zip_path)
if os.path.exists(zip_path):
    # This line is parsed by Blender.pack_project_file on the caller side.
    print(f'Saved to: {zip_path}')
else:
    print("Error saving zip file!")

# Cleanup
shutil.rmtree(tmp_dir, ignore_errors=True)
|
||||
52
src/workers/ffmpeg_worker.py
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from src.workers.base_worker import BaseRenderWorker
|
||||
from src.workers.engines.ffmpeg_engine import FFMPEG
|
||||
|
||||
|
||||
class FFMPEGRenderWorker(BaseRenderWorker):
|
||||
|
||||
engine = FFMPEG
|
||||
|
||||
def __init__(self, input_path, output_path, args=None, parent=None, name=None):
    """Create an ffmpeg job and probe the input's frame count up front.

    Note: runs ffmpeg synchronously during construction to count frames in
    the source file (decode-to-null trick).
    """
    super(FFMPEGRenderWorker, self).__init__(input_path=input_path, output_path=output_path, args=args,
                                             parent=parent, name=name)

    # Decode the first video stream to /dev/null so ffmpeg reports the total
    # frame count in its progress lines.  https://stackoverflow.com/a/61604105
    stream_info = subprocess.check_output([self.engine.renderer_path(), "-i",
                                           input_path, "-map", "0:v:0", "-c", "copy", "-f", "null", "-y",
                                           "/dev/null"], stderr=subprocess.STDOUT).decode('utf-8')
    found_frames = re.findall('frame=\s*(\d+)', stream_info)
    # NOTE(review): project_length is kept as a STRING here ('-1' when not
    # detected); confirm downstream arithmetic converts it before use.
    self.project_length = found_frames[-1] if found_frames else '-1'
    self.current_frame = -1
|
||||
|
||||
def generate_worker_subprocess(self):
    """Build the ffmpeg argv list for this job.

    Returns:
        list: renderer path plus flags; ends with the output path.
    """

    cmd = [self.engine.renderer_path(), '-y', '-stats', '-i', self.input_path]

    # Resize frame
    if self.args.get('x_resolution', None) and self.args.get('y_resolution', None):
        cmd.extend(['-vf', f"scale={self.args['x_resolution']}:{self.args['y_resolution']}"])

    # Convert raw args from string if available.
    # CONSISTENCY FIX: use the base class's shlex-based get_raw_args() (as
    # BlenderRenderWorker does) instead of naive str.split(' '), so quoted
    # raw arguments are tokenized correctly.
    raw_args = self.get_raw_args()
    if raw_args:
        cmd.extend(raw_args)

    # Close with output path
    cmd.append(self.output_path)
    return cmd
|
||||
|
||||
def percent_complete(self):
    """Progress as a 0-1 fraction from the last parsed frame number."""
    fraction = float(self.current_frame) / float(self.total_frames)
    # current_frame starts at -1, so never report a negative fraction.
    return fraction if fraction > 0.0 else 0.0
|
||||
|
||||
def _parse_stdout(self, line):
|
||||
pattern = re.compile(r'frame=\s*(?P<current_frame>\d+)\s*fps.*time=(?P<time_elapsed>\S+)')
|
||||
found = pattern.search(line)
|
||||
if found:
|
||||
stats = found.groupdict()
|
||||
self.current_frame = stats['current_frame']
|
||||
time_elapsed = stats['time_elapsed']
|
||||
elif "not found" in line:
|
||||
self.log_error(line)
|
||||