diff --git a/lib/job_server.py b/lib/job_server.py
index b6f8d27..14ef528 100755
--- a/lib/job_server.py
+++ b/lib/job_server.py
@@ -54,7 +54,8 @@ def index():
def job_detail(job_id):
found_job = RenderQueue.job_with_id(job_id)
if found_job:
- table_html = json2html.json2html.convert(json=found_job.json(), table_attributes='class="table is-narrow is-striped"')
+ table_html = json2html.json2html.convert(json=found_job.json(),
+ table_attributes='class="table is-narrow is-striped"')
media_url = None
if found_job.file_list():
media_basename = os.path.basename(found_job.file_list()[0])
@@ -230,11 +231,12 @@ def add_job_handler():
elif request.form.get('json', None):
jobs_list = json.loads(request.form['json'])
else:
- form_dict = dict(request.form)
+ # Clean up flat form data (drop empty fields) before nesting renderer args
+ form_dict = {k: v for k, v in dict(request.form).items() if v}
args = {}
arg_keys = [k for k in form_dict.keys() if '-arg_' in k]
for key in arg_keys:
- if form_dict['renderer'] in key:
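+ # Args prefixed with 'AnyRenderer' apply regardless of which renderer was selected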
+ if form_dict['renderer'] in key or 'AnyRenderer' in key:
cleaned_key = key.split('-arg_')[-1]
args[cleaned_key] = form_dict[key]
form_dict.pop(key)
@@ -285,7 +287,6 @@ def add_job_handler():
def add_job(job_params, remove_job_dir_on_failure=False):
-
def remove_job_dir():
if remove_job_dir_on_failure and job_dir and os.path.exists(job_dir):
logger.debug(f"Removing job dir: {job_dir}")
@@ -298,7 +299,7 @@ def add_job(job_params, remove_job_dir_on_failure=False):
output_path = job_params.get("output_path", None)
priority = int(job_params.get('priority', 2))
args = job_params.get('args', {})
- client = job_params.get('client', RenderQueue.host_name)
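+ # Treat a missing or empty client value as "render on this host"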
+ client = job_params.get('client', None) or RenderQueue.host_name
force_start = job_params.get('force_start', False)
custom_id = None
job_dir = None
@@ -448,4 +449,3 @@ def renderer_info():
def upload_file_page():
return render_template('upload.html', render_clients=RenderQueue.render_clients,
supported_renderers=RenderWorkerFactory.supported_renderers())
-
diff --git a/lib/render_job.py b/lib/render_job.py
index fc6bf70..7b21226 100644
--- a/lib/render_job.py
+++ b/lib/render_job.py
@@ -41,7 +41,6 @@ class RenderJob:
def json(self):
"""Converts RenderJob into JSON-friendly dict"""
- import numbers
job_dict = None
try:
job_dict = self.__dict__.copy()
@@ -91,6 +90,9 @@ class RenderJob:
def frame_count(self):
return self.worker.total_frames
+ def work_path(self):
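+ """Directory that holds this job's output files"""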
+ return os.path.dirname(self.worker.output_path)
+
def file_list(self):
job_dir = os.path.dirname(self.worker.output_path)
return glob.glob(os.path.join(job_dir, '*'))
diff --git a/lib/render_queue.py b/lib/render_queue.py
index d5cf502..faf58cc 100755
--- a/lib/render_queue.py
+++ b/lib/render_queue.py
@@ -39,8 +39,6 @@ class RenderQueue:
cls.job_queue.append(render_job)
if force_start:
cls.start_job(render_job)
- else:
- cls.evaluate_queue()
else:
# todo: implement client rendering
logger.warning('remote client rendering not implemented yet')
@@ -88,35 +86,37 @@ class RenderQueue:
cls.render_clients = saved_state.get('clients', {})
for job in saved_state.get('jobs', []):
- render_job = RenderJob(renderer=job['renderer'], input_path=job['worker']['input_path'],
- output_path=job['worker']['output_path'], args=job['worker']['args'],
- priority=job['priority'], client=job['client'])
- # Load Worker values
- for key, val in job['worker'].items():
- if val and key in ['start_time', 'end_time']: # convert date strings back into date objects
- render_job.worker.__dict__[key] = datetime.fromisoformat(val)
- else:
- render_job.worker.__dict__[key] = val
- render_job.worker.status = RenderStatus[job['status'].upper()]
- job.pop('worker', None)
- # Create RenderJob with re-created Renderer object
- for key, val in job.items():
- if key in ['date_created']: # convert date strings back to datetime objects
- render_job.__dict__[key] = datetime.fromisoformat(val)
- else:
- render_job.__dict__[key] = val
- render_job.__delattr__('status')
- # Handle older loaded jobs that were cancelled before closing
- if render_job.render_status() == RenderStatus.RUNNING:
- render_job.worker.status = RenderStatus.CANCELLED
-
- # finally add back to render queue
- cls.job_queue.append(render_job)
+ try:
+ render_job = RenderJob(renderer=job['renderer'], input_path=job['worker']['input_path'],
+ output_path=job['worker']['output_path'], args=job['worker']['args'],
+ priority=job['priority'], client=job['client'])
+ # Load Worker values
+ for key, val in job['worker'].items():
+ if val and key in ['start_time', 'end_time']: # convert date strings back into date objects
+ render_job.worker.__dict__[key] = datetime.fromisoformat(val)
+ else:
+ render_job.worker.__dict__[key] = val
+ render_job.worker.status = RenderStatus[job['status'].upper()]
+ job.pop('worker', None)
+ # Create RenderJob with re-created Renderer object
+ for key, val in job.items():
+ if key in ['date_created']: # convert date strings back to datetime objects
+ render_job.__dict__[key] = datetime.fromisoformat(val)
+ else:
+ render_job.__dict__[key] = val
+ render_job.__delattr__('status')
+ # Handle older loaded jobs that were cancelled before closing
+ if render_job.render_status() == RenderStatus.RUNNING:
+ render_job.worker.status = RenderStatus.CANCELLED
+ # finally add back to render queue
+ cls.job_queue.append(render_job)
+ except Exception as e:
+ logger.error(f"Unable to load job: {job} - {e}")
cls.last_saved_counts = cls.job_counts()
diff --git a/templates/index.html b/templates/index.html
index 0239838..ecc3dc0 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -247,13 +247,22 @@
diff --git a/utilities/compressor.py b/utilities/compressor.py
deleted file mode 100644
index f2747df..0000000
--- a/utilities/compressor.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#! /usr/bin/python
-from render_worker import *
-import glob
-import logging
-import subprocess
-
-# Documentation
-# https://help.apple.com/compressor/mac/4.0/en/compressor/usermanual/Compressor%204%20User%20Manual%20(en).pdf
-
-def compressor_path():
- return '/Applications/Compressor.app/Contents/MacOS/Compressor'
-
-
-class CompressorRenderWorker(RenderWorker):
-
- renderer = 'Compressor'
-
- # Usage: Compressor [Cluster Info] [Batch Specific Info] [Optional Info] [Other Options]
- #
- # -computergroup -- name of the Computer Group to use.
- # --Batch Specific Info:--
- # -batchname -- name to be given to the batch.
- # -priority -- priority to be given to the batch. Possible values are: low, medium or high
- # Job Info: Used when submitting individual source files. Following parameters are repeated to enter multiple job targets in a batch
- # -jobpath -- url to source file.
- # -- In case of Image Sequence, URL should be a file URL pointing to directory with image sequence.
- # -- Additional URL query style parameters may be specified to set frameRate (file:///myImageSequenceDir?frameRate=29.97) and audio file (e.g. file:///myImageSequenceDir?audio=/usr/me/myaudiofile.mov).
- # -settingpath -- path to settings file.
- # -locationpath -- path to location file.
- # -info -- xml for job info.
- # -jobaction -- xml for job action.
- # -scc -- url to scc file for source
- # -startoffset -- time offset from beginning
- # -in -- in time
- # -out -- out time
- # -annotations -- path to file to import annotations from; a plist file or a Quicktime movie
- # -chapters -- path to file to import chapters from
- # --Optional Info:--
- # -help -- Displays, on stdout, this help information.
- # -checkstream -- url to source file to analyze
- # -findletterbox -- url to source file to analyze
- #
- # --Batch Monitoring Info:--
- # Actions on Job:
- # -monitor -- monitor the job or batch specified by jobid or batchid.
- # -kill -- kill the job or batch specified by jobid or batchid.
- # -pause -- pause the job or batch specified by jobid or batchid.
- # -resume -- resume previously paused job or batch specified by jobid or batchid.
- # Optional Info:
- # -jobid -- unique id of the job usually obtained when job was submitted.
- # -batchid -- unique id of the batch usually obtained when job was submitted.
- # -query -- The value in seconds, specifies how often to query the cluster for job status.
- # -timeout -- the timeOut value, in seconds, specifies when to quit the process.
- # -once -- show job status only once and quit the process.
- #
- # --Sharing Related Options:--
- # -resetBackgroundProcessing [cancelJobs] -- Restart all processes used in background processing, and optionally cancel all queued jobs.
- #
- # -repairCompressor -- Repair Compressor config files and restart all processes used in background processing.
- #
- # -sharing -- Turn sharing of this computer on or off.
- #
- # -requiresPassword [password] -- Sharing of this computer requires specified password. Computer must not be busy processing jobs when you set the password.
- #
- # -noPassword -- Turn off the password requirement for sharing this computer.
- #
- # -instances -- Enables additional Compressor instances.
- #
- # -networkInterface -- Specify which network interface to use. If "all" is specified for , all available network interfaces are used.
- #
- # -portRange -- Defines what port range use, using start number specifying how many ports to use.
- #
- # --File Modification Options (all other parameters ignored):--
- # -relabelaudiotracks -- url to source file. - Must be a QuickTime Movie file
- # --Optional Info:--
- # -renametrackswithlayouts (Optional, rename the tracks with the new channel layouts)
- # -locationpath -- path to location file. Modified movie will be saved here. If unspecified, changes will be saved in place, overwriting the original file.
-
- def __init__(self, project, settings_path, output):
- super(CompressorRenderWorker, self).__init__(project=project, output=output)
- self.settings_path = settings_path
-
- self.batch_name = os.path.basename(project)
- self.cluster_name = 'This Computer'
-
- self.timeout = 5
-
- # /Applications/Compressor.app/Contents/MacOS/Compressor -clusterid "tcp://192.168.1.148:62995" -batchname "My First Batch" -jobpath ~/Movies/MySource.mov -settingpath ~/Library/Application\ Support/Compressor/Settings/MPEG-4.setting -destinationpath ~/Movies/MyOutput.mp4 -timeout 5
-
- def _generate_subprocess(self):
- x = [compressor_path(), '-batchname', datetime.now().isoformat(), '-jobpath', self.input, '-settingpath', self.settings_path, '-locationpath', self.output]
- print(' '.join(x))
- return x
-
- def _parse_stdout(self, line):
- print(line)
-
-
-if __name__ == '__main__':
- logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.DEBUG)
- r = CompressorRenderWorker('/Users/brett/Desktop/drone_raw.mp4', '/Applications/Compressor.app/Contents/Resources/Settings/Website Sharing/HD720WebShareName.compressorsetting', '/Users/brett/Desktop/test_drone_output.mp4')
- r.start()
- while r.is_running():
- time.sleep(1)
\ No newline at end of file
diff --git a/utilities/ffmpeg_worker.py b/utilities/ffmpeg_worker.py
index d2b0bd9..3a2b8e2 100644
--- a/utilities/ffmpeg_worker.py
+++ b/utilities/ffmpeg_worker.py
@@ -42,14 +42,17 @@ class FFMPEGRenderWorker(BaseRenderWorker):
def _generate_subprocess(self):
cmd = [self.renderer_path(), '-y', '-stats', '-i', self.input_path]
- if self.args:
- cmd.extend([x for x in self.args if x != 'raw'])
+
+ # Resize frame
+ if self.args.get('x_resolution', None) and self.args.get('y_resolution', None):
+ cmd.extend(['-vf', f"scale={self.args['x_resolution']}:{self.args['y_resolution']}"])
# Convert raw args from string if available
raw_args = self.args.get('raw', None)
- print(raw_args)
if raw_args:
cmd.extend(raw_args.split(' '))
+
+ # End the command with the output path
cmd.append(self.output_path)
return cmd
diff --git a/utilities/render_worker.py b/utilities/render_worker.py
index c4714e1..42fca3c 100644
--- a/utilities/render_worker.py
+++ b/utilities/render_worker.py
@@ -32,6 +32,7 @@ class BaseRenderWorker(object):
renderer = 'BaseRenderWorker'
render_engine = None
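+ # Version of the underlying render engine, when known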
+ render_engine_version = None
supported_extensions = []
install_paths = []
supported_export_formats = []