mirror of https://github.com/blw1138/Zordon.git
synced 2025-12-17 08:48:13 +00:00

Commit: Combine RenderWorker creation into RenderJob creation. Misc cleanup.
dashboard.py (32 changed lines)
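The core of this commit: RenderJob now takes the raw render parameters and builds its RenderWorker internally (exposed as job.worker), instead of being handed a pre-built worker by the caller. A minimal sketch of the new call site, pieced together from the add_job() and RenderJob hunks below — the lib.render_queue module path and the example values are assumptions, not taken from the repository:

from lib.render_job import RenderJob       # shown in the diff below
from lib.render_queue import RenderQueue   # assumed module path

# One call now creates the job and its internal worker (available as job.worker)
job = RenderJob('Blender', '/projects/shot010.blend', '/projects/shot010/out/',
                args={}, priority=2, owner='artist', notify=False)

RenderQueue.add_to_render_queue(job, force_start=True)
print(job.render_status(), job.worker.log_path)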
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 import datetime
 import os.path
 import socket
@@ -18,7 +18,7 @@ from rich.text import Text
 from rich.tree import Tree

 from utilities.render_worker import RenderStatus, string_to_status
-from server import start_server
+from start_server import start_server

 """
 The RenderDashboard is designed to be run on a remote machine or on the local server
@@ -32,7 +32,7 @@ status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', R
 categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
               RenderStatus.COMPLETED, RenderStatus.CANCELLED]

-renderer_colors = {'ffmpeg': '[magenta]', 'Blender': '[orange1]', 'aerender':'[purple'}
+renderer_colors = {'ffmpeg': '[magenta]', 'Blender': '[orange1]', 'aerender': '[purple]'}

 local_hostname = socket.gethostname()

@@ -90,9 +90,9 @@ def create_node_tree(all_server_data) -> Tree:

     for job in jobs_to_display:
         renderer = f"{renderer_colors[job['renderer']]}{job['renderer']}[default]"
-        filename = os.path.basename(job['render']['input']).split('.')[0]
+        filename = os.path.basename(job['worker']['input_path']).split('.')[0]
         if job['status'] == 'running':
-            jobs_tree.add(f"[bold]{renderer} {filename} ({job['id']}) - {status_string_to_color(job['status'])}{(float(job['render']['percent_complete']) * 100):.1f}%")
+            jobs_tree.add(f"[bold]{renderer} {filename} ({job['id']}) - {status_string_to_color(job['status'])}{(float(job['worker']['percent_complete']) * 100):.1f}%")
         else:
             jobs_tree.add(f"{filename} ({job['id']}) - {status_string_to_color(job['status'])}{job['status'].title()}")

@@ -129,18 +129,18 @@ def create_jobs_table(all_server_data) -> Table:
         job_color = '[{}]'.format(status_colors[job_status])
         job_text = f"{job_color}" + job_status.value.title()

-        if job_status == RenderStatus.ERROR and job['render']['errors']:
-            job_text = job_text + "\n" + "\n".join(job['render']['errors'])
+        if job_status == RenderStatus.ERROR and job['worker']['errors']:
+            job_text = job_text + "\n" + "\n".join(job['worker']['errors'])

-        elapsed_time = job['render'].get('time_elapsed', 'unknown')
+        elapsed_time = job['worker'].get('time_elapsed', 'unknown')

         # Project name
-        project_name = job_color + os.path.basename(job['render']['input'])
+        project_name = job_color + os.path.basename(job['worker']['input_path'])
         project_name = project_name.replace(".", "[default].")

         if job_status == RenderStatus.RUNNING:
-            job_text = f"{job_color}[bold]Running - {float(job['render']['percent_complete']) * 100:.1f}%"
-            delta = datetime.datetime.now() - datetime.datetime.fromisoformat(job['render']['start_time'])
+            job_text = f"{job_color}[bold]Running - {float(job['worker']['percent_complete']) * 100:.1f}%"
+            delta = datetime.datetime.now() - datetime.datetime.fromisoformat(job['worker']['start_time'])
             elapsed_time = "[bold]" + str(delta)
             project_name = "[bold]" + project_name
         elif job_status == RenderStatus.CANCELLED or job_status == RenderStatus.ERROR:
@@ -156,12 +156,12 @@ def create_jobs_table(all_server_data) -> Table:
         table.add_row(
             job['id'],
             project_name,
-            os.path.basename(job['render']['output']),
-            renderer_colors.get(job['renderer'], '[cyan]') + job['renderer'] + '[default]-' + job['render']['renderer_version'],
+            os.path.basename(job['worker']['output_path']),
+            renderer_colors.get(job['renderer'], '[cyan]') + job['renderer'] + '[default]-' + job['worker']['renderer_version'],
             f"[{priority_color}]{job['priority']}",
             job_text,
             elapsed_time,
-            str(max(int(job['render']['total_frames']), 1)),
+            str(max(int(job['worker']['total_frames']), 1)),
             client_title
         )

@@ -256,8 +256,8 @@ if __name__ == '__main__':
     # Divide the "screen" in to three parts
     layout.split(
         Layout(name="header", size=3),
-        Layout(ratio=1, name="main"),
-        Layout(size=10, name="footer"),
+        Layout(ratio=1, name="main")
+        # Layout(size=10, name="footer"),
     )
     # Divide the "main" layout in to "side" and "body"
     layout["main"].split_row(
@@ -48,7 +48,7 @@ def get_job_status(job_id):
 def get_file_list(job_id):
     found_job = RenderQueue.job_with_id(job_id)
     if found_job:
-        job_dir = os.path.dirname(found_job.render.output_path)
+        job_dir = os.path.dirname(found_job.worker.output_path)
         return os.listdir(job_dir)
     else:
         return f'Cannot find job with ID {job_id}', 400
@@ -67,9 +67,9 @@ def download_all(job_id):

     found_job = RenderQueue.job_with_id(job_id)
     if found_job:
-        output_dir = os.path.dirname(found_job.render.output_path)
+        output_dir = os.path.dirname(found_job.worker.output_path)
         if os.path.exists(output_dir):
-            zip_filename = pathlib.Path(found_job.render.input_path).stem + '.zip'
+            zip_filename = pathlib.Path(found_job.worker.input_path).stem + '.zip'
             with ZipFile(zip_filename, 'w') as zipObj:
                 for f in os.listdir(output_dir):
                     zipObj.write(filename=os.path.join(output_dir, f),
@@ -187,19 +187,16 @@ def add_job():
     if client == RenderQueue.host_name:
         logger.info(f"Creating job locally - {input_path}")
         try:
-            render_job = RenderWorkerFactory.create_worker(renderer, input_path, output_path, args)
-            render_job.log_path = os.path.join(os.path.dirname(input_path), os.path.basename(input_path) + '.log')
+            render_job = RenderJob(renderer, input_path, output_path, args, priority, job_owner, client,
+                                   notify=False, custom_id=custom_id)
+            RenderQueue.add_to_render_queue(render_job, force_start=force_start)
+            return render_job.json()
         except Exception as e:
             err_msg = f"Error creating job: {str(e)}"
             logger.exception(err_msg)
             remove_job_dir()
             return err_msg, 400

-        new_job = RenderJob(render_job, priority=priority, owner=job_owner, custom_id=custom_id)
-        RenderQueue.add_to_render_queue(new_job, force_start=force_start)
-
-        return new_job.json()
-
     # client renders
     elif client in RenderQueue.render_clients:

@@ -1,40 +1,46 @@
+import hashlib
 import json
 import logging
 import os
 import uuid
 from datetime import datetime

-from utilities.render_worker import RenderStatus
+from utilities.render_worker import RenderStatus, RenderWorkerFactory

 logger = logging.getLogger()


 class RenderJob:

-    def __init__(self, render, priority=2, owner=None, client=None, notify=None, custom_id=None):
+    def __init__(self, renderer, input_path, output_path, args, priority=2, owner=None, client=None, notify=None, custom_id=None):
         self.id = custom_id or self.generate_id()
         self.owner = owner
-        self.render = render
         self.priority = priority
         self.client = client
         self.notify = notify
         self.date_created = datetime.now()
         self.scheduled_start = None
-        self.renderer = render.renderer
-        self.name = os.path.basename(render.input_path) + '_' + self.date_created.isoformat()
+        self.renderer = renderer
+        self.name = os.path.basename(input_path) + '_' + self.date_created.isoformat()
         self.archived = False

+        self.worker = RenderWorkerFactory.create_worker(renderer, input_path, output_path, args)
+        self.worker.log_path = os.path.join(os.path.dirname(input_path), os.path.basename(input_path) + '.log')
+
     def render_status(self):
         """Returns status of render job"""
         try:
-            if self.scheduled_start and self.render.status == RenderStatus.NOT_STARTED:
+            if self.scheduled_start and self.worker.status == RenderStatus.NOT_STARTED:
                 return RenderStatus.SCHEDULED
             else:
-                return self.render.status
+                return self.worker.status
         except Exception as e:
             logger.warning("render_status error: {}".format(e))
             return RenderStatus.ERROR

+    def file_hash(self):
+        return hashlib.md5(open(self.worker.input_path, 'rb').read()).hexdigest()
+
     def json(self):
         """Converts RenderJob into JSON format"""
         import numbers
@@ -48,20 +54,22 @@ class RenderJob:
         try:
             d = self.__dict__.copy()
             d['status'] = self.render_status().value
-            d['render'] = self.render.__dict__.copy()
+            d['file_hash'] = self.file_hash if isinstance(self.file_hash, str) else self.file_hash()
+            d['worker'] = self.worker.__dict__.copy()
             for key in ['thread', 'process']:  # remove unwanted keys from JSON
-                d['render'].pop(key, None)
-                d['render']['status'] = d['status']
+                d['worker'].pop(key, None)
+                d['worker']['status'] = d['status']

             # jobs from current_session generate percent completed
             # jobs after loading server pull in a saved value. Have to check if callable object or not

-            percent_complete = self.render.percent_complete if isinstance(self.render.percent_complete, numbers.Number) \
-                else self.render.percent_complete()
-            d['render']['percent_complete'] = percent_complete
+            percent_complete = self.worker.percent_complete if isinstance(self.worker.percent_complete, numbers.Number) \
+                else self.worker.percent_complete()
+            d['worker']['percent_complete'] = percent_complete

             json_string = json.dumps(d, default=date_serializer)
         except Exception as e:
             logger.exception(e)
+            logger.error("Error converting to JSON: {}".format(e))
         return json_string

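Since this serialized payload is what the dashboard reads, it may help to see the shape RenderJob.json() now produces: the worker's state is nested under a 'worker' key (previously 'render'). A rough, hand-written example — the keys follow the hunks above, every value is invented for illustration:

example_payload = {
    'id': 'a1b2c3',
    'owner': 'artist',
    'priority': 2,
    'renderer': 'Blender',
    'status': 'running',
    'file_hash': '9e107d9d372bb6826bd81d3542a419d6',
    'worker': {
        'input_path': '/projects/shot010.blend',
        'output_path': '/projects/shot010/out/',
        'status': 'running',
        'percent_complete': 0.45,
        'errors': [],
    },
}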
@@ -8,7 +8,7 @@ import psutil
 import requests

 from lib.render_job import RenderJob
-from utilities.render_worker import RenderWorkerFactory, RenderStatus
+from utilities.render_worker import RenderStatus

 logger = logging.getLogger()

@@ -34,7 +34,7 @@ class RenderQueue:
     def add_to_render_queue(cls, render_job, force_start=False, client=None):

         if not client or render_job.client == cls.host_name:
-            logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job.render))
+            logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job.worker))
             render_job.client = cls.host_name
             cls.job_queue.append(render_job)
             if force_start:
@@ -91,36 +91,34 @@ class RenderQueue:

         for job in saved_state.get('jobs', []):

-            # Identify renderer type and recreate Renderer object
-            job_render_object = RenderWorkerFactory.create_worker(job['renderer'],
-                                                                  input_path=job['render']['input_path'],
-                                                                  output_path=job['render']['output_path'])
+            render_job = RenderJob(renderer=job['renderer'], input_path=job['worker']['input_path'],
+                                   output_path=job['worker']['output_path'], args=job['worker']['args'],
+                                   priority=job['priority'], client=job['client'])

-            # Load Renderer values
-            for key, val in job['render'].items():
+            # Load Worker values
+            for key, val in job['worker'].items():
                 if val and key in ['start_time', 'end_time']:  # convert date strings back into date objects
-                    job_render_object.__dict__[key] = datetime.fromisoformat(val)
+                    render_job.worker.__dict__[key] = datetime.fromisoformat(val)
                 else:
-                    job_render_object.__dict__[key] = val
+                    render_job.worker.__dict__[key] = val

-            job_render_object.status = RenderStatus[job['status'].upper()]
-            job.pop('render', None)
+            render_job.worker.status = RenderStatus[job['status'].upper()]
+            job.pop('worker', None)

             # Create RenderJob with re-created Renderer object
-            new_job = RenderJob(job_render_object, job['priority'], job['client'])
             for key, val in job.items():
                 if key in ['date_created']:  # convert date strings back to datetime objects
-                    new_job.__dict__[key] = datetime.fromisoformat(val)
+                    render_job.__dict__[key] = datetime.fromisoformat(val)
                 else:
-                    new_job.__dict__[key] = val
-            new_job.__delattr__('status')
+                    render_job.__dict__[key] = val
+            render_job.__delattr__('status')

             # Handle older loaded jobs that were cancelled before closing
-            if new_job.render_status() == RenderStatus.RUNNING:
-                new_job.render.status = RenderStatus.CANCELLED
+            if render_job.render_status() == RenderStatus.RUNNING:
+                render_job.worker.status = RenderStatus.CANCELLED

             # finally add back to render queue
-            cls.job_queue.append(new_job)
+            cls.job_queue.append(render_job)

         cls.last_saved_counts = cls.job_counts()

@@ -147,7 +145,7 @@ class RenderQueue:
         not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
         if not_started:
             for job in not_started:
-                renderer = job.render.renderer
+                renderer = job.worker.renderer
                 higher_priority_jobs = [x for x in cls.running_jobs() if x.priority < job.priority]
                 max_renderers = renderer in instances.keys() and instances[
                                     renderer] >= cls.maximum_renderer_instances.get(renderer, 1)
@@ -167,21 +165,21 @@ class RenderQueue:
     def start_job(cls, job):
         logger.info('Starting {}render: {} - Priority {}'.format('scheduled ' if job.scheduled_start else '', job.name,
                                                                  job.priority))
-        job.render.start()
+        job.worker.start()

     @classmethod
     def cancel_job(cls, job):
         logger.info('Cancelling job ID: {}'.format(job.id))
         if job.render_status() in [RenderStatus.NOT_STARTED, RenderStatus.RUNNING, RenderStatus.ERROR]:
-            job.render.stop()
-            job.render.status = RenderStatus.CANCELLED
+            job.worker.stop()
+            job.worker.status = RenderStatus.CANCELLED
             return True
         return False

     @classmethod
     def renderer_instances(cls):
         from collections import Counter
-        all_instances = [x.render.renderer for x in cls.running_jobs()]
+        all_instances = [x.worker.renderer for x in cls.running_jobs()]
         return Counter(all_instances)

     @classmethod
@@ -217,24 +215,29 @@ class RenderQueue:
     @classmethod
     def register_client(cls, hostname):

-        #todo: check to make sure not adding ourselves
+        err_msg = None

-        success = False
+        if hostname == cls.host_name:
+            err_msg = "Cannot register same hostname as server"
+        elif hostname in cls.render_clients:
+            err_msg = f"Client '{hostname}' already registered"
+        else:
+            try:
+                response = requests.get(f"http://{hostname}:8080/status", timeout=3)
+                if response.ok:
+                    cls.render_clients.append(hostname)
+                    logger.info(f"Client '{hostname}' successfully registered")
+                    cls.save_state()
+                else:
+                    err_msg = f'Response from server not ok: {response.text}'
+            except requests.ConnectionError as e:
+                err_msg = f"Cannot connect to client at hostname: {hostname}"

-        if hostname in cls.render_clients:
-            logger.warning(f"Client '{hostname}' already registered")
-            return success
-
-        try:
-            response = requests.get(f"http://{hostname}:8080/status", timeout=3)
-            if response.ok:
-                cls.render_clients.append(hostname)
-                logger.info(f"Client '{hostname}' successfully registered")
-                success = True
-                cls.save_state()
-        except requests.ConnectionError as e:
-            logger.error(f"Cannot connect to client at hostname: {hostname}")
-        return success
+        if err_msg:
+            logger.warning(err_msg)
+            return err_msg, 400
+        else:
+            return 'success'

     @classmethod
     def unregister_client(cls, hostname):
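register_client() above now reports failures as an (error message, 400) tuple and success as the string 'success', which a Flask-style route can return directly. A hedged usage sketch — the hostname is made up:

result = RenderQueue.register_client('render-node-02.local')  # hypothetical client hostname
if result == 'success':
    print('client registered')
else:
    err_msg, status_code = result  # e.g. ("Client 'render-node-02.local' already registered", 400)
    print(status_code, err_msg)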
start_server.py (2 changed lines, Normal file → Executable file)
@@ -40,7 +40,7 @@ def start_server(background_thread=False):
     thread = threading.Thread(target=eval_loop, kwargs={'delay_sec': config.get('queue_eval_seconds', 1)}, daemon=True)
     thread.start()

-    logging.info("Starting Zordon Render Server")
+    logging.info(f"Starting Zordon Render Server - Hostname: '{RenderQueue.host_name}'")

     if background_thread:
         server_thread = threading.Thread(
@@ -17,20 +17,26 @@ class BlenderRenderWorker(BaseRenderWorker):
         super(BlenderRenderWorker, self).__init__(input_path=input_path, output_path=output_path,
                                                   ignore_extensions=False, args=args)

         # Args
         self.engine = self.args.get('engine', 'BLENDER_EEVEE').upper()
         self.export_format = self.args.get('export_format', None) or 'JPEG'
         self.camera = self.args.get('camera', None)
-        self.frame = 0

+        self.render_all_frames = self.args.get('render_all_frames', False)
+        self.frame = 0  #todo: remove this attribute
+
+        # Stats
+        self.current_frame = -1
+        self.memory_use = None
+        self.time_elapsed = None
+        self.time_remaining = None
+        self.frame_percent_complete = 0.0

         # Scene Info
         self.scene_info = get_scene_info(input_path)
         self.total_frames = int(self.scene_info.get('frame_end', 0))
         self.current_frame = int(self.scene_info.get('frame_start', 0))
         self.resolution = {'x': int(self.scene_info.get('resolution_x', 0)),
                            'y': int(self.scene_info.get('resolution_y', 0))}

     @classmethod
     def version(cls):
         version = None
@@ -111,8 +117,10 @@ class BlenderRenderWorker(BaseRenderWorker):
         if self.total_frames <= 1:
             return self.frame_percent_complete
         else:
-            return (self.current_frame / self.total_frames) + \
-                   (self.frame_percent_complete * (self.current_frame / self.total_frames))
+            whole_frame_percent = (self.current_frame - 1) / self.total_frames
+            adjusted_frame_percent = self.frame_percent_complete / self.total_frames
+            total_percent = whole_frame_percent + adjusted_frame_percent
+            return max(total_percent, 0)


 def run_python_expression_in_blend(path, python_expression):
@@ -141,7 +149,10 @@ def run_python_script_in_blend(path, python_path):

 def pack_blender_files(path):
     # Credit to L0Lock for pack script - https://blender.stackexchange.com/a/243935
-    pack_script = "import bpy\nbpy.ops.file.pack_all()\nmyPath = bpy.data.filepath\nmyPath = str(myPath)\n" \
+    pack_script = "import bpy\n" \
+                  "bpy.ops.file.pack_all()\n" \
+                  "myPath = bpy.data.filepath\n" \
+                  "myPath = str(myPath)\n" \
                   "bpy.ops.wm.save_as_mainfile(filepath=myPath[:-6]+'_packed'+myPath[-6:])"

     try:
@@ -171,7 +182,7 @@ def get_scene_info(path):
     scene_info = None
     try:
         results = run_python_script_in_blend(path, os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                                                                 'get_blender_info.py'))
+                                                                 'scripts', 'get_blender_info.py'))
         result_text = results.stdout.decode()
         for line in result_text.splitlines():
             if line.startswith('SCENE_DATA:'):
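The reworked BlenderRenderWorker percent_complete above counts fully finished frames plus a fraction for the frame currently in flight, instead of compounding the two ratios. A small worked example comparing the two formulas (numbers are illustrative):

# Rendering frame 5 of 10, halfway through that frame
current_frame, total_frames, frame_percent_complete = 5, 10, 0.5

whole_frame_percent = (current_frame - 1) / total_frames        # 4 finished frames -> 0.4
adjusted_frame_percent = frame_percent_complete / total_frames  # half of one frame -> 0.05
total_percent = whole_frame_percent + adjusted_frame_percent

print(max(total_percent, 0))  # 0.45; the old expression would have reported 0.75 here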
@@ -1,200 +0,0 @@
-import xml.etree.ElementTree as ET
-import argparse
-import os
-import glob
-from urllib2 import unquote
-import time
-
-library = None
-
-
-class FCPXLibrary:
-
-    def __init__(self, xml_path):
-        parser = ET.parse(xml_path)
-        self.root = parser.getroot()
-        self.xml_version = self.root.attrib.get('version')
-        self.location = self.library_location()
-
-        # self.projects = self.root.findall('./library/event/project')
-        self.formats = self.root.findall('./resources/format')
-
-        self.clips = [Clip(x, self) for x in self.root.findall(".//asset-clip")]
-        self.projects = [Project(x, self) for x in self.root.findall('./library/event/project')]
-
-    def formats(self):
-        return self.root.findall('./resources/format')
-
-    def element_with_tag_value(self, element, tag, value):
-        return self.root.findall(".//{e}[@{t}='{v}']".format(e=element, t=tag, v=value))
-
-    def clips_with_videorole(self, role):
-        return [clip for clip in self.clips if getattr(clip, 'videoRole', None) == role]
-
-    def format_with_id(self, id):
-        # return self.root.findall("./resources/format[id='{}']".format(id))
-        return self.element_with_tag_value('format', 'id', id)
-
-    def library_location(self):
-        # urllib2.unquote(asset_ref.get('src'))[7:]
-        path = self.root.findall('./library')[0].attrib['location']
-        return unquote(path)[7:]
-
-
-class Project:
-    def __init__(self, project_element, library):
-        for attrib in project_element.attrib:
-            setattr(self, attrib, project_element.get(attrib))
-        print(project_element.attrib)
-        print(project_element.parent)
-
-        ref_clips = project_element.findall(".//ref-clip")
-
-        print('start')
-        for clip in library.clips:
-            print(clip.name)
-            if clip.name == ref_clips[0]:
-                print(clip)
-                break
-        print('end')
-
-        # for child in ref_clips:
-        #     print(child.tag, child.attrib)
-
-
-class Clip:
-    def __init__(self, clip_element, library):
-
-        # self.library = library
-
-        # Get attribs from XML
-        for attrib in clip_element.attrib:
-            setattr(self, attrib, clip_element.get(attrib))
-        self.type = 'audio' if hasattr(self, 'audioRole') else 'video'
-
-        # Get clip reference
-        asset_ref = next(iter(library.element_with_tag_value('asset', 'id', self.ref)))
-
-        for attrib in asset_ref.attrib:
-            if not hasattr(self, attrib):
-                setattr(self, attrib, asset_ref.get(attrib))
-
-        self.source = unquote(asset_ref.get('src'))[7:]
-
-        if self.type == 'video':
-
-            format_id = getattr(self, 'format', asset_ref.get('format', None))
-            video_format = next(iter(library.format_with_id(format_id)))
-
-            if not hasattr(self, 'format'):
-                print('no format!')
-
-            try:
-                frame_duration = fcp_time_to_float(video_format.get('frameDuration'))
-                self.in_frame = int(round(fcp_time_to_float(self.start) / frame_duration))
-                duration = int(round(fcp_time_to_float(self.duration) / frame_duration))
-                self.out_frame = self.in_frame + duration
-            except Exception as e:
-                print('in/out fail: ' + str(e))
-                print(dir(self))
-                pass
-
-    def optimized_source(self):
-        path = None
-        mov = os.path.splitext(os.path.basename(self.source))[0] + '.mov'
-        found = glob.glob(os.path.join(library.location, '*', 'Transcoded Media', 'High Quality Media', mov))
-        if found:
-            path = found[0]
-        print(path)
-        return path
-
-    def proxy_source(self):
-        path = None
-        mov = os.path.splitext(os.path.basename(self.source))[0] + '.mov'
-        found = glob.glob(os.path.join(library.location, '*', 'Transcoded Media', 'Proxy Media', mov))
-        if found:
-            path = found[0]
-        print(path)
-        return path
-
-    def __repr__(self):
-        if self.type == 'video':
-            return "<Clip name:'%s' type: %s role: '%s' duration:%s frames>" % (getattr(self, 'name', None), self.type,
-                                                                                getattr(self, 'videoRole', None), self.out_frame - self.in_frame)
-        else:
-            return "<Clip name:'%s' type: %s role: '%s'>" % (getattr(self, 'name', None), self.type, getattr(self, 'audioRole', None))
-
-
-def fcp_time_to_float(timestr):
-    try:
-        rates = timestr.strip('s').split('/')
-        return float(rates[0]) / float(rates[-1])
-    except (ZeroDivisionError, AttributeError) as e:
-        return 0.0
-
-import sys
-from types import ModuleType, FunctionType
-from gc import get_referents
-
-# Custom objects know their class.
-# Function objects seem to know way too much, including modules.
-# Exclude modules as well.
-BLACKLIST = type, ModuleType, FunctionType
-
-
-def getsize(obj):
-    """sum size of object & members."""
-    if isinstance(obj, BLACKLIST):
-        raise TypeError('getsize() does not take argument of type: ' + str(type(obj)))
-    seen_ids = set()
-    size = 0
-    objects = [obj]
-    while objects:
-        need_referents = []
-        for obj in objects:
-            if not isinstance(obj, BLACKLIST) and id(obj) not in seen_ids:
-                seen_ids.add(id(obj))
-                size += sys.getsizeof(obj)
-                need_referents.append(obj)
-        objects = get_referents(*need_referents)
-    return size
-
-
-# if __name__ == "__main__":
-#
-#     parser = argparse.ArgumentParser()
-#     parser.add_argument('-i', '--input', help='Input FCPX Library XML', required=True)
-#     parser.add_argument('-s', '--save-file', help='Description', required=False)
-#
-#     args = parser.parse_args()
-#
-#     library = FCPXLibrary(args.input)
-#
-#     print getsize(library)
-#     while True:
-#         time.sleep(4)
-#
-#     print library.library_location()
-#
-#     print dir(library.clips[0])
-#     print library.clips[0]
-#     print library.clips[0].proxy_source()
-#
-#     print(args.input)
-#     print(args.save_file)
-
-if __name__ == '__main__':
-    library = FCPXLibrary('new.fcpxml')
-    # print library.clips[0].source
-    # print library.library_location()
-    #
-    # print dir(library.clips[0])
-    # print library.clips[0]
-    # print library.clips[0].proxy_source()
-    # for clip in library.clips:
-    #     print clip
-    print(dir(library.projects[0]))
-    print(library.formats)
@@ -1,107 +0,0 @@
-import json
-import requests
-from requests.auth import HTTPBasicAuth
-from datetime import datetime, timezone
-import time
-import logging
-
-
-class OpenProject:
-
-    def __init__(self):
-        # self.server_url = "http://localhost:8080"
-        self.server_url = "http://17.114.221.240:8080"
-        # self.api_key = "bb5897eb1daf9bdc4b400675de8e1e52bd64e1e8bce95b341a61a036431c850e"
-        self.api_key = "b902d975fcf6a29558e611e665145282acffa1e7109bfb462ef25266f7f9ed6e"
-
-    def create_shot(self, scene, shot, project, sequence=None):
-        url = self.server_url + "/api/v3/work_packages"
-
-        project_url = 1
-        attributes = {
-            "subject": "SC{}_{}".format(scene, shot),
-            "customField2": scene,
-            "customField1": shot,
-            "_links": {
-                "project": {"href": "/api/v3/projects/{}".format(project_url)},
-                "type": {"href": "/api/v3/types/1"}
-            }
-        }
-
-        return self._send_command(url, attributes)
-
-    def add_comment(self, work_project_id, comment, notify=False):
-
-        url = self.server_url + "/api/v3/work_packages/{}/activities?notify={}".format(str(work_project_id), str(notify))
-        attributes = {"comment": {"raw": comment}}
-
-        return self._send_command(url, attributes)
-
-    def get_work_package(self, identifier=None, attribute=None):
-        url = self.server_url + "/api/v3/work_packages/"
-        if identifier:
-            url = url + str(identifier)
-        return self._send_command(url, attribute)
-
-    def get_projects(self, identifier=None):
-        url = self.server_url + "/api/v3/projects/"
-        if identifier:
-            url = url + str(identifier)
-        return self._send_command(url, None)
-
-    def _send_command(self, url, body):
-
-        if body:
-            response = requests.post(url, json=body,
-                                     auth=HTTPBasicAuth('apikey', self.api_key))
-        else:
-            response = requests.get(url, auth=HTTPBasicAuth('apikey', self.api_key))
-
-        if not response.ok:
-            logging.error('Response error: {}'.format(response.reason))
-
-        return response.json()
-
-
-class OpenProjectWatcher:
-
-    def __init__(self, op_instance, interval=30):
-        self.op = OpenProject()
-        self.interval = interval
-        self.last_check = None
-
-    def _check_projects(self):
-        projects = self.op.get_projects()
-        for project in projects['_embedded']['elements']:
-            # last_update = datetime.datetime.fromisoformat(project['updatedAt'])
-            last_update = datetime.strptime(project['updatedAt'], "%Y-%m-%dT%H:%M:%S%z")
-            if not self.last_check or last_update > self.last_check:
-                logging.info("Update found for project: {}".format(project['name']))
-                # todo: do something with updated info
-
-    def _check_work_projects(self):
-        packages = self.op.get_work_package()
-        for pkg in packages['_embedded']['elements']:
-            # print(pkg.keys())
-            last_update = datetime.strptime(pkg['updatedAt'], "%Y-%m-%dT%H:%M:%S%z")
-            if not self.last_check or last_update > self.last_check:
-                logging.info("Update found for shot: {}".format(pkg['subject']))
-                # todo: do something with updated info
-
-    def watch(self):
-        while True:
-            now = datetime.now(timezone.utc)
-            self._check_projects()
-            self._check_work_projects()
-            self.last_check = now
-            time.sleep(self.interval)
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.INFO)
-
-    op = OpenProject()
-    op.add_comment(42, "After Effects Render completed successfully. Log available here.", True)
-    # print(op.get_projects())
-    watcher = OpenProjectWatcher(OpenProject())
-    watcher.watch()
@@ -51,7 +51,7 @@ class BaseRenderWorker(object):
         self.date_created = datetime.now()
         self.renderer_version = self.version()

-        # Ranges
+        # Frame Ranges
         self.total_frames = 0
         self.current_frame = 0
