Mirror of https://github.com/blw1138/Zordon.git

@@ -1,20 +1,17 @@
-import json
 import logging
-import os
 import platform
 from datetime import datetime
 
 import psutil
 import requests
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
 
-from .scheduled_job import ScheduledJob
 from .render_workers.render_worker import RenderStatus
+from .scheduled_job import ScheduledJob, Base
 
 logger = logging.getLogger()
 
-JSON_FILE = 'server_state.json'
-#todo: move history to sqlite db
-
 
 class JobNotFoundError(Exception):
     def __init__(self, job_id, *args):
@@ -23,6 +20,11 @@ class JobNotFoundError(Exception):
 
 
 class RenderQueue:
+    engine = create_engine('sqlite:///database.db')
+    Base.metadata.create_all(engine)
+    Session = sessionmaker(bind=engine)
+    session = Session()
+    ScheduledJob.register_user_events()
     job_queue = []
     render_clients = []
     maximum_renderer_instances = {'blender': 2, 'aerender': 1, 'ffmpeg': 4}
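
The five class attributes added here bootstrap SQLAlchemy at class-definition time: engine, schema creation, session factory, one shared session, and the event hookup. A minimal self-contained sketch of that pattern, with illustrative names (demo.db, DemoJob) rather than the repo's:

# Minimal sketch of the SQLAlchemy bootstrap the diff adds; 'demo.db' and
# DemoJob are illustrative stand-ins, not names from the repository.
from sqlalchemy import create_engine, Column, String
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class DemoJob(Base):
    __tablename__ = 'demo_jobs'
    id = Column(String, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite:///demo.db')  # file-backed SQLite database
Base.metadata.create_all(engine)             # create any missing tables up front
Session = sessionmaker(bind=engine)          # session factory bound to the engine
session = Session()                          # one shared session, as RenderQueue keeps

Binding a single long-lived session as a class attribute keeps the diff small, though it does mean every RenderQueue method shares one unit of work.
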
@@ -40,15 +42,21 @@ class RenderQueue:
     def add_to_render_queue(cls, render_job, force_start=False, client=None):
 
         if not client or render_job.client == cls.host_name:
-            logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job.worker))
+            logger.debug('Adding priority {} job to render queue: {}'.format(render_job.priority, render_job.worker_object))
             render_job.client = cls.host_name
             cls.job_queue.append(render_job)
             if force_start:
                 cls.start_job(render_job)
+            cls.session.add(render_job)
+            cls.save_state()
         else:
             # todo: implement client rendering
             logger.warning('remote client rendering not implemented yet')
 
+    @classmethod
+    def all_jobs(cls):
+        return cls.job_queue
+
     @classmethod
     def running_jobs(cls):
         return cls.jobs_with_status(RenderStatus.RUNNING)
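
With the session registration in place, enqueueing persists the job as a side effect. A hedged usage sketch, assuming RenderQueue and ScheduledJob are importable from the package; all argument values are illustrative:

# Hypothetical enqueue through the new code path.
job = ScheduledJob(renderer='ffmpeg', input_path='in.mov',
                   output_path='out/out.mp4', args='')
RenderQueue.add_to_render_queue(job)
# internally: job_queue.append(job); session.add(job); save_state() -> session.commit()
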
@@ -61,89 +69,33 @@ class RenderQueue:
 
     @classmethod
     def jobs_with_status(cls, status, priority_sorted=False):
-        found_jobs = [x for x in cls.job_queue if x.render_status() == status]
+        found_jobs = [x for x in cls.all_jobs() if x.render_status() == status]
         if priority_sorted:
             found_jobs = sorted(found_jobs, key=lambda a: a.priority, reverse=False)
         return found_jobs
 
     @classmethod
     def job_with_id(cls, job_id, none_ok=False):
-        found_job = next((x for x in cls.job_queue if x.id == job_id), None)
+        found_job = next((x for x in cls.all_jobs() if x.id == job_id), None)
         if not found_job and not none_ok:
             raise JobNotFoundError(job_id)
         return found_job
 
     @classmethod
     def clear_history(cls):
-        to_remove = [x for x in cls.job_queue if x.render_status() in [RenderStatus.CANCELLED,
+        to_remove = [x for x in cls.all_jobs() if x.render_status() in [RenderStatus.CANCELLED,
                                                                        RenderStatus.COMPLETED, RenderStatus.ERROR]]
         for job_to_remove in to_remove:
-            cls.job_queue.remove(job_to_remove)
+            cls.delete_job(job_to_remove)
         cls.save_state()
 
     @classmethod
-    def load_state(cls, json_path=None):
-        """Load state history from JSON file"""
-        input_path = json_path or JSON_FILE
-        if os.path.exists(input_path):
-            with open(input_path) as f:
-
-                # load saved data
-                saved_state = json.load(f)
-                cls.render_clients = saved_state.get('clients', {})
-
-                for job in saved_state.get('jobs', []):
-                    try:
-                        render_job = ScheduledJob(renderer=job['renderer'], input_path=job['worker']['input_path'],
-                                                  output_path=job['worker']['output_path'], args=job['worker']['args'],
-                                                  priority=job['priority'], client=job['client'])
-
-                        # Load Worker values
-                        for key, val in job['worker'].items():
-                            if val and key in ['start_time', 'end_time']:  # convert date strings back into date objects
-                                render_job.worker.__dict__[key] = datetime.fromisoformat(val)
-                            else:
-                                render_job.worker.__dict__[key] = val
-
-                        render_job.worker.status = RenderStatus[job['status'].upper()]
-                        job.pop('worker', None)
-
-                        # Create RenderJob with re-created Renderer object
-                        for key, val in job.items():
-                            if key in ['date_created']:  # convert date strings back to datetime objects
-                                render_job.__dict__[key] = datetime.fromisoformat(val)
-                            else:
-                                import types
-                                if hasattr(render_job, key):
-                                    if getattr(render_job, key) and not isinstance(getattr(render_job, key), types.MethodType):
-                                        render_job.__dict__[key] = val
-
-                        # Handle older loaded jobs that were cancelled before closing
-                        if render_job.render_status() == RenderStatus.RUNNING:
-                            render_job.worker.status = RenderStatus.CANCELLED
-
-                        # finally add back to render queue
-                        cls.job_queue.append(render_job)
-                    except Exception as e:
-                        logger.exception(f"Unable to load job: {job['id']} - {e}")
-
-        cls.last_saved_counts = cls.job_counts()
+    def load_state(cls):
+        cls.job_queue = cls.session.query(ScheduledJob).all()
 
     @classmethod
-    def save_state(cls, json_path=None):
-        """Save state history to JSON file"""
-        try:
-            logger.debug("Saving Render History")
-            output = {'timestamp': datetime.now().isoformat(),
-                      'jobs': [j.json() for j in cls.job_queue],
-                      'clients': cls.render_clients}
-            output_path = json_path or JSON_FILE
-            with open(output_path, 'w') as f:
-                json.dump(output, f, indent=4)
-            cls.last_saved_counts = cls.job_counts()
-        except Exception as e:
-            logger.error("Error saving state JSON: {}".format(e))
+    def save_state(cls):
+        cls.session.commit()
 
     @classmethod
     def evaluate_queue(cls):
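
The hand-rolled JSON rehydration above collapses into a single query, and saving becomes a commit. Continuing the DemoJob sketch from earlier, the equivalent round trip:

# load_state() equivalent: ORM rows come back as live Python objects
jobs = session.query(DemoJob).all()

# mutate in memory, then save_state() equivalent: one commit persists everything
if jobs:
    jobs[0].name = 'renamed'
session.commit()
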
@@ -153,10 +105,9 @@ class RenderQueue:
         not_started = cls.jobs_with_status(RenderStatus.NOT_STARTED, priority_sorted=True)
         if not_started:
             for job in not_started:
-                renderer = job.worker.engine.name()
                 higher_priority_jobs = [x for x in cls.running_jobs() if x.priority < job.priority]
-                max_renderers = renderer in instances.keys() and instances[
-                    renderer] >= cls.maximum_renderer_instances.get(renderer, 1)
+                max_renderers = job.renderer in instances.keys() and instances[
+                    job.renderer] >= cls.maximum_renderer_instances.get(job.renderer, 1)
 
                 if not max_renderers and not higher_priority_jobs:
                     cls.start_job(job)
@@ -172,7 +123,7 @@ class RenderQueue:
     @classmethod
     def start_job(cls, job):
         logger.info('Starting {}render: {} - Priority {}'.format('scheduled ' if job.scheduled_start else '', job.name,
                                                                  job.priority))
         job.start()
 
     @classmethod
@@ -186,12 +137,14 @@ class RenderQueue:
         logger.info(f"Deleting job ID: {job.id}")
         job.stop()
         cls.job_queue.remove(job)
+        cls.session.delete(job)
+        cls.save_state()
         return True
 
     @classmethod
     def renderer_instances(cls):
         from collections import Counter
-        all_instances = [x.worker.engine.name() for x in cls.running_jobs()]
+        all_instances = [x.renderer for x in cls.running_jobs()]
         return Counter(all_instances)
 
     @classmethod
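
renderer_instances() now counts the persisted renderer string instead of reaching into the worker object. The Counter arithmetic that evaluate_queue() compares against maximum_renderer_instances, in isolation:

# Stand-alone view of the instance-count check used by evaluate_queue().
from collections import Counter

running = ['blender', 'blender', 'ffmpeg']   # stand-in for [x.renderer for x in running_jobs()]
instances = Counter(running)                 # Counter({'blender': 2, 'ffmpeg': 1})
maximum_renderer_instances = {'blender': 2, 'aerender': 1, 'ffmpeg': 4}
at_cap = instances['blender'] >= maximum_renderer_instances.get('blender', 1)
print(at_cap)  # True: a third blender job would have to wait
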
@@ -199,25 +152,21 @@ class RenderQueue:
         job_counts = {}
         for job_status in RenderStatus:
             job_counts[job_status.value] = len(cls.jobs_with_status(job_status))
 
         return job_counts
 
     @classmethod
     def status(cls):
-        stats = {"timestamp": datetime.now().isoformat(),
+        return {"timestamp": datetime.now().isoformat(),
                 "platform": platform.platform(),
                 "cpu_percent": psutil.cpu_percent(percpu=False),
                 "cpu_percent_per_cpu": psutil.cpu_percent(percpu=True),
                 "cpu_count": psutil.cpu_count(),
                 "memory_total": psutil.virtual_memory().total,
                 "memory_available": psutil.virtual_memory().available,
                 "memory_percent": psutil.virtual_memory().percent,
                 "job_counts": cls.job_counts(),
                 "host_name": cls.host_name
                 }
-
-        return stats
 
     @classmethod
     def register_client(cls, hostname):
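
status() now returns the dict directly instead of staging it in a local. Every entry is a stock platform/psutil call; a quick standalone check (values are machine-dependent):

# The system probes behind status(); all are standard psutil/platform APIs.
import platform

import psutil

print(platform.platform())               # e.g. 'macOS-13.4-arm64-...'
print(psutil.cpu_percent(percpu=False))  # aggregate CPU %; first no-interval call may read 0.0
print(psutil.cpu_percent(percpu=True))   # list with one % per logical core
print(psutil.cpu_count())                # logical core count
print(psutil.virtual_memory().percent)   # memory in use, %
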
@@ -264,4 +213,3 @@ class RenderQueue:
         except requests.ConnectionError as e:
             pass
         return False
-
@@ -27,12 +27,6 @@ class Blender(BaseRenderEngine):
         formats = re.findall(r"'([A-Z_0-9]+)'", format_string)
         return formats
 
-    @classmethod
-    def full_report(cls):
-        return {'version': cls.version(),
-                'help_text': cls.get_help(),
-                'formats': cls.get_formats()}
-
     @classmethod
     def run_python_expression(cls, project_path, python_expression):
         if os.path.exists(project_path):
@@ -4,6 +4,7 @@ import logging
 import os
 import subprocess
 import threading
+import json
 from datetime import datetime
 from enum import Enum
 
@@ -19,6 +20,7 @@ class RenderStatus(Enum):
     CANCELLED = "cancelled"
     ERROR = "error"
     SCHEDULED = "scheduled"
+    UNDEFINED = "undefined"
 
 
 def string_to_status(string):
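
The new UNDEFINED member gives render_status() a safe fallback when a stored status string cannot be resolved. How the two Enum lookup styles used in this codebase behave, on a trimmed copy of the enum:

# Value lookup vs. name lookup on a trimmed copy of RenderStatus.
from enum import Enum

class RenderStatus(Enum):
    RUNNING = "running"
    UNDEFINED = "undefined"

print(RenderStatus("running"))    # RenderStatus.RUNNING, lookup by stored value
print(RenderStatus["UNDEFINED"])  # lookup by name, as the old JSON loader did
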
@@ -135,7 +137,7 @@ class BaseRenderWorker(object):
         while self.failed_attempts < self.maximum_attempts and self.status is not RenderStatus.COMPLETED:
 
             if self.failed_attempts:
-                logger.info('Attempt #{} failed. Starting attempt #{}'.format(self.failed_attempts, self.failed_attempts + 1))
+                logger.info(f'Attempt #{self.failed_attempts} failed. Starting attempt #{self.failed_attempts + 1}')
 
             # Start process and get updates
             subprocess_cmds = self.generate_subprocess()
@@ -208,14 +210,57 @@ class BaseRenderWorker(object):
     def _parse_stdout(self, line):
         raise NotImplementedError("_parse_stdout not implemented")
 
-    def elapsed_time(self):
-        elapsed = ""
-        if self.start_time:
-            if self.end_time:
-                elapsed = self.end_time - self.start_time
-            elif self.is_running():
-                elapsed = datetime.now() - self.start_time
-        return elapsed
+    def time_elapsed(self):
+        from string import Template
+
+        class DeltaTemplate(Template):
+            delimiter = "%"
+
+        def strfdelta(tdelta, fmt='%H:%M:%S'):
+            d = {"D": tdelta.days}
+            hours, rem = divmod(tdelta.seconds, 3600)
+            minutes, seconds = divmod(rem, 60)
+            d["H"] = '{:02d}'.format(hours)
+            d["M"] = '{:02d}'.format(minutes)
+            d["S"] = '{:02d}'.format(seconds)
+            t = DeltaTemplate(fmt)
+            return t.substitute(**d)
+
+        # calculate elapsed time
+        elapsed_time = None
+        start_time = self.start_time
+        end_time = self.end_time
+
+        if start_time:
+            if end_time:
+                elapsed_time = end_time - start_time
+            elif self.status == RenderStatus.RUNNING:
+                elapsed_time = datetime.now() - start_time
+
+        elapsed_time_string = strfdelta(elapsed_time) if elapsed_time else "Unknown"
+        return elapsed_time_string
+
+    def json(self):
+        worker_data = self.__dict__.copy()
+        worker_data['percent_complete'] = self.percent_complete()
+        worker_data['time_elapsed'] = self.time_elapsed()
+        worker_data['status'] = self.status.value
+        keys_to_remove = ['thread', 'process']  # remove unwanted keys from dict
+        for key in worker_data.keys():
+            if key.startswith('_'):
+                keys_to_remove.append(key)
+        for key in keys_to_remove:
+            worker_data.pop(key, None)
+
+        # convert to json and back to auto-convert dates to iso format
+        def date_serializer(o):
+            if isinstance(o, datetime):
+                return o.isoformat()
+
+        json_convert = json.dumps(worker_data, default=date_serializer)
+        worker_json = json.loads(json_convert)
+        return worker_json
 
 
 class BaseRenderEngine(object):
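
The rewritten time_elapsed() formats a timedelta through string.Template with '%' as the delimiter, so '%H:%M:%S' becomes a substitution pattern. The same helper as a standalone worked example:

# Worked example of the DeltaTemplate/strfdelta idea used above.
from datetime import timedelta
from string import Template

class DeltaTemplate(Template):
    delimiter = "%"  # lets '%H' act like '$H' in Template.substitute

def strfdelta(tdelta, fmt='%H:%M:%S'):
    d = {"D": tdelta.days}
    hours, rem = divmod(tdelta.seconds, 3600)
    minutes, seconds = divmod(rem, 60)
    d["H"] = '{:02d}'.format(hours)
    d["M"] = '{:02d}'.format(minutes)
    d["S"] = '{:02d}'.format(seconds)
    return DeltaTemplate(fmt).substitute(**d)

print(strfdelta(timedelta(hours=1, minutes=2, seconds=3)))  # prints 01:02:03
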
@@ -3,72 +3,116 @@ import hashlib
 import json
 import logging
 import os
-import threading
 import uuid
 from datetime import datetime
 
+from sqlalchemy import Column, Integer, String, DateTime, JSON, event
+from sqlalchemy.ext.declarative import declarative_base
+
 from .render_workers.render_worker import RenderStatus, RenderWorkerFactory
 
 logger = logging.getLogger()
+Base = declarative_base()
 
 
-class ScheduledJob:
+class ScheduledJob(Base):
+    __tablename__ = 'scheduled_jobs'
+    id = Column(String, primary_key=True)
 
-    # Get file hash on bg thread
-    def __get_file_hash(self):
-        if os.path.exists(self.worker.input_path):
-            self.file_hash = hashlib.md5(open(self.worker.input_path, 'rb').read()).hexdigest()
+    renderer = Column(String)
+    input_path = Column(String)
+    output_path = Column(String)
+    priority = Column(Integer)
+    owner = Column(String)
+    client = Column(String)
+    notify = Column(String)
+    date_created = Column(DateTime)
+    scheduled_start = Column(DateTime)
+    name = Column(String)
+    file_hash = Column(String)
+    worker_data_store = Column(JSON)
 
     def __init__(self, renderer, input_path, output_path, args, priority=2, owner=None, client=None, notify=None,
                  custom_id=None, name=None):
         self.id = custom_id or self.generate_id()
-        self.owner = owner
+        self.renderer = renderer
+        self.input_path = input_path
+        self.output_path = output_path
         self.priority = priority
+        self.owner = owner
         self.client = client
         self.notify = notify
         self.date_created = datetime.now()
         self.scheduled_start = None
-        self.renderer = renderer
         self.name = name or os.path.basename(input_path) + '_' + self.date_created.strftime("%Y.%m.%d_%H.%M.%S")
 
-        self.worker = RenderWorkerFactory.create_worker(renderer, input_path, output_path, args)
-        self.worker.log_path = os.path.join(os.path.dirname(input_path), self.name + '.log')
-        self.worker.validate()
+        self.worker_object = RenderWorkerFactory.create_worker(renderer, input_path, output_path, args)
+        self.worker_object.log_path = self.log_path()
+        self.worker_object.validate()
 
         self.file_hash = None
-        threading.Thread(target=self.__get_file_hash).start()  # get file hash on bg thread
+        if not self.file_hash and os.path.exists(input_path):
+            self.file_hash = hashlib.md5(open(input_path, 'rb').read()).hexdigest()
+
+    def worker_data(self):
+        if hasattr(self, 'worker_object'):
+            fetched = self.worker_object.json()
+            if fetched != self.worker_data_store:
+                self.worker_data_store = fetched
+        return self.worker_data_store
+
+    @staticmethod
+    def before_insert(mapper, connection, target):
+        logger.debug(f"Before insert: {target.name}")
+        if hasattr(target, 'worker_object'):
+            target.worker_data_store = target.worker_object.json()
+
+    @staticmethod
+    def before_update(mapper, connection, target):
+        logger.debug(f"Before update: {target.name}")
+        if hasattr(target, 'worker_object'):
+            target.worker_data_store = target.worker_object.json()
+
+    @classmethod
+    def register_user_events(cls):
+        event.listen(cls, 'before_insert', cls.before_insert)
+        event.listen(cls, 'before_update', cls.before_update)
 
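
register_user_events() wires mapper-level hooks so worker_data_store is refreshed just before each INSERT and UPDATE. A self-contained sketch of that listener pattern (model and column names are illustrative, not the repo's):

# Sketch of SQLAlchemy mapper events, assuming an in-memory SQLite database.
from sqlalchemy import Column, Integer, String, create_engine, event
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    payload = Column(String)

def before_insert(mapper, connection, target):
    # derive the stored value at the last moment, as before_insert does above
    target.payload = (target.payload or '').upper()

event.listen(Item, 'before_insert', before_insert)

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Item(payload='hello'))
session.commit()
print(session.query(Item).one().payload)  # HELLO
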
     def render_status(self):
-        if self.scheduled_start and self.worker.status == RenderStatus.NOT_STARTED:
-            return RenderStatus.SCHEDULED
-        else:
-            return self.worker.status
-
-    def file_hash(self):
-        if os.path.exists(self.worker.input_path):
-            return hashlib.md5(open(self.worker.input_path, 'rb').read()).hexdigest()
-        return None
+        try:
+            worker_status = RenderStatus(self.worker_data()['status'])
+            if self.scheduled_start and worker_status == RenderStatus.NOT_STARTED:
+                return RenderStatus.SCHEDULED
+            else:
+                return worker_status
+        except Exception as e:
+            logger.error(f"Exception fetching render status: {e}")
+            return RenderStatus.UNDEFINED
 
     def json(self):
         """Converts RenderJob into JSON-friendly dict"""
         job_dict = None
         try:
-            job_dict = self.__dict__.copy()
-            job_dict['status'] = self.render_status().value
-            job_dict['time_elapsed'] = self.time_elapsed() if type(self.time_elapsed) != str else self.time_elapsed
-            job_dict['file_hash'] = self.file_hash
-            job_dict['percent_complete'] = self.percent_complete()
-            job_dict['file_list'] = self.file_list()
-            job_dict['worker'] = self.worker.__dict__.copy()
-            job_dict['worker']['status'] = job_dict['status']
-
-            # remove unwanted keys from dict
-            keys_to_remove = ['thread', 'process']
-            for key in job_dict['worker'].keys():
-                if key.startswith('_'):
-                    keys_to_remove.append(key)
-            for key in keys_to_remove:
-                job_dict['worker'].pop(key, None)
+            job_dict = {
+                'id': self.id,
+                'name': self.name,
+                'input_path': self.input_path,
+                'output_path': self.output_path,
+                'priority': self.priority,
+                'owner': self.owner,
+                'client': self.client,
+                'notify': self.notify,
+                'date_created': self.date_created,
+                'scheduled_start': self.scheduled_start,
+                'status': self.render_status().value,
+                'time_elapsed': self.worker_data().get('time_elapsed', None),
+                'file_hash': self.file_hash,
+                'percent_complete': self.worker_data().get('percent_complete', None),
+                'file_list': self.file_list(),
+                'renderer': self.renderer,
+                'worker': self.worker_data(),
+            }
 
             # convert to json and back to auto-convert dates to iso format
             def date_serializer(o):
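
json() still relies on the dumps-then-loads round trip to coerce datetimes into ISO strings. The trick in isolation:

# Round-tripping through json.dumps(default=...) converts datetimes in one pass.
import json
from datetime import datetime

def date_serializer(o):
    if isinstance(o, datetime):
        return o.isoformat()

data = {'date_created': datetime(2023, 5, 1, 12, 30), 'priority': 2}
print(json.loads(json.dumps(data, default=date_serializer)))
# {'date_created': '2023-05-01T12:30:00', 'priority': 2}
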
@@ -82,57 +126,22 @@ class ScheduledJob:
         return job_dict
 
     def start(self):
-        self.worker.start()
+        if hasattr(self, 'worker_object'):
+            self.worker_object.start()
 
     def stop(self):
-        self.worker.stop()
-
-    def time_elapsed(self):
-        from string import Template
-
-        class DeltaTemplate(Template):
-            delimiter = "%"
-
-        def strfdelta(tdelta, fmt='%H:%M:%S'):
-            d = {"D": tdelta.days}
-            hours, rem = divmod(tdelta.seconds, 3600)
-            minutes, seconds = divmod(rem, 60)
-            d["H"] = '{:02d}'.format(hours)
-            d["M"] = '{:02d}'.format(minutes)
-            d["S"] = '{:02d}'.format(seconds)
-            t = DeltaTemplate(fmt)
-            return t.substitute(**d)
-
-        # calculate elapsed time
-        elapsed_time = None
-        start_time = self.worker.start_time
-        end_time = self.worker.end_time
-
-        if start_time:
-            if end_time:
-                elapsed_time = end_time - start_time
-            elif self.render_status() == RenderStatus.RUNNING:
-                elapsed_time = datetime.now() - start_time
-
-        elapsed_time_string = strfdelta(elapsed_time) if elapsed_time else "Unknown"
-        return elapsed_time_string
-
-    def frame_count(self):
-        return self.worker.total_frames
+        if hasattr(self, 'worker_object'):
+            self.worker_object.stop()
 
     def work_path(self):
-        return os.path.dirname(self.worker.output_path)
+        return os.path.dirname(self.output_path)
 
     def file_list(self):
-        job_dir = os.path.dirname(self.worker.output_path)
+        job_dir = os.path.dirname(self.output_path)
         return glob.glob(os.path.join(job_dir, '*'))
 
     def log_path(self):
-        return self.worker.log_path
+        return os.path.join(os.path.dirname(self.input_path), self.name + '.log')
 
-    def percent_complete(self):
-        return self.worker.percent_complete()
-
     @classmethod
     def generate_id(cls):
@@ -49,7 +49,7 @@ def index():
     with open('config/presets.yaml') as f:
         presets = yaml.load(f, Loader=yaml.FullLoader)
 
-    return render_template('index.html', all_jobs=sorted_jobs(RenderQueue.job_queue),
+    return render_template('index.html', all_jobs=sorted_jobs(RenderQueue.all_jobs()),
                            hostname=RenderQueue.host_name, renderer_info=renderer_info(),
                            render_clients=RenderQueue.render_clients, preset_list=presets)
 
@@ -101,7 +101,7 @@ def get_job_file(job_id, filename):
 
 @server.get('/api/jobs')
 def jobs_json():
-    return [x.json() for x in RenderQueue.job_queue]
+    return [x.json() for x in RenderQueue.all_jobs()]
 
 
 @server.get('/api/jobs/<status_val>')
@@ -127,7 +127,7 @@ def get_job_status(job_id):
 @server.get('/api/job/<job_id>/logs')
 def get_job_logs(job_id):
     found_job = RenderQueue.job_with_id(job_id)
-    log_path = found_job.worker.log_path
+    log_path = found_job.log_path()
     log_data = None
     if log_path and os.path.exists(log_path):
         with open(log_path) as file:
@@ -151,9 +151,9 @@ def download_all(job_id):
         return response
 
     found_job = RenderQueue.job_with_id(job_id)
-    output_dir = os.path.dirname(found_job.worker.output_path)
+    output_dir = os.path.dirname(found_job.output_path)
     if os.path.exists(output_dir):
-        zip_filename = os.path.join('/tmp', pathlib.Path(found_job.worker.input_path).stem + '.zip')
+        zip_filename = os.path.join('/tmp', pathlib.Path(found_job.input_path).stem + '.zip')
         with ZipFile(zip_filename, 'w') as zipObj:
             for f in os.listdir(output_dir):
                 zipObj.write(filename=os.path.join(output_dir, f),
@@ -219,7 +219,7 @@ def full_status():
 @server.get('/api/snapshot')
 def snapshot():
     server_status = RenderQueue.status()
-    server_jobs = [x.json() for x in RenderQueue.job_queue]
+    server_jobs = [x.json() for x in RenderQueue.all_jobs()]
     server_data = {'status': server_status, 'jobs': server_jobs, 'timestamp': datetime.now().isoformat()}
     return server_data
 
@@ -405,7 +405,7 @@ def delete_job(job_id):
         os.remove(thumb_path)
 
     # See if we own the input file (i.e. was it uploaded)
-    input_dir = os.path.dirname(found_job.worker.input_path)
+    input_dir = os.path.dirname(found_job.input_path)
     if server.config['UPLOAD_FOLDER'] in input_dir and os.path.exists(input_dir):
         shutil.rmtree(input_dir)
 
@@ -10,7 +10,7 @@ columns: [
         data: (row) => row.name,
         formatter: (name, row) => gridjs.html(`<a href="/ui/job/${row.cells[0].data}/full_details">${name}</a>`)
     },
-    { id: 'renderer', data: (row) => `${row.renderer}-${row.worker.renderer_version}`, name: 'Renderer' },
+    { id: 'renderer', data: (row) => `${row.renderer}-${row.worker?.renderer_version}`, name: 'Renderer' },
     { id: 'priority', name: 'Priority' },
     { id: 'status',
       name: 'Status',
@@ -21,9 +21,9 @@ columns: [
                 value="${(parseFloat(cell.percent_complete) * 100.0)}" max="100">${cell.status}</progress>
         `)},
     { id: 'time_elapsed', name: 'Time Elapsed' },
-    { data: (row) => row.worker.total_frames, name: 'Frame Count' },
+    { data: (row) => row.worker?.total_frames ?? 'N/A', name: 'Frame Count' },
     { id: 'client', name: 'Client'},
-    { data: (row) => row.worker.last_output,
+    { data: (row) => row.worker?.last_output ?? 'N/A',
       name: 'Last Output',
      formatter: (output, row) => gridjs.html(`<a href="/api/job/${row.cells[0].data}/logs">${output}</a>`)
     },
@@ -33,7 +33,7 @@ def generate_thumbnail_for_job(job, thumb_video_path, thumb_image_path, max_widt
     elif len(job.file_list()) > 1:  # if image sequence, use second to last file (last may be in use)
         source_path = [job.file_list()[-2]]
     else:
-        source_path = [job.worker.input_path]  # use source if nothing else
+        source_path = [job.input_path]  # use source if nothing else
 
     if source_path:
         # Todo: convert image sequence to animated movie
@@ -1,11 +1,12 @@
 click~=8.1.3
-requests==2.28.1
+requests==2.31.0
-psutil~=5.9.0
+psutil==5.9.5
 PyYAML~=6.0
-Flask==2.2.2
+Flask==2.3.2
-rich==12.6.0
+rich==13.3.5
 ffmpeg-python
-Werkzeug~=2.2.2
+Werkzeug==2.3.4
 tkinterdnd2~=0.3.0
-future~=0.18.2
+future==0.18.3
 json2html~=1.3.0
+SQLAlchemy~=2.0.15