Save worker information to database for persistence across runs and abstract away data source

Brett Williams
2023-05-24 12:50:16 -05:00
parent e11c5e7e58
commit 2a7eddb1eb
3 changed files with 41 additions and 29 deletions


@@ -6,7 +6,7 @@ import os
 import uuid
 from datetime import datetime
-from sqlalchemy import Column, Integer, String, DateTime, JSON
+from sqlalchemy import Column, Integer, String, DateTime, JSON, event
 from sqlalchemy.ext.declarative import declarative_base
 from .render_workers.render_worker import RenderStatus, RenderWorkerFactory
@@ -30,6 +30,7 @@ class ScheduledJob(Base):
     scheduled_start = Column(DateTime)
     name = Column(String)
     file_hash = Column(String)
+    worker_data_store = Column(JSON)
 
     def __init__(self, renderer, input_path, output_path, args, priority=2, owner=None, client=None, notify=None,
                  custom_id=None, name=None):
@@ -54,18 +55,36 @@ class ScheduledJob(Base):
         if not self.file_hash and os.path.exists(input_path):
             self.file_hash = hashlib.md5(open(input_path, 'rb').read()).hexdigest()
 
-    def worker(self):
+    def worker_data(self):
         if hasattr(self, 'worker_object'):
-            return self.worker_object
-        else:
-            return {}
+            fetched = self.worker_object.json()
+            if fetched != self.worker_data_store:
+                self.worker_data_store = fetched
+        return self.worker_data_store
+
+    @staticmethod
+    def before_insert(mapper, connection, target):
+        logger.debug(f"Before insert: {target.name}")
+        if hasattr(target, 'worker_object'):
+            target.worker_data_store = target.worker_object.json()
+
+    @staticmethod
+    def before_update(mapper, connection, target):
+        logger.debug(f"Before update: {target.name}")
+        if hasattr(target, 'worker_object'):
+            target.worker_data_store = target.worker_object.json()
+
+    @classmethod
+    def register_user_events(cls):
+        event.listen(cls, 'before_insert', cls.before_insert)
+        event.listen(cls, 'before_update', cls.before_update)
 
     def render_status(self):
         try:
-            if self.scheduled_start and self.worker().status == RenderStatus.NOT_STARTED:
+            if self.scheduled_start and self.worker_object.status == RenderStatus.NOT_STARTED:
                 return RenderStatus.SCHEDULED
             else:
-                return self.worker().status
+                return self.worker_object.status
         except:
             return RenderStatus.CANCELLED
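
The register_user_events() hook above relies on SQLAlchemy's mapper events, which run during flush so that attributes set on the target still land in the emitted statement. A minimal, self-contained sketch of that mechanism; the model and listener names here are illustrative, not taken from this repository:

# Standalone sketch of the mapper-event mechanism the listeners above rely on;
# the Thing model and its payload column are illustrative, not from this codebase.
from sqlalchemy import Column, Integer, JSON, create_engine, event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Thing(Base):
    __tablename__ = 'things'
    id = Column(Integer, primary_key=True)
    payload = Column(JSON)

def before_insert(mapper, connection, target):
    # Fires during flush, right before the INSERT is emitted, so column
    # attributes set on `target` here still end up in the stored row.
    target.payload = {'snapshot': True}

event.listen(Thing, 'before_insert', before_insert)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Thing())
    session.commit()                              # flush triggers the listener
    print(session.query(Thing).one().payload)     # {'snapshot': True}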
@@ -85,12 +104,12 @@ class ScheduledJob(Base):
             'date_created': self.date_created,
             'scheduled_start': self.scheduled_start,
             'status': self.render_status().value,
-            'time_elapsed': self.worker().time_elapsed() if hasattr(self.worker(), 'time_elapsed') else None,
+            'time_elapsed': self.worker_data().get('time_elapsed', None),
             'file_hash': self.file_hash,
-            'percent_complete': self.percent_complete(),
+            'percent_complete': self.worker_data().get('percent_complete', None),
             'file_list': self.file_list(),
             'renderer': self.renderer,
-            'worker': self.worker().json() if hasattr(self.worker(), 'json') else {},
+            'worker': self.worker_data(),
         }
 
         # convert to json and back to auto-convert dates to iso format
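
The comment above refers to a json round-trip that turns datetime values into ISO strings. A minimal sketch of that trick; the encoder choice (default=...) is an assumption, since the actual serializer is not shown in this diff:

# Assumed encoder; the repository's actual json round-trip is not shown here.
import json
from datetime import datetime

job_dict = {'date_created': datetime(2023, 5, 24, 12, 50, 16)}
round_tripped = json.loads(json.dumps(job_dict, default=lambda o: o.isoformat()))
print(round_tripped)  # {'date_created': '2023-05-24T12:50:16'}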
@@ -105,15 +124,12 @@ class ScheduledJob(Base):
         return job_dict
 
     def start(self):
-        if hasattr(self, 'worker'):
-            self.worker().start()
+        if hasattr(self, 'worker_object'):
+            self.worker_object.start()
 
     def stop(self):
-        if hasattr(self, 'worker'):
-            self.worker().stop()
-
-    def frame_count(self):
-        return self.worker().total_frames
+        if hasattr(self, 'worker_object'):
+            self.worker_object.stop()
 
     def work_path(self):
         return os.path.dirname(self.output_path)
@@ -125,12 +141,6 @@ class ScheduledJob(Base):
     def log_path(self):
         return os.path.join(os.path.dirname(self.input_path), self.name + '.log')
 
-    def percent_complete(self):
-        try:
-            return self.worker().percent_complete()
-        except:
-            return -1
-
     @classmethod
     def generate_id(cls):
        return str(uuid.uuid4()).split('-')[0]
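
For the new listeners to fire at all, register_user_events() has to run once before the first flush. A hedged wiring sketch; the module path, engine URL, and session setup are assumptions, not part of this commit:

# Hypothetical startup wiring; the import path and database URL are assumptions.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from scheduler.scheduled_job import ScheduledJob, Base   # assumed module layout

ScheduledJob.register_user_events()    # attach before_insert / before_update

engine = create_engine('sqlite:///jobs.db')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

# On a later run, jobs come back with worker_data_store already populated, so
# worker_data() can serve the stored snapshot even before a live worker exists.
with Session() as session:
    for job in session.query(ScheduledJob).all():
        print(job.name, job.worker_data())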