Improve performance of job status updates by caching job-list hash tokens

Brett Williams
2023-06-03 16:59:01 -05:00
parent 7dc7f17f83
commit a9257a6bf5
4 changed files with 45 additions and 17 deletions


@@ -55,6 +55,22 @@ def index():
                            render_clients=render_clients(), preset_list=presets)
+@server.get('/api/jobs')
+def jobs_json():
+    try:
+        hash_token = request.args.get('token', None)
+        all_jobs = [x.json() for x in RenderQueue.all_jobs()]
+        job_cache_token = str(json.dumps(all_jobs).__hash__())
+        if hash_token and hash_token == job_cache_token:
+            return [], 204  # no need to update
+        else:
+            return {'jobs': all_jobs, 'token': job_cache_token}
+    except Exception as e:
+        logger.exception(f"Exception fetching all_jobs_cached: {e}")
+        return [], 500
 @server.route('/ui/job/<job_id>/full_details')
 def job_detail(job_id):
     found_job = RenderQueue.job_with_id(job_id)
@@ -131,15 +147,6 @@ def get_job_file(job_id, filename):
     abort(404)
-@server.get('/api/jobs')
-def jobs_json():
-    try:
-        return [x.json() for x in RenderQueue.all_jobs()]
-    except Exception as e:
-        logger.exception(f"Exception fetching all_jobs: {e}")
-        return [], 500
 @server.get('/api/jobs/<status_val>')
 def filtered_jobs_json(status_val):
     state = string_to_status(status_val)
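
The new handler lets a client skip redundant refreshes: it hashes the serialized job list into a token, returns the token alongside the jobs, and answers 204 when the client echoes a token that still matches. Because the token comes from Python's per-process string hashing (randomized by default), it is only comparable against the same server process. A minimal polling sketch against this endpoint; the base URL and the poll_jobs helper are illustrative, not part of the commit:

import requests

BASE_URL = "http://localhost:8080"  # assumed host/port, not taken from the commit

def poll_jobs(cached_jobs=None, cached_token=None, timeout=5):
    # Echo the last token back so the server can answer 204 for an unchanged job list.
    params = {'token': cached_token} if cached_token else {}
    resp = requests.get(f"{BASE_URL}/api/jobs", params=params, timeout=timeout)
    if resp.status_code == 204:
        # Cache hit: keep the jobs we already have.
        return cached_jobs, cached_token
    resp.raise_for_status()
    data = resp.json()
    return data['jobs'], data['token']

jobs, token = poll_jobs()                                      # first call: full payload
jobs, token = poll_jobs(cached_jobs=jobs, cached_token=token)  # 204 if nothing changed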


@@ -1,3 +1,4 @@
+import logging
 import os
 import json
 import requests
@@ -10,13 +11,25 @@ status_colors = {RenderStatus.ERROR: "red", RenderStatus.CANCELLED: 'orange1', R
 categories = [RenderStatus.RUNNING, RenderStatus.ERROR, RenderStatus.NOT_STARTED, RenderStatus.SCHEDULED,
               RenderStatus.COMPLETED, RenderStatus.CANCELLED, RenderStatus.UNDEFINED]
+logger = logging.getLogger()
 class RenderServerProxy:
     def __init__(self, hostname=None, server_port="8080"):
-        self.hostname = hostname
+        self._hostname = hostname
         self.port = server_port
         self.fetched_status_data = None
+        self.__jobs_cache_token = None
+    @property
+    def hostname(self):
+        return self._hostname
+    @hostname.setter
+    def hostname(self, value):
+        self._hostname = value
+        self.__jobs_cache_token = None
     def connect(self):
         status = self.request_data('status')
@@ -25,24 +38,29 @@ class RenderServerProxy:
     def request_data(self, payload, timeout=5):
         try:
             req = self.request(payload, timeout)
-            if req.ok:
+            if req.ok and req.status_code == 200:
                 return req.json()
+        except requests.ConnectionError as e:
+            logger.error(f"Connection error: {e}")
         except Exception as e:
-            pass
+            logger.exception(f"Uncaught exception: {e}")
         return None
     def request(self, payload, timeout=5):
         return requests.get(f'http://{self.hostname}:{self.port}/api/{payload}', timeout=timeout)
     def get_jobs(self, timeout=5):
-        all_jobs = self.request_data('jobs', timeout=timeout)
-        if all_jobs is not None:
+        url = f'jobs?token={self.__jobs_cache_token}' if self.__jobs_cache_token else 'jobs'
+        status_result = self.request_data(url, timeout=timeout)
+        all_jobs = None
+        if status_result is not None:
             sorted_jobs = []
             for status_category in categories:
-                found_jobs = [x for x in all_jobs if x['status'] == status_category.value]
+                found_jobs = [x for x in status_result['jobs'] if x['status'] == status_category.value]
                 if found_jobs:
                     sorted_jobs.extend(found_jobs)
             all_jobs = sorted_jobs
+            self.__jobs_cache_token = status_result['token']
         return all_jobs
     def get_data(self, timeout=5):
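
With these changes the proxy remembers the last token in __jobs_cache_token and sends it on the next poll; when the server replies 204, request_data returns None (it only accepts status 200), so get_jobs returns None and the caller should keep its previous list. A usage sketch under that reading, with hostname and port as placeholders:

proxy = RenderServerProxy(hostname="render-server.local", server_port="8080")

latest = proxy.get_jobs()     # first poll: no token yet, returns the status-sorted job list
refreshed = proxy.get_jobs()  # second poll sends the cached token back to the server
if refreshed is not None:     # None means "no change" (or an error), so keep the old list
    latest = refreshed

proxy.hostname = "other-server.local"  # the new setter also clears the cached token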


@@ -58,7 +58,7 @@ columns: [
     autoWidth: true,
     server: {
         url: '/api/jobs',
-        then: results => results,
+        then: results => results['jobs'],
     },
     sort: true,
 }).render(document.getElementById('table'));
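
The then callback above now has to unwrap the wrapper object that the reworked /api/jobs handler returns instead of a bare list. A rough sketch of a 200 response body that both the table and the proxy consume, with placeholder contents:

{
    'jobs': [...],   # the list the table renders; each entry carries at least a 'status' field
    'token': '...',  # hash token that clients echo back on their next request
}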