Improve performance of job status updates by caching job-list hashes

This commit is contained in:
Brett Williams
2023-06-03 16:59:01 -05:00
parent 7dc7f17f83
commit a9257a6bf5
4 changed files with 45 additions and 17 deletions

View File

@@ -55,6 +55,22 @@ def index():
render_clients=render_clients(), preset_list=presets)
@server.get('/api/jobs')
def jobs_json():
    """Return every job in the render queue plus a cache token.

    If the client supplies a ``token`` query parameter that matches the
    digest of the current job list, respond with HTTP 204 (no body) so the
    client can skip re-rendering. Otherwise return the full job list and
    the fresh token.

    Returns:
        204 with an empty payload when the client's token is current,
        a ``{'jobs': [...], 'token': str}`` dict otherwise,
        or ``([], 500)`` if fetching jobs raises.
    """
    import hashlib  # local import: keeps this diff self-contained

    try:
        hash_token = request.args.get('token', None)
        all_jobs = [x.json() for x in RenderQueue.all_jobs()]
        # Use a content digest rather than str.__hash__(): string hashing is
        # randomized per-process (PYTHONHASHSEED), so hash-based tokens would
        # never match across multiple workers or server restarts.
        # sort_keys makes the serialization — and thus the token — canonical.
        serialized = json.dumps(all_jobs, sort_keys=True)
        job_cache_token = hashlib.sha256(serialized.encode('utf-8')).hexdigest()
        if hash_token and hash_token == job_cache_token:
            return [], 204  # no need to update
        else:
            return {'jobs': all_jobs, 'token': job_cache_token}
    except Exception as e:
        logger.exception(f"Exception fetching all_jobs_cached: {e}")
        return [], 500
@server.route('/ui/job/<job_id>/full_details')
def job_detail(job_id):
found_job = RenderQueue.job_with_id(job_id)
@@ -131,15 +147,6 @@ def get_job_file(job_id, filename):
abort(404)
@server.get('/api/jobs')
def jobs_json():
    """Return the JSON representation of every job in the render queue.

    Returns:
        A list of per-job JSON dicts, or ``([], 500)`` if fetching fails.
    """
    try:
        serialized_jobs = [job.json() for job in RenderQueue.all_jobs()]
        return serialized_jobs
    except Exception as e:
        logger.exception(f"Exception fetching all_jobs: {e}")
        return [], 500
@server.get('/api/jobs/<status_val>')
def filtered_jobs_json(status_val):
state = string_to_status(status_val)