diff --git a/src/api/api_server.py b/src/api/api_server.py
index 6938d71..aab98dc 100755
--- a/src/api/api_server.py
+++ b/src/api/api_server.py
@@ -55,15 +55,30 @@ def sorted_jobs(all_jobs, sort_by_date=True):
 @server.get('/api/jobs')
 def jobs_json():
     try:
-        hash_token = request.args.get('token', None)
         all_jobs = [x.json() for x in RenderQueue.all_jobs()]
         job_cache_int = int(json.dumps(all_jobs).__hash__())
         job_cache_token = num_to_alphanumeric(job_cache_int)
-        if hash_token and hash_token == job_cache_token:
-            return [], 204  # no need to update
         return {'jobs': all_jobs, 'token': job_cache_token}
     except Exception as e:
-        logger.exception(f"Exception fetching all_jobs_cached: {e}")
+        logger.exception(f"Exception fetching jobs_json: {e}")
+        return [], 500
+
+
+@server.get('/api/jobs_long_poll')
+def long_polling_jobs():
+    try:
+        hash_token = request.args.get('token', None)
+        start_time = time.time()
+        while True:
+            all_jobs = jobs_json()
+            if all_jobs['token'] != hash_token:
+                return all_jobs
+            # Break after 30 seconds to avoid gateway timeout
+            if time.time() - start_time > 30:
+                return [], 204
+            time.sleep(1)
+    except Exception as e:
+        logger.exception(f"Exception fetching long_polling_jobs: {e}")
         return [], 500
 
 
diff --git a/src/api/server_proxy.py b/src/api/server_proxy.py
index 0536446..24e2f4d 100644
--- a/src/api/server_proxy.py
+++ b/src/api/server_proxy.py
@@ -126,8 +126,9 @@ class RenderServerProxy:
             self.__update_job_cache(timeout, ignore_token)
         return self.__jobs_cache.copy() if self.__jobs_cache else None
 
-    def __update_job_cache(self, timeout=5, ignore_token=False):
-        url = f'jobs?token={self.__jobs_cache_token}' if self.__jobs_cache_token and not ignore_token else 'jobs'
+    def __update_job_cache(self, timeout=30, ignore_token=False):
+        url = f'jobs_long_poll?token={self.__jobs_cache_token}' if (self.__jobs_cache_token and
+                                                                    not ignore_token) else 'jobs'
         status_result = self.request_data(url, timeout=timeout)
         if status_result is not None:
             sorted_jobs = []
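
Not part of the patch itself: a minimal sketch of how a client other than RenderServerProxy could consume the new /api/jobs_long_poll endpoint, assuming the API server is reachable at http://localhost:8080 and the third-party requests library is available; BASE_URL, poll_jobs_forever and handle_jobs are illustrative names only.

    import requests

    BASE_URL = 'http://localhost:8080'  # assumed address of the render API server

    def handle_jobs(jobs):
        # Hypothetical callback; a real client would refresh its UI or cache here.
        print(f'received {len(jobs)} jobs')

    def poll_jobs_forever():
        token = None
        while True:
            params = {'token': token} if token else {}
            # The server holds the request for up to ~30 s before answering 204,
            # so the client-side timeout must be longer than that window.
            resp = requests.get(f'{BASE_URL}/api/jobs_long_poll', params=params, timeout=35)
            if resp.status_code == 204:
                continue  # nothing changed within the server's window; poll again
            resp.raise_for_status()
            payload = resp.json()
            token = payload['token']      # hash token sent back on the next poll
            handle_jobs(payload['jobs'])

The client timeout (35 s in this sketch) is deliberately longer than the server's 30-second hold so the long poll is not cut off prematurely, which mirrors the timeout default being raised from 5 to 30 in __update_job_cache above.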