Another attempt #2

Brett Williams
2025-03-12 00:40:33 -05:00
parent 3c92a0775f
commit e07e2bd997
5 changed files with 5 additions and 5 deletions

cross_py_builder/agent_manager.py Executable file
@@ -0,0 +1,339 @@
#!/usr/bin/env python3
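"""Client-side manager for cross_py_builder build agents.

Summary inferred from the code below: discovers agents over zeroconf (plus a
../known_hosts file), queries their /status endpoints, submits build jobs from
a zipped project directory or a git URL, downloads the resulting binaries, and
can update, restart, or shut down the remote agents.
"""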
import argparse
import concurrent.futures
import os
import socket
import tempfile
import time
import zipfile
from datetime import datetime, timedelta
import requests
from packaging.version import Version
from tabulate import tabulate
from build_agent import build_agent_version
from zeroconf_server import ZeroconfServer
DEFAULT_PORT = 9001
TIMEOUT = 10
def find_server_ips():
ZeroconfServer.configure("_crosspybuilder._tcp.local.", socket.gethostname(), DEFAULT_PORT)
hostnames = []
try:
ZeroconfServer.start(listen_only=True)
        time.sleep(0.3)  # give discovery a moment to hear from peers on the network
hostnames = ZeroconfServer.found_ip_addresses()
except KeyboardInterrupt:
pass
finally:
ZeroconfServer.stop()
    # Merge in manually configured hosts ("host[:port]" per line), skipping blanks
    try:
        with open("../known_hosts", "r") as file:
            hostnames.extend(line.strip() for line in file if line.strip())
    except FileNotFoundError:
        pass
    return hostnames
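# Sketch of the expected return value (addresses are assumptions for illustration):
#   find_server_ips()  ->  ["192.168.1.20", "buildbox.local:9001"]
# i.e. bare IPs from zeroconf plus "host[:port]" entries from ../known_hosts.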
def get_all_servers_status():
table_data = []
server_ips = find_server_ips()
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = []
for server in server_ips:
ip = server.split(':')[0]
port = server.split(":")[-1].strip() if ":" in server else DEFAULT_PORT
futures.append(executor.submit(get_worker_status, ip, port))
for future in concurrent.futures.as_completed(futures):
try:
result = future.result() # Get the result of the thread
table_data.append(result)
except Exception as e:
print(f"Error fetching status from server: {e}") # Handle potential errors
return table_data
def get_worker_status(hostname, port=DEFAULT_PORT):
"""Fetch worker status from the given hostname."""
try:
response = requests.get(f"http://{hostname}:{port}/status", timeout=TIMEOUT)
status = response.json()
status['port'] = port
if status['hostname'] != hostname and status['ip'] != hostname:
status['ip'] = socket.gethostbyname(hostname)
return status
except requests.exceptions.RequestException as e:
return {"hostname": hostname, "port": port, "status": "offline"}
def zip_project(source_dir, output_zip):
"""Zips the given directory."""
excluded_dirs = {"venv", ".venv", "dist", "build"}
with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(source_dir):
dirs[:] = [d for d in dirs if d.lower() not in excluded_dirs]
for file in files:
file_path = os.path.join(root, file)
zipf.write(file_path, os.path.relpath(file_path, source_dir))
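# Minimal usage sketch ("myproject" is a hypothetical path, not from this commit):
#   zip_project("./myproject", "/tmp/myproject.zip")
# walks ./myproject, skips venv/.venv/dist/build, and stores paths relative to
# the project root so the agent can extract the archive anywhere.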
def send_build_request(server_ip, server_port=DEFAULT_PORT, zip_file=None, git_url=None, download_after=True, version=None):
if zip_file:
upload_url = f"http://{server_ip}:{server_port}/upload"
print(f"Submitting build request to URL: {upload_url} - Please wait. This may take a few minutes...")
with open(zip_file, 'rb') as f:
response = requests.post(upload_url, files={"file": f})
elif git_url:
checkout_url = f"http://{server_ip}:{server_port}/checkout_git"
response = requests.post(checkout_url, json={"repo_url": git_url})
else:
raise ValueError("Missing zip file or git url!")
if response.status_code == 200:
response_data = response.json()
print(response_data)
print(f"Build successful. ID: {response_data['id']} Hostname: {response_data['hostname']} OS: {response_data['os']} CPU: {response_data['cpu']} Spec files: {len(response_data['spec_files'])} - Elapsed time: {response_data['duration']}" )
if download_after:
download_url = f"http://{server_ip}:{server_port}/download/{response_data.get('id')}"
try:
                # zip_file is None for git builds, so fall back to the repo URL
                base_name = os.path.splitext(os.path.basename(zip_file or git_url.rstrip("/")))[0]
version_string = ("-" + version) if version else ""
save_name = f"{base_name}{version_string}-{response_data['os'].lower()}-{response_data['cpu'].lower()}.zip"
download_zip(download_url, save_name=save_name)
except Exception as e:
print(f"Error downloading zip: {e}")
else:
print("Upload failed:", response.status_code, response.text)
def download_zip(url, save_name, save_dir="."):
"""Download a ZIP file from a URL and save it with its original filename."""
response = requests.get(url, stream=True)
response.raise_for_status() # Raise an error if request fails
save_path = os.path.join(save_dir, save_name)
with open(save_path, "wb") as file:
for chunk in response.iter_content(chunk_size=8192): # Download in chunks
file.write(chunk)
print(f"Saved binaries to file: {save_path}")
return save_path
def select_server(servers, cpu=None, os_name=None):
"""Selects a build server based on CPU architecture and OS filters."""
available = [s for s in servers if s["status"] == "ready"]
if cpu:
available = [s for s in available if cpu.lower() in s["cpu"].lower()]
if os_name:
available = [s for s in available if os_name.lower() in s["os"].lower()]
return available[0] if available else None # Return first matching server or None
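# Example with hypothetical server dicts: pick the first ready macOS/arm64 agent:
#   select_server(servers, cpu="arm64", os_name="macos")
# Note that process_new_job() below repeats this filtering inline rather than
# calling this helper.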
def process_new_job(args, server_data):
available_servers = [s for s in server_data if s["status"] == "ready"]
if args.cpu:
available_servers = [s for s in available_servers if args.cpu.lower() in s["cpu"].lower()]
if args.os:
available_servers = [s for s in available_servers if args.os.lower() in s["os"].lower()]
if not available_servers:
print("No available servers matching the criteria.")
return
# Keep only unique servers
unique_servers = {}
for server in available_servers:
key = (server["cpu"], server["os"])
if key not in unique_servers:
unique_servers[key] = server
available_servers = list(unique_servers.values())
print(f"Found {len(available_servers)} servers available to build")
print(tabulate(available_servers, headers="keys", tablefmt="grid"))
zip_file = None
if args.build:
project_path = args.build
tmp_dir = tempfile.gettempdir()
zip_file = os.path.join(tmp_dir, f"{os.path.basename(project_path)}.zip")
zip_project(project_path, zip_file)
print(f"Zipped {project_path} to {zip_file}")
# Start builds on all matching servers
with concurrent.futures.ThreadPoolExecutor() as executor:
print("Submitting builds to:")
download = True
for server in available_servers:
print(f"\t{server['hostname']} - {server['os']} - {server['cpu']}")
        futures = {executor.submit(send_build_request, server["ip"],
                                   server.get("port", DEFAULT_PORT),
                                   zip_file, args.checkout, download, args.version): server
                   for server in available_servers}
# Collect results
for future in concurrent.futures.as_completed(futures):
server = futures[future]
try:
response = future.result() # Get the response
except Exception as e:
print(f"Build failed on {server['hostname']}: {e}")
try:
if zip_file:
os.remove(zip_file)
except Exception as e:
print(f"Error removing zip file: {e}")
def delete_cache(server_data):
available_servers = [s for s in server_data if s["status"] == "ready"]
print(f"Deleting cache in from all available servers ({len(available_servers)})")
for server in available_servers:
try:
response = requests.get(f"http://{server['ip']}:{server.get('port', DEFAULT_PORT)}/delete_cache")
response.raise_for_status()
print(f"Cache cleared on {server['hostname']}")
except Exception as e:
print(f"Error deleting cache on {server['hostname']}: {e}")
def update_worker(server):
try:
print(f"Updating {server['hostname']} from {server.get('agent_version')} => {build_agent_version}")
with open("build_agent.py", "rb") as file1, open("../requirements.txt", "rb") as file2:
update_files = {
"file1": open("build_agent.py", "rb"),
"file2": open("../requirements.txt", "rb")
}
response = requests.post(f"http://{server['ip']}:{server.get('port', DEFAULT_PORT)}/update",
files=update_files)
response.raise_for_status()
response_json = response.json()
if response_json.get('updated_files') and not response_json.get('error_files'):
try:
requests.get(f"http://{server['ip']}:{server.get('port', DEFAULT_PORT)}/restart", timeout=TIMEOUT)
except requests.exceptions.ConnectionError:
pass
return server
else:
print(f"Error updating {server['hostname']}. Errors: {response_json.get('error_files')} - Updated: {response_json.get('updated_files')}")
except Exception as e:
print(f"Unhandled error updating {server['hostname']}: {e}")
return None
def update_build_workers(server_data):
available_workers = [s for s in server_data if s["status"] == "ready"]
workers_to_update = [x for x in available_workers if Version(x.get('agent_version')) < Version(build_agent_version)]
if not workers_to_update:
print(f"All {len(available_workers)} workers up to date")
return
print(f"Updating workers on all available servers ({len(workers_to_update)}) to {build_agent_version}")
updated_servers = []
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = {executor.submit(update_worker, server): server for server in workers_to_update}
for future in concurrent.futures.as_completed(futures):
server = future.result()
if server:
updated_servers.append(server)
if updated_servers:
print("Waiting for servers to restart...")
unverified_servers = {server["ip"]: server for server in updated_servers}
end_time = datetime.now() + timedelta(seconds=30)
while unverified_servers and datetime.now() < end_time:
            for server_ip in list(unverified_servers.keys()):  # Iterate over a copy to avoid modification issues
                server_info = unverified_servers[server_ip]  # Full server details
                try:
                    response = requests.get(f"http://{server_ip}:{server_info.get('port', DEFAULT_PORT)}/status")
                    response.raise_for_status()
agent_version = response.json().get('agent_version')
if agent_version == build_agent_version:
print(f"Agent on {server_info['hostname']} successfully upgraded to {build_agent_version}")
else:
print(f"Agent on {server_info['hostname']} failed to upgrade. Still on version {agent_version}")
unverified_servers.pop(server_ip)
except requests.exceptions.ConnectionError:
pass # Server is still restarting
if unverified_servers:
time.sleep(1) # Short delay before retrying to avoid spamming requests
if unverified_servers:
print("Some servers did not restart in time:", list(unverified_servers.keys()))
print("Update complete")
def shutdown_agent(hostname):
print(f"Shutting down hostname: {hostname}")
try:
requests.get(f"http://{hostname}:{DEFAULT_PORT}/shutdown", timeout=TIMEOUT)
except (requests.exceptions.ConnectionError, TimeoutError):
pass
def restart_agent(hostname):
print(f"Restarting agent: {hostname}")
try:
requests.get(f"http://{hostname}:{DEFAULT_PORT}/restart", timeout=TIMEOUT)
except (requests.exceptions.ConnectionError, TimeoutError):
pass
def main():
parser = argparse.ArgumentParser(description="Build agent manager for cross_py_builder")
parser.add_argument("--status", action="store_true", help="Get status of available servers")
parser.add_argument("--build", type=str, help="Path to the project to build")
parser.add_argument("--checkout", type=str, help="Url to Git repo for checkout")
parser.add_argument("-cpu", type=str, help="CPU architecture")
parser.add_argument("-os", type=str, help="Operating system")
parser.add_argument("-version", type=str, help="Version number for build")
parser.add_argument("--delete-cache", action="store_true", help="Delete cache")
parser.add_argument("--update-all", action="store_true", help="Update build agent")
parser.add_argument("--restart", type=str, help="Hostname to restart")
parser.add_argument("--restart-all", action="store_true", help="Restart all agents")
parser.add_argument("--shutdown", type=str, help="Hostname to shutdown")
parser.add_argument("--shutdown-all", action="store_true", help="Shutdown all agents")
args = parser.parse_args()
if args.status:
server_data = get_all_servers_status()
server_data = [x for x in server_data if x['status'] != 'offline']
print(tabulate(server_data, headers="keys", tablefmt="grid"))
return
elif args.restart:
restart_agent(args.restart)
elif args.restart_all:
print("Restarting all agents...")
for server_ip in find_server_ips():
restart_agent(server_ip)
elif args.shutdown:
shutdown_agent(args.shutdown)
elif args.shutdown_all:
print("Shutting down all agents...")
for server_ip in find_server_ips():
shutdown_agent(server_ip)
elif args.delete_cache:
delete_cache(get_all_servers_status())
elif args.build or args.checkout:
process_new_job(args, get_all_servers_status())
elif args.update_all:
update_build_workers(get_all_servers_status())
    else:
        parser.print_help()
if __name__ == "__main__":
main()
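
# Usage sketches assembled from the argparse flags above (paths, versions, and
# URLs are placeholders, not values from the original commit):
#   python3 agent_manager.py --status
#   python3 agent_manager.py --build ../myproject -os macos -cpu arm64 -version 1.2.3
#   python3 agent_manager.py --checkout https://example.com/me/myproject.git
#   python3 agent_manager.py --update-all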

cross_py_builder/build_agent.py Executable file
@@ -0,0 +1,407 @@
#!/usr/bin/env python3
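"""Flask-based build worker for cross_py_builder.

Summary inferred from the routes below: advertises itself over zeroconf,
accepts a zipped project upload or a git URL, builds every PyInstaller .spec
file it finds inside a fresh virtual environment, and serves the zipped
dist/ output back via /download/<job_id>.
"""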
import datetime
import signal
import socket
import sys
import tempfile
import time
from flask import Flask, request, jsonify, send_file
import os
import zipfile
import subprocess
import glob
import shutil
import uuid
import platform
from zeroconf_server import ZeroconfServer
APP_NAME = "cross_py_builder"
build_agent_version = "0.1.34"
app_port = 9001
app = Flask(__name__)
launch_time = datetime.datetime.now()
LAUNCH_DIR = os.getcwd()  # absolute launch directory (os.curdir is just ".")
SCRIPT_PATH = os.path.basename(__file__)
LOCAL_DIR = os.path.dirname(__file__)
BUILD_DIR = "pybuild-data"
TMP_DIR = tempfile.gettempdir()
system_status = {"status": "ready", "running_job": None}
def is_windows():
return platform.system().lower() == "windows"
@app.route('/update', methods=['POST'])
def update_files():
if not request.files:
return {"error": "No files"}, 400
print("Updating build agent...")
system_status['status'] = "updating"
requirements_path = os.path.join(LOCAL_DIR, "requirements.txt")
needs_install_requirements = False
updated_files = []
error_files = []
for key in request.files:
uploaded_file = request.files[key]
if uploaded_file.filename:
original_filename = uploaded_file.filename
temp_save_path = os.path.join(LOCAL_DIR, f"{original_filename}.tmp")
uploaded_file.save(temp_save_path)
if os.path.getsize(temp_save_path):
try:
backup_path = os.path.join(LOCAL_DIR, original_filename + ".old")
local_file_path = os.path.join(LOCAL_DIR, original_filename)
os.rename(local_file_path, backup_path)
shutil.move(temp_save_path, local_file_path)
os.remove(backup_path)
needs_install_requirements |= (requirements_path == local_file_path)
updated_files.append(original_filename)
except Exception as e:
print(f"Exception updating file ({original_filename}): {e}")
error_files.append(original_filename)
else:
print(f"Invalid size for {temp_save_path}!")
error_files.append(original_filename)
os.remove(temp_save_path)
if os.path.exists(requirements_path) and needs_install_requirements:
print(f"\nInstalling Required Packages...")
python_exec = "python" if is_windows() else "python3"
subprocess.run([python_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
check=True)
print("Update complete")
return jsonify({'updated_files': updated_files, 'error_files': error_files}), 200 if not error_files else 500
@app.get("/restart")
def restart():
system_status['status'] = "restarting"
ZeroconfServer.stop()
print("=== Restarting ===")
if os.name == "nt": # Windows
DETACHED_PROCESS = 0x00000008
CREATE_NEW_PROCESS_GROUP = 0x00000200
        # Fix hanging issue by passing STARTUPINFO to the child process
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # Prevents extra console window pop-up
        subprocess.Popen(
            [sys.executable, SCRIPT_PATH],
            creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
            startupinfo=si,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
else: # Linux/macOS
subprocess.Popen(
[sys.executable, SCRIPT_PATH],
stdout=sys.stdout,
stderr=sys.stderr,
start_new_session=True
)
try:
return jsonify({"message": "=== Restarting ==="}), 200
finally:
time.sleep(0.1)
os.kill(os.getpid(), signal.SIGTERM)
@app.get("/shutdown")
def shutdown():
try:
print("=== SHUTTING DOWN ===")
system_status['status'] = "shutting_down"
return jsonify({"message": "Shutting down"}), 200
finally:
time.sleep(0.1)
os.kill(os.getpid(), signal.SIGTERM)
@app.get("/")
def status_page():
    version = platform.mac_ver()[0] or platform.version()  # mac_ver() yields ('', ...) off macOS
hostname = socket.gethostname()
return (f"{APP_NAME} - Build Agent {build_agent_version} - \n"
f"{system_os()} | {cpu_arch()} | {version} | {hostname} | {ZeroconfServer.get_local_ip()}")
@app.get("/status")
def status():
def get_directory_size(directory):
total_size = 0
for dirpath, _, filenames in os.walk(directory): # Recursively go through files
for filename in filenames:
filepath = os.path.join(dirpath, filename)
if os.path.isfile(filepath): # Ensure it's a file, not a broken symlink
total_size += os.path.getsize(filepath)
return total_size # Size in bytes
    def format_size(size_in_bytes):
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_in_bytes < 1024:
                return f"{size_in_bytes:.2f} {unit}"
            size_in_bytes /= 1024
        return f"{size_in_bytes:.2f} PB"
hostname = socket.gethostname()
return jsonify({"status": system_status['status'],
"agent_version": build_agent_version,
"os": system_os(),
"cpu": cpu_arch(),
"python": platform.python_version(),
"hostname": hostname,
"ip": ZeroconfServer.get_local_ip(),
"port": app_port,
"job_id": system_status['running_job'],
"cache_size": format_size(get_directory_size(TMP_DIR)),
"uptime": str(datetime.datetime.now() - launch_time)
})
def generate_job_id():
return str(uuid.uuid4()).split('-')[-1]
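# e.g. uuid4 "a1b2c3d4-e5f6-4a7b-8c9d-9f8e7d6c5b4a" -> job id "9f8e7d6c5b4a"
# (the last, 12-hex-character group of the UUID; the example value is illustrative)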
@app.route("/checkout_git", methods=['POST'])
def checkout_project():
start_time = datetime.datetime.now()
repo_url = request.json.get('repo_url')
if not repo_url:
return jsonify({'error': 'Repository URL is required'}), 400
print(f"\n========== Checking Out Git Project ==========")
job_id = generate_job_id()
repo_dir = os.path.join(TMP_DIR, job_id)
try:
system_status['status'] = "cloning_repo"
subprocess.check_call(['git', 'clone', repo_url, repo_dir])
system_status['status'] = "ready"
except subprocess.CalledProcessError as e:
print(f"Error cloning repo: {e}")
system_status['status'] = "ready"
return jsonify({'error': 'Failed to clone repository'}), 500
return install_and_build(repo_dir, job_id, start_time)
@app.route('/upload', methods=['POST'])
def upload_project():
try:
start_time = datetime.datetime.now()
if 'file' not in request.files:
return jsonify({"error": "No file uploaded"}), 400
system_status['status'] = "processing_files"
print(f"\n========== Processing Incoming Project ==========")
job_id = generate_job_id()
working_dir = os.path.join(TMP_DIR, BUILD_DIR, job_id)
file = request.files['file']
zip_path = os.path.join(working_dir, "source.zip")
# Save ZIP file
os.makedirs(working_dir, exist_ok=True)
file.save(zip_path)
# Extract ZIP
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
print(f"Extracting uploaded project zip...")
zip_ref.extractall(working_dir)
return install_and_build(working_dir, job_id, start_time)
except Exception as e:
print(f"Uncaught error processing job: {e}")
system_status['status'] = "ready"
return jsonify({"error": f"Uncaught error processing job: {e}"}), 500
def install_and_build(project_path, job_id, start_time):
# Find the PyInstaller spec file
spec_files = glob.glob(os.path.join(project_path, "*.spec"))
if not spec_files:
return jsonify({"error": "No .spec files found"}), 400
print(f"Starting new build job - {len(spec_files)} spec files found")
system_status['status'] = "working"
system_status['running_job'] = os.path.basename(project_path)
# Set up virtual environment
venv_path = os.path.join(project_path, "venv")
try:
system_status['status'] = "creating_venv"
print(f"\n========== Configuring Virtual Environment ({venv_path}) ==========")
python_exec = "python" if is_windows() else "python3"
subprocess.run([python_exec, "-m", "venv", venv_path], check=True)
        py_exec = (os.path.join(venv_path, "Scripts", "python.exe") if os.name == "nt"
                   else os.path.join(venv_path, "bin", "python"))
print(f"Virtual environment configured")
except Exception as e:
print(f"Error setting up virtual environment: {e}")
system_status['status'] = "ready"
system_status['running_job'] = None
        shutil.rmtree(project_path, ignore_errors=True)  # rmdir would fail on a non-empty tree
return jsonify({"error": f"Error setting up virtual environment: {e}"}), 500
# Install requirements
try:
system_status['status'] = "installing_packages"
subprocess.run([py_exec, "-m", "pip", "install", "--upgrade", "pip"], check=True)
subprocess.run([py_exec, "-m", "pip", "install", "pyinstaller", "pyinstaller_versionfile", "--prefer-binary"], check=True)
requirements_path = os.path.join(project_path, "requirements.txt")
if os.path.exists(requirements_path):
print(f"\n========== Installing Required Packages ==========")
subprocess.run([py_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
check=True)
except Exception as e:
print(f"Error installing requirements: {e}")
system_status['status'] = "ready"
system_status['running_job'] = None
        shutil.rmtree(project_path, ignore_errors=True)  # rmdir would fail on a non-empty tree
return jsonify({"error": f"Error installing requirements: {e}"}), 500
results = {}
try:
for index, spec_file in enumerate(spec_files):
# Compile with PyInstaller
system_status['status'] = "compiling"
print(f"\n========== Compiling spec file {index+1} of {len(spec_files)} - {spec_file} ==========")
simple_name = os.path.splitext(os.path.basename(spec_file))[0]
dist_path = os.path.join(project_path, "dist")
work_path = os.path.join(project_path, "build")
log_file_path = os.path.join(project_path, f"build-{simple_name}.log")
print(f"Log file saved to: {log_file_path}")
with open(log_file_path, "w") as log_file:
process = subprocess.Popen(
[py_exec, "-m", "PyInstaller", spec_file, "--distpath", dist_path, "--workpath", work_path],
text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
last_line = None
for line in process.stdout:
last_line = line
print(line, end="")
log_file.write(line)
log_file.flush()
process.wait()
if process.returncode != 0:
raise RuntimeError(
f"PyInstaller failed with exit code {process.returncode}. Last line: {str(last_line).strip()}")
print(f"\n========== Compilation of spec file {spec_file} complete ==========\n")
except Exception as e:
print(f"Error compiling project: {e}")
system_status['status'] = "ready"
system_status['running_job'] = None
        shutil.rmtree(project_path, ignore_errors=True)
return jsonify({"error": f"Error compiling project: {e}"}), 500
dist_path = os.path.join(project_path, "dist")
system_status['status'] = "ready"
system_status['running_job'] = None
return jsonify({
"id": job_id,
"message": "Build completed",
"spec_files": spec_files,
"output_folder": dist_path,
"duration": str(datetime.datetime.now() - start_time),
"cpu": cpu_arch(),
"os": system_os(),
"hostname": socket.gethostname()
}), 200
def cpu_arch():
intel64 = 'x64'
arm64 = 'arm64'
    replacers = [
('aarch64', arm64),
('amd64', intel64),
('x86_64', intel64)
]
arch = platform.machine().lower()
for (x, y) in replacers:
arch = arch.replace(x, y)
return arch
def system_os():
return platform.system().replace("Darwin", "macOS")
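# Illustrative normalizations performed by the two helpers above:
#   cpu_arch():  "AMD64"/"x86_64" -> "x64",  "aarch64" -> "arm64"
#   system_os(): "Darwin" -> "macOS"; "Linux" and "Windows" pass through unchanged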
@app.route('/download/<job_id>', methods=['GET'])
def download_binaries(job_id):
"""Handles downloading the compiled PyInstaller binaries for a given job."""
try:
# Locate the build directory
job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)
dist_path = os.path.join(job_path, "dist")
if not os.path.exists(dist_path):
return jsonify({"error": f"No binaries found for ID: {job_id}"}), 404
# Create a temporary zip file
tmp_dir = tempfile.gettempdir()
zip_path = os.path.join(tmp_dir, f"{job_id}_binaries.zip")
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, _, files in os.walk(dist_path):
for file in files:
file_path = os.path.join(root, file)
arcname = os.path.relpath(file_path, dist_path) # Keep correct relative paths
zipf.write(file_path, arcname)
print(f"Created zip file for job {job_id}: {zip_path}")
return send_file(zip_path, as_attachment=True, download_name=f"{job_id}_binaries.zip")
except Exception as e:
print(f"Error processing download: {e}")
return jsonify({"error": f"Failed to process download: {e}"}), 500
@app.route('/delete/<job_id>', methods=['GET'])
def delete_project(job_id):
job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)
if not os.path.exists(job_path):
return jsonify({"error": f"No project found for ID: {job_id}"}), 404
try:
shutil.rmtree(job_path)
print(f"Deleted: {job_path}")
except FileNotFoundError as e:
return jsonify({"error": f"No project path found: {e}"}), 404
except PermissionError as e:
return jsonify({"error": f"Permission denied: {e}"}), 403
except Exception as e:
return jsonify({"error": f"Unknown error: {e}"}), 500
return "deleted", 200
def job_cache():
try:
job_ids = os.listdir(os.path.join(TMP_DIR, BUILD_DIR))
except Exception:
return []
return job_ids
@app.route('/delete_cache')
def delete_cache():
print(f"Deleting cache - Currently holding {len(job_cache())} jobs in cache")
build_path = os.path.join(TMP_DIR, BUILD_DIR)
try:
shutil.rmtree(build_path)
os.makedirs(build_path, exist_ok=True)
except Exception as e:
print(f"Error deleting {build_path}: {e}")
return jsonify(job_cache()), 200
if __name__ == "__main__":
print(f"===== {APP_NAME} Build Agent (v{build_agent_version}) =====")
ZeroconfServer.configure("_crosspybuilder._tcp.local.", socket.gethostname(), app_port)
try:
ZeroconfServer.start()
app.run(host="0.0.0.0", port=app_port, threaded=True)
except KeyboardInterrupt:
pass
finally:
ZeroconfServer.stop()
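
# Hedged sketch of the HTTP API defined above (host, port, and file names are
# placeholders; only the routes and field names come from this file):
#   curl http://buildbox:9001/status
#   curl -F "file=@myproject.zip" http://buildbox:9001/upload
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"repo_url": "https://example.com/me/myproject.git"}' \
#        http://buildbox:9001/checkout_git
#   curl -OJ http://buildbox:9001/download/<job_id>
#   curl http://buildbox:9001/delete_cache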

cross_py_builder/zeroconf_server.py
@@ -0,0 +1,136 @@
import logging
import socket
from zeroconf import Zeroconf, ServiceInfo, ServiceBrowser, ServiceStateChange, NonUniqueNameException, \
NotRunningException
logger = logging.getLogger()
class ZeroconfServer:
service_type = None
server_name = None
server_port = None
server_ip = None
zeroconf = Zeroconf()
service_info = None
client_cache = {}
properties = {}
@staticmethod
def get_local_ip():
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(("8.8.8.8", 80))
            return s.getsockname()[0]
        finally:
            s.close()
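    # Note: the UDP connect() above never sends a packet; it only asks the OS
    # which interface would route to 8.8.8.8, then reads that interface's IP.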
@classmethod
def configure(cls, service_type, server_name, server_port):
cls.service_type = service_type
cls.server_name = server_name
cls.server_port = server_port
        try:  # Sanity-check that a local IP is resolvable; stop the server if not
            cls.get_local_ip()
        except OSError:  # includes socket.gaierror
            cls.stop()
@classmethod
def start(cls, listen_only=False):
if not cls.service_type:
raise RuntimeError("The 'configure' method must be run before starting the zeroconf server")
logger.debug("Starting zeroconf service")
if not listen_only:
cls._register_service()
cls._browse_services()
@classmethod
def stop(cls):
logger.debug("Stopping zeroconf service")
cls._unregister_service()
cls.zeroconf.close()
@classmethod
def _register_service(cls):
try:
info = ServiceInfo(
cls.service_type,
f"{cls.server_name}.{cls.service_type}",
addresses=[socket.inet_aton(cls.get_local_ip())],
port=cls.server_port,
properties=cls.properties,
)
cls.service_info = info
cls.zeroconf.register_service(info)
print(f"Registered zeroconf service: {cls.service_info.name}")
except (NonUniqueNameException, socket.gaierror) as e:
logger.error(f"Error establishing zeroconf: {e}")
@classmethod
def _unregister_service(cls):
if cls.service_info:
cls.zeroconf.unregister_service(cls.service_info)
print(f"Unregistered zeroconf service: {cls.service_info.name}")
cls.service_info = None
@classmethod
def _browse_services(cls):
        # Hold a reference to the browser instead of discarding it
        cls.browser = ServiceBrowser(cls.zeroconf, cls.service_type, [cls._on_service_discovered])
@classmethod
def _on_service_discovered(cls, zeroconf, service_type, name, state_change):
try:
info = zeroconf.get_service_info(service_type, name)
hostname = name.split(f'.{cls.service_type}')[0]
logger.debug(f"Zeroconf: {hostname} {state_change}")
if service_type == cls.service_type:
if state_change == ServiceStateChange.Added or state_change == ServiceStateChange.Updated:
cls.client_cache[hostname] = info
                else:
                    cls.client_cache.pop(hostname, None)  # tolerate removals we never cached
# pub.sendMessage('zeroconf_state_change', hostname=hostname, state_change=state_change)
except NotRunningException:
pass
@classmethod
def found_hostnames(cls):
local_hostname = socket.gethostname()
        def sort_key(hostname):
            # 0 sorts the local hostname first, 1 everything else
            return 0 if hostname == local_hostname else 1
# Sort the list with the local hostname first
sorted_hostnames = sorted(cls.client_cache.keys(), key=sort_key)
return sorted_hostnames
@classmethod
def found_ip_addresses(cls):
ip_addresses = []
for cache in cls.client_cache.values():
ip_addresses.append(socket.inet_ntoa(cache.addresses[0]))
return ip_addresses
@classmethod
def get_hostname_properties(cls, hostname):
        server_info = cls.client_cache[hostname].properties  # KeyError for unknown hostnames
decoded_server_info = {key.decode('utf-8'): value.decode('utf-8') for key, value in server_info.items()}
return decoded_server_info
# Example usage:
if __name__ == "__main__":
import time
logging.basicConfig(level=logging.DEBUG)
ZeroconfServer.configure("_zordon._tcp.local.", "foobar.local", 8080)
try:
ZeroconfServer.start()
while True:
time.sleep(0.1)
except KeyboardInterrupt:
pass
finally:
ZeroconfServer.stop()