mirror of
https://github.com/blw1138/cross-py-builder.git
synced 2025-12-17 08:38:11 +00:00
Initial commit
This commit is contained in:
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
known_hosts
|
||||||
3
README.md
Normal file
3
README.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# Cross-Py-Build
|
||||||
|
|
||||||
|
Cross-Py-Build is a simple remote build tool for compiling Python projects with PyInstaller on different platforms.
|
||||||
294
agent_manager.py
Normal file
294
agent_manager.py
Normal file
@@ -0,0 +1,294 @@
|
|||||||
|
import argparse
|
||||||
|
import concurrent.futures
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import tempfile
|
||||||
|
import time
|
||||||
|
import zipfile
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
from packaging.version import Version
|
||||||
|
from tabulate import tabulate
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from build_agent import build_agent_version
|
||||||
|
from zeroconf_server import ZeroconfServer
|
||||||
|
|
||||||
|
def find_server_ips():
    """Discover build-agent hosts via zeroconf and merge in the known_hosts file.

    Returns:
        list[str]: hostnames/IPs found on the network plus any entries from
        the local ``known_hosts`` file (may contain duplicates).
    """
    ZeroconfServer.configure("_crosspybuilder._tcp.local.", socket.gethostname(), 9001)
    hostnames = []
    try:
        ZeroconfServer.start(listen_only=True)
        time.sleep(.3)  # give it time to find network
        hostnames = ZeroconfServer.found_ip_addresses()
    except KeyboardInterrupt:
        pass
    finally:
        ZeroconfServer.stop()

    # Merge in manually-configured hosts. The file is optional (it is listed
    # in .gitignore), so a missing file must not abort discovery — the
    # original unconditional open() raised FileNotFoundError here.
    if os.path.exists("known_hosts"):
        with open("known_hosts", "r") as file:
            lines = file.readlines()

        # Each line is "host[:port]"; keep only the host part.
        lines = [line.split(':')[0].strip() for line in lines]
        hostnames.extend(lines)

    return hostnames
|
||||||
|
|
||||||
|
def get_all_servers_status():
    """Query every discovered build server for its status, in parallel.

    Returns:
        list[dict]: one status dict per discovered server.
    """
    server_ips = find_server_ips()
    # Fan the status probes out over a thread pool; each probe is pure I/O.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        statuses = list(executor.map(get_worker_status, server_ips))
    return statuses
|
||||||
|
|
||||||
|
def get_worker_status(hostname):
    """Fetch worker status from the given hostname."""
    url = f"http://{hostname}:9001/status"
    try:
        status = requests.get(url, timeout=5).json()
    except requests.exceptions.RequestException:
        # Unreachable or errored: synthesize an offline record.
        return {"hostname": hostname, "status": "offline"}
    # If the agent reported neither this hostname nor this address,
    # resolve the hostname ourselves so callers get a usable IP.
    if hostname not in (status['hostname'], status['ip']):
        status['ip'] = socket.gethostbyname(hostname)
    return status
|
||||||
|
|
||||||
|
|
||||||
|
def zip_project(source_dir, output_zip):
    """Zips the given directory.

    Skips common build/venv directories (case-insensitive) so the upload
    stays small; archive entries are stored relative to *source_dir*.
    """
    excluded_dirs = {"venv", ".venv", "dist", "build"}
    with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as archive:
        for root, dirs, files in os.walk(source_dir):
            # Prune excluded directories in place so os.walk never descends
            # into them.
            dirs[:] = [d for d in dirs if d.lower() not in excluded_dirs]
            for name in files:
                full_path = os.path.join(root, name)
                archive.write(full_path, os.path.relpath(full_path, source_dir))
|
||||||
|
|
||||||
|
|
||||||
|
def send_build_request(zip_file, server_ip, download_after=False):
    """Uploads the zip file to the given server.

    Args:
        zip_file: path of the zipped project to upload.
        server_ip: address of the build agent (port 9001 assumed).
        download_after: when True, fetch the built binaries after a
            successful build and save them to the current directory.
    """
    upload_url = f"http://{server_ip}:9001/upload"
    print(f"Submitting build request to URL: {upload_url}")
    print("Please wait. This may take a few minutes...")
    # Blocking upload: the agent only replies once the remote build finishes.
    with open(zip_file, 'rb') as f:
        response = requests.post(upload_url, files={"file": f})

    if response.status_code == 200:
        response_data = response.json()
        print(f"Build successful. ID: {response_data['id']} Hostname: {response_data['hostname']} OS: {response_data['os']} CPU: {response_data['cpu']} Spec files: {len(response_data['spec_files'])} - Elapsed time: {response_data['duration']}" )
        if download_after:
            download_url = f"http://{server_ip}:9001/download/{response_data.get('id')}"
            try:
                # e.g. "myproject-windows-x64.zip"
                save_name = f"{os.path.splitext(os.path.basename(zip_file))[0]}-{response_data['os'].lower()}-{response_data['cpu'].lower()}.zip"
                download_zip(download_url, save_name=save_name)
            except Exception as e:
                print(f"Error downloading zip: {e}")
    else:
        print("Upload failed:", response.status_code, response.text)
|
||||||
|
|
||||||
|
def download_zip(url, save_name, save_dir="."):
    """Download a ZIP file from a URL and save it with its original filename."""
    # Stream the body so large binaries never sit fully in memory.
    response = requests.get(url, stream=True)
    response.raise_for_status()  # Raise an error if request fails

    save_path = os.path.join(save_dir, save_name)
    with open(save_path, "wb") as destination:
        for chunk in response.iter_content(chunk_size=8192):
            destination.write(chunk)

    print(f"Saved binaries to file: {save_path}")
    return save_path
|
||||||
|
|
||||||
|
|
||||||
|
def select_server(servers, cpu=None, os_name=None):
    """Selects a build server based on CPU architecture and OS filters."""

    def _matches(server):
        # A candidate must be ready and pass each case-insensitive
        # substring filter that was supplied.
        if server["status"] != "ready":
            return False
        if cpu and cpu.lower() not in server["cpu"].lower():
            return False
        if os_name and os_name.lower() not in server["os"].lower():
            return False
        return True

    # Return the first matching server, or None when nothing qualifies.
    for server in servers:
        if _matches(server):
            return server
    return None
|
||||||
|
|
||||||
|
def process_new_job(args, server_data):
    """Zip the project at args.path and build it on every matching server.

    Args:
        args: parsed CLI namespace (uses .path, .cpu, .os, .download).
        server_data: list of status dicts from get_all_servers_status().
    """
    available_servers = [s for s in server_data if s["status"] == "ready"]

    # Optional case-insensitive substring filters on CPU/OS.
    if args.cpu:
        available_servers = [s for s in available_servers if args.cpu.lower() in s["cpu"].lower()]
    if args.os:
        available_servers = [s for s in available_servers if args.os.lower() in s["os"].lower()]

    if not available_servers:
        print("No available servers matching the criteria.")
        return

    # Keep only unique servers: one per distinct (cpu, os) pair, first seen
    # wins — no point building the same target twice.
    unique_servers = {}
    for server in available_servers:
        key = (server["cpu"], server["os"])
        if key not in unique_servers:
            unique_servers[key] = server

    available_servers = list(unique_servers.values())
    print(f"Found {len(available_servers)} servers available to build")
    print(tabulate(available_servers, headers="keys", tablefmt="grid"))

    # Zip the project into the system temp directory.
    tmp_dir = tempfile.gettempdir()
    zip_file = os.path.join(tmp_dir, f"{os.path.basename(args.path)}.zip")
    zip_project(args.path, zip_file)
    print(f"Zipped {args.path} to {zip_file}")

    # Start builds on all matching servers
    with concurrent.futures.ThreadPoolExecutor() as executor:
        print("Submitting builds to:")
        for server in available_servers:
            print(f"\t{server['hostname']} - {server['os']} - {server['cpu']}")
        futures = {executor.submit(send_build_request, zip_file, server["ip"], args.download): server for server in
                   available_servers}

        # Collect results
        for future in concurrent.futures.as_completed(futures):
            server = futures[future]
            try:
                response = future.result()  # Get the response
            except Exception as e:
                print(f"Build failed on {server['hostname']}: {e}")

    # Best-effort cleanup of the temporary zip.
    try:
        os.remove(zip_file)
    except Exception as e:
        print(f"Error removing zip file: {e}")
|
||||||
|
|
||||||
|
def delete_cache(server_data):
    """Ask every ready build server to clear its job cache.

    Args:
        server_data: list of status dicts from get_all_servers_status().
    """
    available_servers = [s for s in server_data if s["status"] == "ready"]
    # Fixed garbled message ("in from" -> "on").
    print(f"Deleting cache on all available servers ({len(available_servers)})")
    for server in available_servers:
        try:
            response = requests.get(f"http://{server['ip']}:9001/delete_cache")
            response.raise_for_status()
            print(f"Cache cleared on {server['hostname']}")
        except Exception as e:
            # One failing server must not stop the sweep over the others.
            print(f"Error deleting cache on {server['hostname']}: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def update_worker(server):
    """Push the local agent files to one worker and trigger its restart.

    Args:
        server: status dict with at least 'hostname', 'ip', 'agent_version'.

    Returns:
        The server dict when the update succeeded and a restart was
        requested, otherwise None.
    """
    try:
        print(f"Updating {server['hostname']} from {server.get('agent_version')} => {build_agent_version}")

        # Reuse the handles opened by the with-block. The original opened
        # each file a second time inside the dict literal, leaking those
        # handles and leaving the with-block's handles unused.
        with open("build_agent.py", "rb") as file1, open("requirements.txt", "rb") as file2:
            update_files = {
                "file1": file1,
                "file2": file2
            }
            response = requests.post(f"http://{server['ip']}:9001/update", files=update_files)
        response.raise_for_status()

        response_json = response.json()
        if response_json.get('updated_files') and not response_json.get('error_files'):
            try:
                # The agent kills its own process to restart, so a dropped
                # connection here is the expected outcome.
                requests.get(f"http://{server['ip']}:9001/restart", timeout=2)
            except requests.exceptions.ConnectionError:
                pass
            return server
        else:
            print(f"Error updating {server['hostname']}. Errors: {response_json.get('error_files')} - Updated: {response_json.get('updated_files')}")
    except Exception as e:
        print(f"Unhandled error updating {server['hostname']}: {e}")
    return None
|
||||||
|
|
||||||
|
def update_build_workers(server_data):
    """Upgrade every ready worker whose agent is older than ours, then verify.

    Pushes build_agent.py / requirements.txt to outdated workers in parallel,
    asks them to restart, and polls /status for up to 30 seconds to confirm
    each one came back on the new version.
    """
    available_workers = [s for s in server_data if s["status"] == "ready"]
    # PEP 440-aware comparison via packaging.version.Version.
    workers_to_update = [x for x in available_workers if Version(x.get('agent_version')) < Version(build_agent_version)]

    if not workers_to_update:
        print(f"All {len(available_workers)} workers up to date")
        return

    print(f"Updating workers on all available servers ({len(workers_to_update)}) to {build_agent_version}")
    updated_servers = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {executor.submit(update_worker, server): server for server in workers_to_update}
        for future in concurrent.futures.as_completed(futures):
            # update_worker returns the server dict on success, None on failure.
            server = future.result()
            if server:
                updated_servers.append(server)

    if updated_servers:
        print("Waiting for servers to restart...")
        unverified_servers = {server["ip"]: server for server in updated_servers}
        # Poll each restarted server until verified or the deadline passes.
        end_time = datetime.now() + timedelta(seconds=30)
        while unverified_servers and datetime.now() < end_time:
            for server_ip in list(unverified_servers.keys()):  # Iterate over a copy to avoid modification issues
                try:
                    response = requests.get(f"http://{server_ip}:9001/status", timeout=2)
                    response.raise_for_status()
                    server_info = unverified_servers[server_ip]  # Get full server details
                    agent_version = response.json().get('agent_version')
                    if agent_version == build_agent_version:
                        print(f"Agent on {server_info['hostname']} successfully upgraded to {build_agent_version}")
                    else:
                        print(f"Agent on {server_info['hostname']} failed to upgrade. Still on version {agent_version}")
                    # Reachable again: stop polling this server either way.
                    unverified_servers.pop(server_ip)
                except requests.exceptions.ConnectionError:
                    pass  # Server is still restarting
            if unverified_servers:
                time.sleep(1)  # Short delay before retrying to avoid spamming requests
        if unverified_servers:
            print("Some servers did not restart in time:", list(unverified_servers.keys()))

    print("Update complete")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: parse arguments and dispatch to the matching action."""
    parser = argparse.ArgumentParser(description="Build agent manager for cross-py-builder")
    parser.add_argument("-status", action="store_true", help="Get status of available servers")
    parser.add_argument("-path", type=str, help="Path to the project")
    parser.add_argument("-cpu", type=str, help="CPU architecture")
    parser.add_argument("-os", type=str, help="Operating system")
    parser.add_argument("-d", '--download', action="store_true", help="Download after build")
    parser.add_argument("-delete-cache", action="store_true", help="Delete cache")
    parser.add_argument("-update-all", action="store_true", help="Update build agent")
    parser.add_argument("-restart", type=str, help="Hostname to restart")
    parser.add_argument("-restart-all", action="store_true", help="Restart all agents")
    parser.add_argument("-shutdown", type=str, help="Hostname to shutdown")
    # parser.add_argument("-worker", type=str, help="Update Agents")
    args = parser.parse_args()

    # Exactly one action runs per invocation; branch order sets precedence.
    if args.status:
        print(tabulate(get_all_servers_status(), headers="keys", tablefmt="grid"))
        return
    elif args.restart:
        restart_agent(args.restart)
    elif args.restart_all:
        print("Restarting all agents...")
        for server_ip in find_server_ips():
            restart_agent(server_ip)
    elif args.shutdown:
        print(f"Shutting down hostname: {args.shutdown}")
        try:
            requests.get(f"http://{args.shutdown}:9001/shutdown")
        except requests.exceptions.ConnectionError:
            # The agent kills its own process, so the dropped connection
            # is expected.
            pass
    elif args.delete_cache:
        delete_cache(get_all_servers_status())
    elif args.path:
        process_new_job(args, get_all_servers_status())
    elif args.update_all:
        update_build_workers(get_all_servers_status())
    else:
        print("No path given!")
|
||||||
|
|
||||||
|
|
||||||
|
def restart_agent(hostname):
    """Ask the agent at *hostname* to restart itself."""
    print(f"Restarting agent: {hostname}")
    try:
        requests.get(f"http://{hostname}:9001/restart")
    except requests.exceptions.ConnectionError:
        # The agent terminates its own process to restart, which drops
        # the connection — that is the success path.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Script entry point for the manager CLI.
    main()
|
||||||
382
build_agent.py
Normal file
382
build_agent.py
Normal file
@@ -0,0 +1,382 @@
|
|||||||
|
import datetime
|
||||||
|
import signal
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import time
|
||||||
|
|
||||||
|
from flask import Flask, request, jsonify, send_file
|
||||||
|
import os
|
||||||
|
import zipfile
|
||||||
|
import subprocess
|
||||||
|
import glob
|
||||||
|
import shutil
|
||||||
|
import uuid
|
||||||
|
import platform
|
||||||
|
from zeroconf_server import ZeroconfServer
|
||||||
|
from version import APP_VERSION, APP_NAME
|
||||||
|
|
||||||
|
# Version of this build agent; compared by the manager when deciding whether
# to push an update (see agent_manager's update_build_workers).
build_agent_version = "0.1.23"

app = Flask(__name__)
# Name of this script; used to relaunch ourselves on /restart and to detect
# other running instances.
SCRIPT_PATH = os.path.basename(__file__)
# Directory containing this script; uploaded update files are written here.
LOCAL_DIR = os.path.dirname(__file__)
# Sub-directory (under the system temp dir) holding per-job build folders.
BUILD_DIR = "pybuild-data"
TMP_DIR = tempfile.gettempdir()
# Mutable global agent state reported by /status.
system_status = {"status": "ready", "running_job": None}
|
||||||
|
|
||||||
|
def is_windows():
    """Return True when the host operating system is Windows."""
    return "windows" == platform.system().lower()
|
||||||
|
|
||||||
|
@app.route('/update', methods=['POST'])
def update_files():
    """Receive new agent source files and swap them into place.

    For each uploaded file: save to "<name>.tmp", rename the current file to
    "<name>.old", move the temp file into place, then drop the backup. If
    requirements.txt was among the updates, install its packages. Returns
    JSON listing updated and failed files; 500 when any file failed.
    """
    if not request.files:
        return {"error": "No files"}, 400

    print("Updating build agent...")
    system_status['status'] = "updating"
    requirements_path = os.path.join(LOCAL_DIR, "requirements.txt")
    needs_install_requirements = False
    updated_files = []
    error_files = []
    for key in request.files:
        uploaded_file = request.files[key]
        if uploaded_file.filename:
            original_filename = uploaded_file.filename
            # Stage the upload next to the target so the later move is
            # a same-filesystem rename.
            temp_save_path = os.path.join(LOCAL_DIR, f"{original_filename}.tmp")
            uploaded_file.save(temp_save_path)
            if os.path.getsize(temp_save_path):
                try:
                    backup_path = os.path.join(LOCAL_DIR, original_filename + ".old")
                    local_file_path = os.path.join(LOCAL_DIR, original_filename)
                    # Swap: current -> .old, .tmp -> current, then drop .old.
                    os.rename(local_file_path, backup_path)
                    shutil.move(temp_save_path, local_file_path)
                    os.remove(backup_path)
                    # Remember whether requirements.txt itself was replaced.
                    needs_install_requirements |= (requirements_path == local_file_path)
                    updated_files.append(original_filename)
                except Exception as e:
                    print(f"Exception updating file ({original_filename}): {e}")
                    error_files.append(original_filename)
            else:
                # Zero-byte upload: reject it and clean up the temp file.
                print(f"Invalid size for {temp_save_path}!")
                error_files.append(original_filename)
                os.remove(temp_save_path)

    if os.path.exists(requirements_path) and needs_install_requirements:
        print(f"\nInstalling Required Packages...")
        python_exec = "python" if is_windows() else "python3"
        subprocess.run([python_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
                       check=True)

    print("Update complete")

    return jsonify({'updated_files': updated_files, 'error_files': error_files}), 200 if not error_files else 500
|
||||||
|
|
||||||
|
@app.get("/restart")
def restart():
    """Relaunch this script in a detached process, then kill this one.

    The HTTP response is produced by the try's return; the finally then
    SIGTERMs the current process, so callers should expect the connection
    to drop immediately afterwards.
    """
    system_status['status'] = "restarting"
    ZeroconfServer.stop()
    print("=== Restarting ===")
    if os.name == "nt":  # Windows
        DETACHED_PROCESS = 0x00000008
        CREATE_NEW_PROCESS_GROUP = 0x00000200
        # Fix hanging issue using STARTUPINFO
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # Prevents extra console window pop-up
        # NOTE(review): `si` is built but never passed to Popen
        # (startupinfo=si is missing) — confirm whether it is still needed.
        subprocess.Popen(
            [sys.executable, SCRIPT_PATH],
            creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            shell=True
        )
    else:  # Linux/macOS
        subprocess.Popen(
            [sys.executable, SCRIPT_PATH],
            stdout=sys.stdout,
            stderr=sys.stderr,
            # Detach from this (soon-to-die) process's session.
            start_new_session=True
        )
    try:
        return jsonify({"message": "=== Restarting ==="}), 200
    finally:
        # Runs after the return value is produced; terminates this process.
        os.kill(os.getpid(), signal.SIGTERM)
|
||||||
|
|
||||||
|
@app.get("/shutdown")
def shutdown():
    """Terminate this agent process after acknowledging the request."""
    try:
        print("=== SHUTTING DOWN ===")
        system_status['status'] = "shutting_down"
        return jsonify({"message": "Shutting down"}), 200
    finally:
        # Give the response a moment to go out before killing the process.
        time.sleep(1)
        os.kill(os.getpid(), signal.SIGTERM)
|
||||||
|
|
||||||
|
@app.get("/")
def status_page():
    """Human-readable one-line agent summary served at the root URL."""
    # platform.mac_ver() returns ('', ('', '', ''), '') on non-macOS, which
    # is a truthy tuple, so the original `if platform.mac_ver()` always chose
    # the (empty) mac release string. Use the release component itself with
    # a fallback to platform.version() instead.
    version = platform.mac_ver()[0] or platform.version()
    hostname = socket.gethostname()
    return (f"{APP_NAME} - Build Agent {build_agent_version} - \n"
            f"{platform.system()} | {cpu_arch()} | {version} | {hostname} | {ZeroconfServer.get_local_ip()}")
|
||||||
|
|
||||||
|
@app.get("/status")
def status():
    """Machine-readable agent status JSON consumed by the manager CLI."""

    def get_directory_size(directory):
        # Total size (bytes) of all regular files under *directory*.
        total_size = 0
        for dirpath, _, filenames in os.walk(directory):  # Recursively go through files
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                if os.path.isfile(filepath):  # Ensure it's a file, not a broken symlink
                    total_size += os.path.getsize(filepath)
        return total_size  # Size in bytes

    def format_size(size_in_bytes):
        # Human-readable size. The original fell off the end of the loop
        # (returning None) for sizes >= 1024 TB; report those as PB.
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_in_bytes < 1024:
                return f"{size_in_bytes:.2f} {unit}"
            size_in_bytes /= 1024
        return f"{size_in_bytes:.2f} PB"

    hostname = socket.gethostname()
    return jsonify({"status": system_status['status'], "agent_version": build_agent_version, "os": platform.system(), "cpu": cpu_arch(),
                    "python_version": platform.python_version(), "hostname": hostname, "ip": ZeroconfServer.get_local_ip(),
                    "running_job": system_status['running_job'], "job_cache": len(job_cache()), "job_cache_size": format_size(get_directory_size(TMP_DIR))})
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/upload', methods=['POST'])
def upload_project():
    """Accept a zipped project upload, extract it, and run the build.

    Expects one multipart field named 'file' containing the project zip.
    Returns the JSON build result from install_and_build(), or a JSON error.
    """
    try:
        start_time = datetime.datetime.now()
        if 'file' not in request.files:
            return jsonify({"error": "No file uploaded"}), 400

        print(f"\n========== Processing Incoming Project ==========")
        # Short job id: last segment of a uuid4 (12 hex chars).
        job_id = str(uuid.uuid4()).split('-')[-1]
        working_dir = os.path.join(TMP_DIR, BUILD_DIR, job_id)

        file = request.files['file']
        zip_path = os.path.join(working_dir, "source.zip")

        # Save ZIP file
        os.makedirs(working_dir, exist_ok=True)
        file.save(zip_path)

        # Extract ZIP
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            print(f"Extracting uploaded project zip...")
            zip_ref.extractall(working_dir)

        return install_and_build(working_dir, job_id, start_time)
    except Exception as e:
        print(f"Uncaught error processing job: {e}")
        # The original built this response but never returned it, so Flask
        # answered with a generic 500 ("view returned None") instead of
        # this JSON error body.
        return jsonify({"error": f"Uncaught error processing job: {e}"}), 500
|
||||||
|
|
||||||
|
def install_and_build(project_path, job_id, start_time):
    """Create a venv, install requirements, and run PyInstaller on every
    .spec file found in *project_path*.

    Args:
        project_path: extracted project directory (also the venv/work root).
        job_id: short id used in the success response.
        start_time: datetime the upload started, used to report duration.

    Returns:
        A (flask response, status code) tuple describing the build result.
    """

    # Find the PyInstaller spec file
    spec_files = glob.glob(os.path.join(project_path, "*.spec"))
    if not spec_files:
        return jsonify({"error": "No .spec files found"}), 400

    print(f"Starting new build job - {len(spec_files)} spec files found")
    system_status['status'] = "working"
    system_status['running_job'] = os.path.basename(project_path)

    def _fail(message, exc):
        # Reset agent state, discard the job directory, build the 500 reply.
        print(f"{message}: {exc}")
        system_status['status'] = "ready"
        system_status['running_job'] = None
        # shutil.rmtree, not os.rmdir/os.remove: the job directory is a
        # non-empty tree, so the original cleanup calls always failed.
        shutil.rmtree(project_path, ignore_errors=True)
        return jsonify({"error": f"{message}: {exc}"}), 500

    # Set up virtual environment
    venv_path = os.path.join(project_path, "venv")
    try:
        print(f"\n========== Configuring Virtual Environment ({venv_path}) ==========")
        python_exec = "python" if is_windows() else "python3"
        subprocess.run([python_exec, "-m", "venv", venv_path], check=True)

        py_exec = os.path.join(venv_path, "bin", "python") if os.name != "nt" else os.path.join(venv_path, "Scripts",
                                                                                               "python.exe")
        print(f"Virtual environment configured")
    except Exception as e:
        return _fail("Error setting up virtual environment", e)

    # Install requirements
    try:
        subprocess.run([py_exec, "-m", "pip", "install", "--upgrade", "pip"], check=True)
        subprocess.run([py_exec, "-m", "pip", "install", "pyinstaller", "pyinstaller_versionfile", "--prefer-binary"], check=True)
        requirements_path = os.path.join(project_path, "requirements.txt")
        if os.path.exists(requirements_path):
            print(f"\n========== Installing Required Packages ==========")
            subprocess.run([py_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
                           check=True)
    except Exception as e:
        return _fail("Error installing requirements", e)

    try:
        for index, spec_file in enumerate(spec_files):
            # Compile with PyInstaller
            print(f"\n========== Compiling spec file {index+1} of {len(spec_files)} - {spec_file} ==========")
            simple_name = os.path.splitext(os.path.basename(spec_file))[0]
            os.chdir(project_path)
            dist_path = os.path.join(project_path, "dist")
            work_path = os.path.join(project_path, "build")
            log_file_path = os.path.join(project_path, f"build-{simple_name}.log")
            print(f"Log file saved to: {log_file_path}")

            with open(log_file_path, "w") as log_file:
                process = subprocess.Popen(
                    [py_exec, "-m", "PyInstaller", spec_file, "--distpath", dist_path, "--workpath", work_path],
                    text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
                )

                for line in process.stdout:
                    print(line, end="")  # Print to console
                    log_file.write(line)  # Save to log file
                    log_file.flush()  # Ensure real-time writing

                return_code = process.wait()  # Wait for the process to complete
                # Previously a failed compile still reported "Build completed";
                # surface the failure to the caller instead.
                if return_code != 0:
                    raise RuntimeError(f"PyInstaller exited with code {return_code} for {spec_file}")
            print(f"\n========== Compilation of spec file {spec_file} complete ==========\n")
    except Exception as e:
        return _fail("Error compiling project", e)

    dist_path = os.path.join(project_path, "dist")
    system_status['status'] = "ready"
    system_status['running_job'] = None
    return jsonify({
        "id": job_id,
        "message": "Build completed",
        "spec_files": spec_files,
        "output_folder": dist_path,
        "duration": str(datetime.datetime.now() - start_time),
        "cpu": cpu_arch(),
        "os": platform.system(),
        "hostname": socket.gethostname()
    }), 200
|
||||||
|
|
||||||
|
def cpu_arch():
    """Return the machine architecture normalized to 'x64' / 'arm64' naming."""
    # Canonical names keyed by the raw spellings platform.machine() can give.
    aliases = {
        'aarch64': 'arm64',
        'amd64': 'x64',
        'x86_64': 'x64',
    }
    arch = platform.machine().lower()
    for raw, canonical in aliases.items():
        arch = arch.replace(raw, canonical)
    return arch
|
||||||
|
|
||||||
|
@app.route('/download/<job_id>', methods=['GET'])
def download_binaries(job_id):
    """Handles downloading the compiled PyInstaller binaries for a given job."""
    try:
        # Locate the build directory
        job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)
        dist_path = os.path.join(job_path, "dist")

        if not os.path.exists(dist_path):
            return jsonify({"error": f"No binaries found for ID: {job_id}"}), 404

        # Create a temporary zip file
        tmp_dir = tempfile.gettempdir()
        zip_path = os.path.join(tmp_dir, f"{job_id}_binaries.zip")

        # Re-zip dist/ so the caller gets one file with relative paths intact.
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, _, files in os.walk(dist_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, dist_path)  # Keep correct relative paths
                    zipf.write(file_path, arcname)

        print(f"Created zip file for job {job_id}: {zip_path}")
        return send_file(zip_path, as_attachment=True, download_name=f"{job_id}_binaries.zip")

    except Exception as e:
        print(f"Error processing download: {e}")
        return jsonify({"error": f"Failed to process download: {e}"}), 500
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/delete/<job_id>', methods=['GET'])
def delete_project(job_id):
    """Remove one cached job directory; JSON error with 404/403/500 on failure."""
    job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)

    if not os.path.exists(job_path):
        return jsonify({"error": f"No project found for ID: {job_id}"}), 404
    try:
        shutil.rmtree(job_path)
        print(f"Deleted: {job_path}")
    except FileNotFoundError as e:
        # Raced with another deletion between the exists() check and rmtree.
        return jsonify({"error": f"No project path found: {e}"}), 404
    except PermissionError as e:
        return jsonify({"error": f"Permission denied: {e}"}), 403
    except Exception as e:
        return jsonify({"error": f"Unknown error: {e}"}), 500
    return "deleted", 200
|
||||||
|
|
||||||
|
def job_cache():
    """Return the list of job ids currently cached in the build directory."""
    cache_root = os.path.join(TMP_DIR, BUILD_DIR)
    try:
        return os.listdir(cache_root)
    except Exception:
        # Cache root may not exist yet (no jobs since the last cleanup).
        return []
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/delete_cache')
def delete_cache():
    """Wipe all cached job directories and return the (now empty) job list."""
    print(f"Deleting cache - Currently holding {len(job_cache())} jobs in cache")
    build_path = os.path.join(TMP_DIR, BUILD_DIR)
    try:
        # Remove the whole tree, then recreate an empty cache root.
        shutil.rmtree(build_path)
        os.makedirs(build_path, exist_ok=True)
    except Exception as e:
        # Best-effort: report the failure but still answer with the job list.
        print(f"Error deleting {build_path}: {e}")
    return jsonify(job_cache()), 200
|
||||||
|
|
||||||
|
|
||||||
|
def is_another_instance_running():
    """Best-effort check for another running copy of this script.

    Lists processes (WMI via PowerShell on Windows, pgrep elsewhere) and
    counts command lines containing this script's name, excluding lines
    that mention our own PID.
    """
    if os.name == "nt":  # Windows
        cmd = [
            "powershell",
            "-Command",
            "Get-WmiObject Win32_Process | Where-Object { $_.CommandLine -match 'python' } | Select-Object ProcessId, CommandLine"
        ]
        # NOTE(review): threshold is 1 on Windows vs 0 elsewhere —
        # presumably to tolerate one extra match from the listing pipeline
        # itself; confirm the intent.
        minimum_count = 1
    else:
        # pgrep -fl " " matches any command line containing a space,
        # i.e. effectively lists all processes launched with arguments.
        cmd = ["pgrep", "-fl", " "]
        minimum_count = 0
    try:
        output = subprocess.check_output(cmd, text=True)
        running_versions = []
        for line in output.splitlines():
            # Skip our own process via a PID substring match.
            if SCRIPT_PATH in line and str(os.getpid()) not in line:
                running_versions.append(line)
        return len(running_versions) > minimum_count
    except subprocess.CalledProcessError:
        # pgrep exits non-zero when nothing matches at all.
        return False
|
||||||
|
|
||||||
|
if __name__ == "__main__":

    # Wait for any previous instance (e.g. the one that relaunched us via
    # /restart) to exit before we try to bind the port.
    had_previous_instances = False
    while is_another_instance_running():
        had_previous_instances = True
        print("Another instance is running. Waiting until it exits.")
        time.sleep(1)

    # Clear the console before printing the banner.
    os.system('cls' if os.name == 'nt' else 'clear')
    print(f"===== {APP_NAME} {APP_VERSION} Build Agent (v{build_agent_version}) =====")
    ZeroconfServer.configure("_crosspybuilder._tcp.local.", socket.gethostname(), 9001)
    try:
        ZeroconfServer.start()
        # threaded=True keeps /status responsive while a build is running.
        app.run(host="0.0.0.0", port=9001, threaded=True)

    except KeyboardInterrupt:
        pass
    finally:
        ZeroconfServer.stop()
|
||||||
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
Flask~=3.1.0
|
||||||
|
requests~=2.32.3
|
||||||
|
tabulate~=0.9.0
|
||||||
|
zeroconf~=0.145.1
|
||||||
|
packaging~=24.2
|
||||||
2
version.py
Normal file
2
version.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
APP_NAME = "cross-py-builder"
|
||||||
|
APP_VERSION = "1.0.0"
|
||||||
137
zeroconf_server.py
Normal file
137
zeroconf_server.py
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import logging
|
||||||
|
import socket
|
||||||
|
|
||||||
|
from zeroconf import Zeroconf, ServiceInfo, ServiceBrowser, ServiceStateChange, NonUniqueNameException, \
|
||||||
|
NotRunningException
|
||||||
|
|
||||||
|
logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class ZeroconfServer:
    """Class-level wrapper around python-zeroconf.

    Advertises this host's service on the local network and maintains a
    cache of peer services discovered under the same service type.  All
    state is held on the class; the expected call order is
    ``configure()`` -> ``start()`` -> ``stop()``.
    """

    service_type = None    # e.g. "_crosspybuilder._tcp.local."
    server_name = None     # hostname advertised for this machine
    server_port = None     # TCP port advertised for this machine
    server_ip = None       # set nowhere in this class; kept for compatibility
    zeroconf = Zeroconf()  # shared engine, created at import time
    service_info = None    # ServiceInfo while our own service is registered
    client_cache = {}      # hostname -> ServiceInfo for discovered peers
    properties = {}        # TXT-record properties advertised with our service
    _browser = None        # ServiceBrowser reference while browsing is active

    @staticmethod
    def get_local_ip():
        """Return the local IP address used for outbound traffic.

        Connecting a UDP socket sends no packets; it only selects a route.
        The ``with`` block guarantees the socket is closed even when the
        connect raises (the original leaked the socket on error).
        """
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(("8.8.8.8", 80))
            return s.getsockname()[0]

    @classmethod
    def configure(cls, service_type, server_name, server_port):
        """Record the service identity to advertise; must run before start()."""
        cls.service_type = service_type
        cls.server_name = server_name
        cls.server_port = server_port
        # NOTE(review): mirrors the original logic — probe for a local IP and,
        # if name resolution fails, shut the zeroconf engine down.  The
        # resolved address itself was never used, so it is not kept.
        try:
            cls.get_local_ip()
        except socket.gaierror:
            cls.stop()

    @classmethod
    def start(cls, listen_only=False):
        """Start browsing for peers and, unless listen_only, advertise ourselves.

        Raises:
            RuntimeError: if configure() has not been called first.
        """
        if not cls.service_type:
            raise RuntimeError("The 'configure' method must be run before starting the zeroconf server")
        logger.debug("Starting zeroconf service")
        if not listen_only:
            cls._register_service()
        cls._browse_services()

    @classmethod
    def stop(cls):
        """Withdraw our advertisement and close the zeroconf engine."""
        logger.debug("Stopping zeroconf service")
        cls._unregister_service()
        cls.zeroconf.close()

    @classmethod
    def _register_service(cls):
        """Publish this host's service record on the LAN."""
        try:
            info = ServiceInfo(
                cls.service_type,
                f"{cls.server_name}.{cls.service_type}",
                addresses=[socket.inet_aton(cls.get_local_ip())],
                port=cls.server_port,
                properties=cls.properties,
            )
            cls.service_info = info
            cls.zeroconf.register_service(info)
            print(f"Registered zeroconf service: {cls.service_info.name}")
        except (NonUniqueNameException, socket.gaierror) as e:
            # Best-effort: a name clash or resolution failure is logged, not fatal.
            logger.error(f"Error establishing zeroconf: {e}")

    @classmethod
    def _unregister_service(cls):
        """Withdraw our service record, if one is currently registered."""
        if cls.service_info:
            cls.zeroconf.unregister_service(cls.service_info)
            print(f"Unregistered zeroconf service: {cls.service_info.name}")
            cls.service_info = None

    @classmethod
    def _browse_services(cls):
        """Begin watching the network for peers of our service type."""
        # Keep the browser referenced on the class.  The original discarded
        # the result of a pointless is_alive() call and dropped the local.
        cls._browser = ServiceBrowser(cls.zeroconf, cls.service_type, [cls._on_service_discovered])

    @classmethod
    def _on_service_discovered(cls, zeroconf, service_type, name, state_change):
        """ServiceBrowser callback: keep client_cache in sync with the network."""
        try:
            info = zeroconf.get_service_info(service_type, name)
            hostname = name.split(f'.{cls.service_type}')[0]
            logger.debug(f"Zeroconf: {hostname} {state_change}")
            if service_type == cls.service_type:
                if state_change in (ServiceStateChange.Added, ServiceStateChange.Updated):
                    cls.client_cache[hostname] = info
                else:
                    # pop with a default: a Removed event can arrive for a
                    # host we never cached; the bare pop() raised KeyError.
                    cls.client_cache.pop(hostname, None)
            # pub.sendMessage('zeroconf_state_change', hostname=hostname, state_change=state_change)
        except NotRunningException:
            # The engine was shut down while an event was still in flight.
            pass

    @classmethod
    def found_hostnames(cls):
        """Return discovered hostnames with the local host sorted first."""
        local_hostname = socket.gethostname()

        def sort_key(hostname):
            # False (0) sorts before True (1), so the local hostname leads.
            return hostname != local_hostname

        return sorted(cls.client_cache.keys(), key=sort_key)

    @classmethod
    def found_ip_addresses(cls):
        """Return the first advertised IPv4 address of each discovered peer."""
        return [socket.inet_ntoa(info.addresses[0]) for info in cls.client_cache.values()]

    @classmethod
    def get_hostname_properties(cls, hostname):
        """Return *hostname*'s TXT-record properties as a str->str dict.

        Returns an empty dict for an unknown hostname (the original raised
        AttributeError when the cache lookup returned None).
        """
        cached = cls.client_cache.get(hostname)
        if cached is None:
            return {}
        return {key.decode('utf-8'): value.decode('utf-8') for key, value in cached.properties.items()}
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage:
|
||||||
|
if __name__ == "__main__":
    import time

    # Demo: advertise a dummy service, then browse until interrupted.
    logging.basicConfig(level=logging.DEBUG)
    ZeroconfServer.configure("_zordon._tcp.local.", "foobar.local", 8080)
    try:
        ZeroconfServer.start()
        # Idle in the main thread; discovery runs on zeroconf's own threads.
        while True:
            time.sleep(0.1)
    except KeyboardInterrupt:
        pass
    finally:
        # Tear the advertisement down cleanly on Ctrl+C.
        ZeroconfServer.stop()
|
||||||
Reference in New Issue
Block a user