Mirror of https://github.com/blw1138/cross-py-builder.git (synced 2025-12-17 08:38:11 +00:00)
Another attempt #2
cross_py_builder/build_agent.py (executable file, 407 lines added)
@@ -0,0 +1,407 @@
#!/usr/bin/env python3

import datetime
import glob
import os
import platform
import shutil
import signal
import socket
import subprocess
import sys
import tempfile
import time
import uuid
import zipfile

from flask import Flask, request, jsonify, send_file

from zeroconf_server import ZeroconfServer

APP_NAME = "cross_py_builder"
build_agent_version = "0.1.34"
app_port = 9001

app = Flask(__name__)
launch_time = datetime.datetime.now()
LAUNCH_DIR = os.curdir
SCRIPT_PATH = os.path.basename(__file__)
LOCAL_DIR = os.path.dirname(__file__)
BUILD_DIR = "pybuild-data"
TMP_DIR = tempfile.gettempdir()
system_status = {"status": "ready", "running_job": None}


def is_windows():
    return platform.system().lower() == "windows"

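# /update: replaces agent source files in place. Each uploaded multipart part
# is saved next to this script under its original filename; if requirements.txt
# is among the uploads, packages are reinstalled afterwards.
# Illustrative call (the host name is a placeholder, not part of this project):
#   curl -F "file=@build_agent.py" http://<agent-host>:9001/update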
@app.route('/update', methods=['POST'])
def update_files():
    if not request.files:
        return {"error": "No files"}, 400

    print("Updating build agent...")
    system_status['status'] = "updating"
    requirements_path = os.path.join(LOCAL_DIR, "requirements.txt")
    needs_install_requirements = False
    updated_files = []
    error_files = []
    for key in request.files:
        uploaded_file = request.files[key]
        if uploaded_file.filename:
            original_filename = uploaded_file.filename
            temp_save_path = os.path.join(LOCAL_DIR, f"{original_filename}.tmp")
            uploaded_file.save(temp_save_path)
            if os.path.getsize(temp_save_path):
                try:
                    backup_path = os.path.join(LOCAL_DIR, original_filename + ".old")
                    local_file_path = os.path.join(LOCAL_DIR, original_filename)
                    os.rename(local_file_path, backup_path)
                    shutil.move(temp_save_path, local_file_path)
                    os.remove(backup_path)
                    needs_install_requirements |= (requirements_path == local_file_path)
                    updated_files.append(original_filename)
                except Exception as e:
                    print(f"Exception updating file ({original_filename}): {e}")
                    error_files.append(original_filename)
            else:
                print(f"Invalid size for {temp_save_path}!")
                error_files.append(original_filename)
                os.remove(temp_save_path)

    if os.path.exists(requirements_path) and needs_install_requirements:
        print("\nInstalling Required Packages...")
        python_exec = "python" if is_windows() else "python3"
        subprocess.run([python_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
                       check=True)

    print("Update complete")
    return jsonify({'updated_files': updated_files, 'error_files': error_files}), 200 if not error_files else 500

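# /restart: spawns a fresh, detached copy of this script, returns 200, then
# sends SIGTERM to the current process.
# Illustrative call (placeholder host):
#   curl http://<agent-host>:9001/restart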
@app.get("/restart")
def restart():
    system_status['status'] = "restarting"
    ZeroconfServer.stop()
    print("=== Restarting ===")
    if os.name == "nt":  # Windows
        DETACHED_PROCESS = 0x00000008
        CREATE_NEW_PROCESS_GROUP = 0x00000200
        # Fix hanging issue using STARTUPINFO (prevents an extra console window pop-up)
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        subprocess.Popen(
            [sys.executable, SCRIPT_PATH],
            creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            startupinfo=si
        )
    else:  # Linux/macOS
        subprocess.Popen(
            [sys.executable, SCRIPT_PATH],
            stdout=sys.stdout,
            stderr=sys.stderr,
            start_new_session=True
        )
    try:
        return jsonify({"message": "=== Restarting ==="}), 200
    finally:
        time.sleep(0.1)
        os.kill(os.getpid(), signal.SIGTERM)

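# /shutdown: returns 200 and then terminates the agent process.
# Illustrative call (placeholder host):
#   curl http://<agent-host>:9001/shutdown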
@app.get("/shutdown")
def shutdown():
    try:
        print("=== SHUTTING DOWN ===")
        system_status['status'] = "shutting_down"
        return jsonify({"message": "Shutting down"}), 200
    finally:
        time.sleep(0.1)
        os.kill(os.getpid(), signal.SIGTERM)

@app.get("/")
def status_page():
    # platform.mac_ver() always returns a tuple, so test its release string rather than the tuple itself
    version = platform.mac_ver()[0] if platform.mac_ver()[0] else platform.version()
    hostname = socket.gethostname()
    return (f"{APP_NAME} - Build Agent {build_agent_version} - \n"
            f"{system_os()} | {cpu_arch()} | {version} | {hostname} | {ZeroconfServer.get_local_ip()}")

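# /status: returns a JSON health report with the agent version, OS, CPU
# architecture, Python version, hostname, IP, port, the running job (if any),
# the size of the build cache, and uptime.
# Illustrative call (placeholder host):
#   curl http://<agent-host>:9001/status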
@app.get("/status")
def status():
    def get_directory_size(directory):
        total_size = 0
        for dirpath, _, filenames in os.walk(directory):  # Recursively go through files
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                if os.path.isfile(filepath):  # Ensure it's a file, not a broken symlink
                    total_size += os.path.getsize(filepath)
        return total_size  # Size in bytes

    def format_size(size_in_bytes):
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_in_bytes < 1024:
                return f"{size_in_bytes:.2f} {unit}"
            size_in_bytes /= 1024
        return f"{size_in_bytes:.2f} PB"  # Fallback so very large sizes don't return None

    hostname = socket.gethostname()
    return jsonify({"status": system_status['status'],
                    "agent_version": build_agent_version,
                    "os": system_os(),
                    "cpu": cpu_arch(),
                    "python": platform.python_version(),
                    "hostname": hostname,
                    "ip": ZeroconfServer.get_local_ip(),
                    "port": app_port,
                    "job_id": system_status['running_job'],
                    "cache_size": format_size(get_directory_size(TMP_DIR)),
                    "uptime": str(datetime.datetime.now() - launch_time)
                    })


def generate_job_id():
    return str(uuid.uuid4()).split('-')[-1]

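# /checkout_git: clones a git repository into the build cache and runs the
# build pipeline on it.
# Illustrative call (placeholder host and repository URL):
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"repo_url": "https://example.com/some/project.git"}' \
#        http://<agent-host>:9001/checkout_git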
@app.route("/checkout_git", methods=['POST'])
def checkout_project():
    start_time = datetime.datetime.now()
    repo_url = request.json.get('repo_url')
    if not repo_url:
        return jsonify({'error': 'Repository URL is required'}), 400

    print("\n========== Checking Out Git Project ==========")

    job_id = generate_job_id()
    # Clone under the shared build cache so /download and /delete can locate the job later
    repo_dir = os.path.join(TMP_DIR, BUILD_DIR, job_id)
    os.makedirs(os.path.dirname(repo_dir), exist_ok=True)
    try:
        system_status['status'] = "cloning_repo"
        subprocess.check_call(['git', 'clone', repo_url, repo_dir])
        system_status['status'] = "ready"
    except subprocess.CalledProcessError as e:
        print(f"Error cloning repo: {e}")
        system_status['status'] = "ready"
        return jsonify({'error': 'Failed to clone repository'}), 500

    return install_and_build(repo_dir, job_id, start_time)

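# /upload: accepts a zipped project under the multipart field "file", extracts
# it into the build cache, and runs the build pipeline.
# Illustrative call (placeholder host and archive name):
#   curl -X POST -F "file=@project.zip" http://<agent-host>:9001/upload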
@app.route('/upload', methods=['POST'])
def upload_project():
    try:
        start_time = datetime.datetime.now()
        if 'file' not in request.files:
            return jsonify({"error": "No file uploaded"}), 400

        system_status['status'] = "processing_files"
        print("\n========== Processing Incoming Project ==========")
        job_id = generate_job_id()
        working_dir = os.path.join(TMP_DIR, BUILD_DIR, job_id)

        file = request.files['file']
        zip_path = os.path.join(working_dir, "source.zip")

        # Save ZIP file
        os.makedirs(working_dir, exist_ok=True)
        file.save(zip_path)

        # Extract ZIP
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            print("Extracting uploaded project zip...")
            zip_ref.extractall(working_dir)

        return install_and_build(working_dir, job_id, start_time)
    except Exception as e:
        print(f"Uncaught error processing job: {e}")
        system_status['status'] = "ready"
        return jsonify({"error": f"Uncaught error processing job: {e}"}), 500

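# Build pipeline: install_and_build() drives a job end to end. It locates the
# project's *.spec files, creates a per-job virtual environment, installs
# pip/PyInstaller plus the project's requirements.txt, then runs PyInstaller
# once per spec file, streaming output to a per-spec log file in the project
# directory.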
def install_and_build(project_path, job_id, start_time):

    # Find the PyInstaller spec file
    spec_files = glob.glob(os.path.join(project_path, "*.spec"))
    if not spec_files:
        return jsonify({"error": "No .spec files found"}), 400

    print(f"Starting new build job - {len(spec_files)} spec files found")
    system_status['status'] = "working"
    system_status['running_job'] = os.path.basename(project_path)

    # Set up virtual environment
    venv_path = os.path.join(project_path, "venv")
    try:
        system_status['status'] = "creating_venv"
        print(f"\n========== Configuring Virtual Environment ({venv_path}) ==========")
        python_exec = "python" if is_windows() else "python3"
        subprocess.run([python_exec, "-m", "venv", venv_path], check=True)

        py_exec = (os.path.join(venv_path, "Scripts", "python.exe") if os.name == "nt"
                   else os.path.join(venv_path, "bin", "python"))
        print("Virtual environment configured")
    except Exception as e:
        print(f"Error setting up virtual environment: {e}")
        system_status['status'] = "ready"
        system_status['running_job'] = None
        shutil.rmtree(project_path, ignore_errors=True)  # rmdir cannot remove a non-empty job directory
        return jsonify({"error": f"Error setting up virtual environment: {e}"}), 500

    # Install requirements
    try:
        system_status['status'] = "installing_packages"
        subprocess.run([py_exec, "-m", "pip", "install", "--upgrade", "pip"], check=True)
        subprocess.run([py_exec, "-m", "pip", "install", "pyinstaller", "pyinstaller_versionfile", "--prefer-binary"],
                       check=True)
        requirements_path = os.path.join(project_path, "requirements.txt")
        if os.path.exists(requirements_path):
            print("\n========== Installing Required Packages ==========")
            subprocess.run([py_exec, "-m", "pip", "install", "--prefer-binary", "-r", requirements_path],
                           check=True)
    except Exception as e:
        print(f"Error installing requirements: {e}")
        system_status['status'] = "ready"
        system_status['running_job'] = None
        shutil.rmtree(project_path, ignore_errors=True)
        return jsonify({"error": f"Error installing requirements: {e}"}), 500

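    # The loop below is equivalent to running, inside the job's venv:
    #   python -m PyInstaller <name>.spec --distpath <job>/dist --workpath <job>/build
    # with PyInstaller's output mirrored to build-<name>.log in the project directory.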
    results = {}
    try:
        for index, spec_file in enumerate(spec_files):
            # Compile with PyInstaller
            system_status['status'] = "compiling"
            print(f"\n========== Compiling spec file {index + 1} of {len(spec_files)} - {spec_file} ==========")
            simple_name = os.path.splitext(os.path.basename(spec_file))[0]
            dist_path = os.path.join(project_path, "dist")
            work_path = os.path.join(project_path, "build")
            log_file_path = os.path.join(project_path, f"build-{simple_name}.log")
            print(f"Log file saved to: {log_file_path}")

            with open(log_file_path, "w") as log_file:
                process = subprocess.Popen(
                    [py_exec, "-m", "PyInstaller", spec_file, "--distpath", dist_path, "--workpath", work_path],
                    text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
                )
                last_line = None
                for line in process.stdout:
                    last_line = line
                    print(line, end="")
                    log_file.write(line)
                    log_file.flush()
                process.wait()
                if process.returncode != 0:
                    raise RuntimeError(
                        f"PyInstaller failed with exit code {process.returncode}. Last line: {str(last_line).strip()}")

            print(f"\n========== Compilation of spec file {spec_file} complete ==========\n")
    except Exception as e:
        print(f"Error compiling project: {e}")
        system_status['status'] = "ready"
        system_status['running_job'] = None
        # Clean up the job directory; os.remove() cannot delete a directory
        shutil.rmtree(project_path, ignore_errors=True)
        return jsonify({"error": f"Error compiling project: {e}"}), 500

    dist_path = os.path.join(project_path, "dist")
    system_status['status'] = "ready"
    system_status['running_job'] = None
    return jsonify({
        "id": job_id,
        "message": "Build completed",
        "spec_files": spec_files,
        "output_folder": dist_path,
        "duration": str(datetime.datetime.now() - start_time),
        "cpu": cpu_arch(),
        "os": system_os(),
        "hostname": socket.gethostname()
    }), 200

def cpu_arch():
    intel64 = 'x64'
    arm64 = 'arm64'
    replacers = [
        ('aarch64', arm64),
        ('amd64', intel64),
        ('x86_64', intel64)
    ]
    arch = platform.machine().lower()
    for (x, y) in replacers:
        arch = arch.replace(x, y)
    return arch


def system_os():
    return platform.system().replace("Darwin", "macOS")

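# /download/<job_id>: zips everything under the job's dist/ folder and returns
# it as an attachment.
# Illustrative call (placeholder host and job ID):
#   curl -o binaries.zip http://<agent-host>:9001/download/<job_id>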
@app.route('/download/<job_id>', methods=['GET'])
def download_binaries(job_id):
    """Handles downloading the compiled PyInstaller binaries for a given job."""
    try:
        # Locate the build directory
        job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)
        dist_path = os.path.join(job_path, "dist")

        if not os.path.exists(dist_path):
            return jsonify({"error": f"No binaries found for ID: {job_id}"}), 404

        # Create a temporary zip file
        tmp_dir = tempfile.gettempdir()
        zip_path = os.path.join(tmp_dir, f"{job_id}_binaries.zip")

        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, _, files in os.walk(dist_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, dist_path)  # Keep correct relative paths
                    zipf.write(file_path, arcname)

        print(f"Created zip file for job {job_id}: {zip_path}")
        return send_file(zip_path, as_attachment=True, download_name=f"{job_id}_binaries.zip")

    except Exception as e:
        print(f"Error processing download: {e}")
        return jsonify({"error": f"Failed to process download: {e}"}), 500

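# /delete/<job_id> removes a single job directory from the build cache;
# /delete_cache wipes the whole cache, recreates an empty one, and returns the
# (now empty) list of cached job IDs.
# Illustrative calls (placeholders):
#   curl http://<agent-host>:9001/delete/<job_id>
#   curl http://<agent-host>:9001/delete_cache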
@app.route('/delete/<job_id>', methods=['GET'])
def delete_project(job_id):
    job_path = os.path.join(TMP_DIR, BUILD_DIR, job_id)

    if not os.path.exists(job_path):
        return jsonify({"error": f"No project found for ID: {job_id}"}), 404
    try:
        shutil.rmtree(job_path)
        print(f"Deleted: {job_path}")
    except FileNotFoundError as e:
        return jsonify({"error": f"No project path found: {e}"}), 404
    except PermissionError as e:
        return jsonify({"error": f"Permission denied: {e}"}), 403
    except Exception as e:
        return jsonify({"error": f"Unknown error: {e}"}), 500
    return "deleted", 200


def job_cache():
    try:
        job_ids = os.listdir(os.path.join(TMP_DIR, BUILD_DIR))
    except Exception:
        return []
    return job_ids


@app.route('/delete_cache')
def delete_cache():
    print(f"Deleting cache - Currently holding {len(job_cache())} jobs in cache")
    build_path = os.path.join(TMP_DIR, BUILD_DIR)
    try:
        shutil.rmtree(build_path)
        os.makedirs(build_path, exist_ok=True)
    except Exception as e:
        print(f"Error deleting {build_path}: {e}")
    return jsonify(job_cache()), 200

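# Entry point: advertises the agent over Zeroconf as _crosspybuilder._tcp.local.
# and serves the Flask app on all interfaces at port 9001. Typically launched
# directly, e.g.:
#   python3 build_agent.py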
if __name__ == "__main__":

    print(f"===== {APP_NAME} Build Agent (v{build_agent_version}) =====")
    ZeroconfServer.configure("_crosspybuilder._tcp.local.", socket.gethostname(), app_port)
    try:
        ZeroconfServer.start()
        app.run(host="0.0.0.0", port=app_port, threaded=True)
    except KeyboardInterrupt:
        pass
    finally:
        ZeroconfServer.stop()