import os
import subprocess
from pathlib import Path
import sys
import shutil
import time
import json
import math
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed

from remake_config import *

# Serializes console output from the compile worker threads.
print_lock = threading.Lock()
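# `remake_config` is expected to provide the names used throughout this file
# (CC, CXX, CFLAGS, CXXFLAGS, SRC_DIRS, INCLUDE_DIRS, LIB_DIRS, BUILD_DIR,
# TARGET, LOG_FILE, CACHE_FILE, AUTO_LIBS, AUTO_INCLUDES). A minimal sketch of
# such a remake_config.py — every concrete value below is illustrative, not the
# project's actual configuration:
#
#     from pathlib import Path
#     CC, CXX = "gcc", "g++"
#     CFLAGS, CXXFLAGS = ["-O2"], ["-std=c++17", "-O2"]
#     SRC_DIRS, INCLUDE_DIRS, LIB_DIRS = ["src"], ["include"], ["lib"]
#     BUILD_DIR = Path("build")
#     TARGET = BUILD_DIR / "app"
#     LOG_FILE = BUILD_DIR / "remake.log"
#     CACHE_FILE = BUILD_DIR / "remake_cache.json"
#     AUTO_LIBS, AUTO_INCLUDES = [], []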
# ========== COLOR UTILS ==========
class Colors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    BOLD = '\033[1m'
    RESET = '\033[0m'
    GRAY = '\033[90m'


def color(text, style): return f"{style}{text}{Colors.RESET}"
def banner(title): print(color(f"\n╔═ {title} ═══════════════════════════════╗", Colors.BOLD + Colors.OKBLUE))
def info(msg): print(color(msg, Colors.OKGREEN))
def warn(msg): print(color(f"⚠️ {msg}", Colors.WARNING))
def error(msg): print(color(msg, Colors.FAIL))


def log(msg: str):
    with LOG_FILE.open("a", encoding="utf-8") as f:
        f.write(msg + "\n")

# ========== CACHE ==========
def load_cache():
    try:
        if CACHE_FILE.exists():
            return json.loads(CACHE_FILE.read_text())
    except json.decoder.JSONDecodeError:
        error("Failed to read cache file.")
    return {}


def save_cache(data):
    CACHE_FILE.write_text(json.dumps(data, indent=2))
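# The cache is a flat JSON object mapping each AUTO_LIBS / AUTO_INCLUDES name
# to the path where it was last found, so later builds can skip the directory
# walk. Illustrative contents (names and paths are made up):
#
#     {
#       "glfw3": "external/lib/libglfw3.a",
#       "stb_image": "external/include/stb_image"
#     }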

# ========== PACKAGE DISCOVERY ==========
class AutoLib:
    def __init__(self, name):
        self.name = name
        self.path = None

    def find(self, search_paths, cache):
        if self.name in cache:
            self.path = Path(cache[self.name])
            return self.path.exists()
        for path in search_paths:
            for root, _, files in os.walk(path):
                for ext in [".lib", ".a"]:
                    fname = f"lib{self.name}{ext}"
                    if fname in files:
                        self.path = Path(root) / fname
                        cache[self.name] = str(self.path)
                        info(f"Found {self.name} at {self.path}")
                        return True
        return False


class AutoInclude:
    def __init__(self, name):
        self.name = name
        self.path = None

    def find(self, search_paths, cache):
        if self.name in cache:
            self.path = Path(cache[self.name])
            return self.path.exists()
        for path in search_paths:
            for root, _, files in os.walk(path):
                if f"{self.name}.h" in files or Path(root).name == self.name:
                    self.path = Path(root)
                    cache[self.name] = str(self.path)
                    info(f"Found header {self.name} at {self.path}")
                    return True
        return False


def resolve_packages():
    cache = load_cache()
    extra_link_paths, resolved_libs, extra_includes = [], [], []
    for name in AUTO_LIBS:
        lib = AutoLib(name)
        if lib.find(LIB_DIRS, cache):
            extra_link_paths.append(str(lib.path.parent))
            resolved_libs.append(f"-l{lib.name}")
        else:
            error(f"Library {lib.name} not found.")
            sys.exit(1)
    for name in AUTO_INCLUDES:
        inc = AutoInclude(name)
        if inc.find(INCLUDE_DIRS, cache):
            extra_includes.append(str(inc.path))
    save_cache(cache)
    return list(set(extra_link_paths)), resolved_libs, list(set(extra_includes))

# ========== BUILD SYSTEM ==========
def find_cpp_files():
    extensions = [".c", ".cpp"]
    files = []
    for folder in SRC_DIRS:
        for ext in extensions:
            for path in Path(folder).rglob(f"*{ext}"):
                files.append(path)
    return files

def obj_path(source):
    source_abs = source.resolve()
    for src_root in SRC_DIRS:
        try:
            base = Path(src_root).resolve()
            rel = source_abs.relative_to(base)
            return BUILD_DIR / rel.with_suffix(".o")
        except ValueError:
            continue
    # Fallback: flatten the full source path into a single object name under BUILD_DIR.
    return BUILD_DIR / (str(source).replace("/", "_").replace("\\", "_") + ".o")


def dep_path(obj): return obj.with_suffix(".d")

def parse_dep_file(dep_file):
    if not dep_file.exists():
        return []
    deps = []
    with dep_file.open() as f:
        for line in f:
            line = line.strip().replace("\\", "")
            if ":" in line:
                line = line.split(":", 1)[1]
            deps.extend(line.strip().split())
    return deps
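# parse_dep_file() reads the Makefile-style dependency files that GCC/Clang
# emit for -MMD -MP (see compile_source below). An illustrative build/main.d:
#
#     build/main.o: src/main.cpp include/app.h \
#      include/util.h
#     include/app.h:
#     include/util.h:
#
# The function strips the "target:" prefix and the line continuations and
# returns the remaining paths, which build() stat()s against the object file.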

def compile_source(source, obj, includes):
    obj.parent.mkdir(parents=True, exist_ok=True)
    is_cpp = source.suffix == ".cpp"
    compiler = CXX if is_cpp else CC
    flags = CXXFLAGS if is_cpp else CFLAGS
    # -MMD/-MP make the compiler write a .d file next to the object, which
    # build() uses to detect header changes on the next run.
    cmd = [compiler, *flags, *[f"-I{inc}" for inc in includes], "-MMD", "-MP", "-c", str(source), "-o", str(obj)]
    try:
        with print_lock:
            print(f"\r{color('🔨 Compiling:', Colors.OKCYAN)} {source}{' ' * 40}", end="", flush=True)
        subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        log(f"[COMPILE] {' '.join(cmd)}")
        return True
    except subprocess.CalledProcessError as e:
        with print_lock:
            print()  # move to a new line before showing the error
            error(f"Failed to compile {source}")
            print("🔧 Command:", " ".join(cmd))
            print(e.stderr.decode())
        log(f"[ERROR] {' '.join(cmd)}\n{e.stderr.decode()}")
        return False
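# For reference, a command assembled by compile_source() looks roughly like the
# following (compiler, flags, and paths all come from remake_config, so these
# values are only illustrative):
#
#     g++ -std=c++17 -O2 -Iinclude -Iexternal/include -MMD -MP \
#         -c src/main.cpp -o build/main.o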

def link_objects(obj_files, link_dirs, libs):
    cmd = [CXX, *map(str, obj_files), "-o", str(TARGET), *[f"-L{p}" for p in link_dirs], *libs]
    try:
        print(f"{color('📦 Linking:', Colors.OKBLUE)} {TARGET}")
        subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        log(f"[LINK] {' '.join(cmd)}")
    except subprocess.CalledProcessError as e:
        error("Linking failed.")
        print("🔧 Command:", " ".join(cmd))
        print(e.stderr.decode())
        log(f"[ERROR] {' '.join(cmd)}\n{e.stderr.decode()}")
        sys.exit(1)
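# Likewise, a link command produced above has the form (illustrative values):
#
#     g++ build/main.o build/util.o -o build/app -Lexternal/lib -lglfw3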

def build():
    build_start = time.time()
    banner("🚀 Building Project")
    cpp_files = find_cpp_files()
    obj_files = []
    link_dirs, libs, extra_includes = resolve_packages()
    all_includes = INCLUDE_DIRS + extra_includes

    compile_tasks = {}
    status = {}
    status_lock = threading.Lock()
    last_status_snapshot = {}

    def print_status():
        with print_lock:
            # Take a snapshot under the lock so worker threads can't mutate
            # `status` while we iterate over it.
            with status_lock:
                snapshot = {k: v for k, v in status.items() if v == "compiling"}
            # Avoid redrawing if nothing changed
            if snapshot == last_status_snapshot:
                return
            last_status_snapshot.clear()
            last_status_snapshot.update(snapshot)
            # Clear the panel area only, not the entire terminal
            term_size = shutil.get_terminal_size((80, 20))
            max_name_len = 24
            grid_cols = max(1, term_size.columns // (max_name_len + 6))
            active = list(snapshot.keys())
            rows = math.ceil(len(active) / grid_cols)
            grid = [[""] * grid_cols for _ in range(rows)]
            for i, src in enumerate(active):
                col = i % grid_cols
                row = i // grid_cols
                name = src.name[:max_name_len].ljust(max_name_len)
                grid[row][col] = f"[🔧] {name}"
            # Draw the status block
            sys.stdout.write("\033[s")     # Save cursor
            sys.stdout.write("\033[1;1H")  # Move to top-left
            sys.stdout.write("\033[0J")    # Clear below
            print(color(f"🔨 Compiling ({len(active)} jobs)...", Colors.OKCYAN))
            for row in grid:
                if any(cell.strip() for cell in row):
                    print(" ".join(row))
            print(f"\n{color('⏱ Elapsed:', Colors.GRAY)} {time.time() - build_start:.1f}s")
            sys.stdout.write("\033[u")     # Restore cursor
            sys.stdout.flush()

    def compile_and_track(source, obj):
        with status_lock:
            status[source] = "compiling"
        print_status()
        result = compile_source(source, obj, all_includes)
        with status_lock:
            status.pop(source, None)
        print_status()
        return result
    with ThreadPoolExecutor() as executor:
        for source in cpp_files:
            obj = obj_path(source)
            dep = dep_path(obj)
            obj_mtime = obj.stat().st_mtime if obj.exists() else 0
            # Rebuild when the object is missing, the source is newer, or any
            # header recorded in the .d file is newer than the object.
            needs_build = not obj.exists() or source.stat().st_mtime > obj_mtime
            if not needs_build and dep.exists():
                for dep_file in parse_dep_file(dep):
                    try:
                        if Path(dep_file).exists() and Path(dep_file).stat().st_mtime > obj_mtime:
                            needs_build = True
                            break
                    except Exception:
                        needs_build = True
                        break
            if needs_build:
                future = executor.submit(compile_and_track, source, obj)
                compile_tasks[future] = source
            obj_files.append(obj)

        print_status()
        for future in as_completed(compile_tasks):
            if not future.result():
                sys.exit(1)

    banner("📦 Linking")
    link_objects(obj_files, link_dirs, libs)

    banner("✅ Build Complete")
    print(color(f"⏱ Build time: {time.time() - build_start:.2f}s", Colors.OKCYAN))


def run():
    build()
    if TARGET.exists():
        banner("🚀 Running")
        try:
            subprocess.run(str(TARGET), check=True)
            log(f"[RUN] Executed {TARGET.name} successfully.")
        except subprocess.CalledProcessError as e:
            error("Program exited with error.")
            log(f"[ERROR] Runtime crash\n{e}")
            sys.exit(e.returncode)
    else:
        error("Executable not found.")


def clean():
    banner("🧹 Cleaning")
    if BUILD_DIR.exists():
        shutil.rmtree(BUILD_DIR)
        info("Build directory removed.")
    if LOG_FILE.exists():
        LOG_FILE.unlink()
        info("Log file cleared.")
    if CACHE_FILE.exists():
        CACHE_FILE.unlink()
        info("Cache file cleared.")


def ensure_directories():
    BUILD_DIR.mkdir(parents=True, exist_ok=True)
    LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
    CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    TARGET.parent.mkdir(parents=True, exist_ok=True)  # the output directory must exist before linking
    for dir in SRC_DIRS:
        Path(dir).mkdir(parents=True, exist_ok=True)
    for inc in INCLUDE_DIRS:
        Path(inc).mkdir(parents=True, exist_ok=True)
    for lib in LIB_DIRS:
        Path(lib).mkdir(parents=True, exist_ok=True)

# ========== ENTRY ==========
if __name__ == "__main__":
    start = time.time()
    ensure_directories()
    # Reset the log only after its parent directory is guaranteed to exist.
    LOG_FILE.write_text("", encoding="utf-8")
    try:
        if "clean" in sys.argv or "clear" in sys.argv:
            clean()
        elif "run" in sys.argv:
            run()
        else:
            build()
    except KeyboardInterrupt:
        error("Interrupted by user.")
        log("[ERROR] Interrupted by user.")
        sys.exit(1)
    print(color(f"\n⏱ Total time: {time.time() - start:.2f}s", Colors.BOLD + Colors.OKGREEN))
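# Typical invocations, assuming the script is run from the project root next to
# remake_config.py:
#
#     python remake.py          # incremental build
#     python remake.py run      # build, then execute TARGET
#     python remake.py clean    # remove build dir, log, and cache ("clear" is an alias)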