ifc-commit/webapp/main.py
2026-03-25 10:36:30 +01:00

299 lines
10 KiB
Python

"""
ifc-commit web app — FastAPI entry point.
Routes:
GET / — serve index.html
GET /api/health — liveness check
POST /api/preview — fetch and parse a pipeline yaml from the forge
POST /api/run — clone → pipeline → commit/push, stream log lines
"""
import asyncio
import contextlib
import io
import os
import shutil
import subprocess
import tempfile
from pathlib import Path
import httpx
import ifcopenshell
from fastapi import FastAPI, HTTPException
from fastapi.responses import FileResponse, StreamingResponse
from webapp.schema import RepoRequest, EntityRequest, HistoryRequest
from webapp import forge, pipeline
from webapp.pipeline import has_runnable_ops
# ASGI application instance — served by uvicorn as "webapp.main:app".
app = FastAPI(title="ifc-commit")
def serve():
    """Console-script entry point (`ifccommit-web`): run a local dev server."""
    import uvicorn

    uvicorn.run("webapp.main:app", host="127.0.0.1", port=8095, reload=True)
# Only one pipeline run at a time — IFC files are large and CPU-heavy
_run_lock = asyncio.Lock()
TEMPLATES_DIR = __file__.replace("main.py", "templates")
# ---------------------------------------------------------------------------
# Routes
# ---------------------------------------------------------------------------
@app.get("/", response_class=FileResponse)
async def index():
    """Serve the pre-built presentation page from the project root."""
    page = Path(__file__).parent.parent / "index.html"
    if page.exists():
        return FileResponse(str(page), media_type="text/html")
    # The page is generated by the docs build; tell the user how to create it.
    raise HTTPException(status_code=404, detail="Presentation page not built. Run: make html")
@app.get("/app", response_class=FileResponse)
async def app_ui():
    """Serve the interactive UI page from the templates directory."""
    ui_page = TEMPLATES_DIR + "/index.html"
    return FileResponse(ui_page, media_type="text/html")
@app.get("/research", response_class=FileResponse)
async def research():
    """Serve the pre-built research page from the project root."""
    page = Path(__file__).parent.parent / "research.html"
    if page.exists():
        return FileResponse(str(page), media_type="text/html")
    # Generated by the docs build, same as the presentation page.
    raise HTTPException(status_code=404, detail="Research page not built. Run: make html")
@app.get("/api/health")
async def health():
    """Liveness probe: always reports the service as up."""
    status_payload = {"status": "ok"}
    return status_payload
@app.post("/api/yamls")
async def yamls(body: RepoRequest):
    """List yaml pipeline files available in the yaml/ directory of the repo.

    Returns:
        {"files": [...]} as reported by the forge API.

    Raises:
        HTTPException: 404 if the yaml/ directory is missing, otherwise the
            forge's own status code with its response text.
    """
    try:
        files = await forge.list_yaml_files(body.repo, body.branch, body.token)
    except httpx.HTTPStatusError as exc:
        status = exc.response.status_code
        if status == 404:
            # Chain the original error so tracebacks show the forge failure
            # instead of "During handling of the above exception...".
            raise HTTPException(404, "yaml/ directory not found in repository") from exc
        raise HTTPException(status, f"Forge API error: {exc.response.text}") from exc
    return {"files": files}
@app.post("/api/preview")
async def preview(body: RepoRequest):
    """Fetch a pipeline yaml from the forge and return the parsed operations list.

    Returns:
        {"operations": [...]} — each operation serialized via model_dump().

    Raises:
        HTTPException: 404 if the yaml file is missing, 422 if it fails to
            parse, otherwise the forge's own status code.
    """
    try:
        raw = await forge.get_file_content(
            body.repo, body.yaml_file, body.branch, body.token
        )
    except httpx.HTTPStatusError as exc:
        status = exc.response.status_code
        if status == 404:
            # Chain the original error for a readable traceback (B904).
            raise HTTPException(404, f"{body.yaml_file} not found in repository") from exc
        raise HTTPException(status, f"Forge API error: {exc.response.text}") from exc
    try:
        config = pipeline.parse_yaml(raw)
    except Exception as exc:
        raise HTTPException(422, f"Invalid {body.yaml_file}: {exc}") from exc
    return {"operations": [op.model_dump() for op in config.operations]}
@app.post("/api/entities")
async def entities(body: EntityRequest):
    """Shallow-clone the repo and return entity names for the given IFC type.

    If body.location is set, elements are filtered by location via the
    ifcopenshell selector syntax; otherwise all entities of body.ifc_type
    are listed. Unnamed entities are skipped.

    Raises:
        HTTPException: 500 on clone or parse failure.
    """
    loop = asyncio.get_running_loop()
    tmpdir = tempfile.mkdtemp(prefix="ifccommit_ent_")
    repo_dir = f"{tmpdir}/repo"

    def _extract_names() -> list:
        # Blocking, CPU-heavy IFC parsing — must run in the thread pool so
        # the event loop stays responsive while a large model is opened.
        import ifcopenshell.util.selector

        model = ifcopenshell.open(os.path.join(repo_dir, body.file))
        if body.location:
            elements = ifcopenshell.util.selector.filter_elements(
                model, f'IfcElement, location = "{body.location}"'
            )
            return sorted(e.Name for e in elements if e.Name)
        return sorted(e.Name for e in model.by_type(body.ifc_type) if e.Name)

    try:
        await loop.run_in_executor(None, forge.clone_repo_shallow, body.repo, body.token, repo_dir)
        names = await loop.run_in_executor(None, _extract_names)
        return {"names": names}
    except subprocess.CalledProcessError as exc:
        raise HTTPException(500, f"Clone failed: {exc.output}") from exc
    except HTTPException:
        # Let deliberate HTTP errors pass through unchanged (matches
        # the handling in ifc_history).
        raise
    except Exception as exc:
        raise HTTPException(500, str(exc)) from exc
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)
@app.post("/api/ifc-history")
async def ifc_history(body: HistoryRequest):
    """Shallow-clone the repo, read the history json file, and return its records."""
    import json

    loop = asyncio.get_running_loop()
    workdir = tempfile.mkdtemp(prefix="ifccommit_hist_")
    clone_dest = f"{workdir}/repo"
    try:
        await loop.run_in_executor(
            None, forge.clone_repo_shallow, body.repo, body.token, clone_dest
        )
        history_path = os.path.join(clone_dest, body.file)
        # A missing history file is not an error — just no records yet.
        records = []
        if os.path.exists(history_path):
            with open(history_path) as fh:
                records = json.load(fh)
        return {"records": records, "repo": body.repo, "branch": body.branch}
    except subprocess.CalledProcessError as exc:
        raise HTTPException(500, f"Clone failed: {exc.output}")
    except HTTPException:
        raise
    except Exception as exc:
        raise HTTPException(500, str(exc))
    finally:
        shutil.rmtree(workdir, ignore_errors=True)
@app.post("/api/run")
async def run(body: RepoRequest):
    """Execute the full clone → pipeline → commit/push cycle.

    The response body is a plain-text stream of newline-terminated log
    lines, produced live while the pipeline runs.
    """
    log_stream = _run_stream(body)
    return StreamingResponse(log_stream, media_type="text/plain")
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
class _LogWriter:
"""
A file-like object that splits writes on newlines and forwards each
complete line to a log callback. Used to capture print() from cmd_*.
"""
def __init__(self, log_fn):
self._log = log_fn
self._buf = ""
def write(self, s: str) -> None:
self._buf += s
while "\n" in self._buf:
line, self._buf = self._buf.split("\n", 1)
if line:
self._log(line)
def flush(self) -> None:
if self._buf.strip():
self._log(self._buf)
self._buf = ""
# ---------------------------------------------------------------------------
# Streaming pipeline
# ---------------------------------------------------------------------------
async def _run_stream(body: RepoRequest):
    """Async generator that runs the full pipeline and yields log lines.

    Steps: fetch + parse the pipeline yaml, clone the repository, run the
    pipeline in a worker thread (forwarding its log output through an
    asyncio.Queue), then commit and push the results. Any failure is
    reported as an "ERROR: ..." line and ends the stream early.
    """
    if _run_lock.locked():
        yield "ERROR: another run is already in progress — try again shortly\n"
        return
    async with _run_lock:
        loop = asyncio.get_running_loop()
        tmpdir = tempfile.mkdtemp(prefix="ifccommit_")
        # git clone creates its own directory — give it a path that doesn't exist yet
        repo_dir = f"{tmpdir}/repo"
        try:
            # 1. Fetch and parse pipeline yaml
            yield f"Fetching {body.yaml_file}...\n"
            try:
                raw = await forge.get_file_content(
                    body.repo, body.yaml_file, body.branch, body.token
                )
            except httpx.HTTPStatusError as exc:
                yield f"ERROR: could not fetch {body.yaml_file} ({exc.response.status_code})\n"
                return
            try:
                config = pipeline.parse_yaml(raw)
            except Exception as exc:
                # ": " separator keeps the filename and the parse error
                # readable (matches the /api/preview error text).
                yield f"ERROR: invalid {body.yaml_file}: {exc}\n"
                return
            yield f"Found {len(config.operations)} operation(s)\n"
            if not has_runnable_ops(config):
                yield "No runnable operations — load history via the panel below.\n"
                return
            # 2. Clone the repository
            yield "Cloning repository...\n"
            try:
                await loop.run_in_executor(
                    None, forge.clone_repo, body.repo, body.token, repo_dir
                )
            except subprocess.CalledProcessError as exc:
                yield f"ERROR: git clone failed\n{exc.output}\n"
                return
            yield "Clone OK\n"
            # 3. Run the pipeline (synchronous — in thread pool).
            #    Log lines AND stdout from cmd_* functions are forwarded to the
            #    client via an asyncio.Queue.
            queue: asyncio.Queue[str | None] = asyncio.Queue()

            def log(line: str) -> None:
                # Called from the worker thread — hand lines to the loop safely.
                loop.call_soon_threadsafe(queue.put_nowait, line + "\n")

            def _run() -> bool:
                # Capture print() output from cmd_* and forward to log.
                # Safe because _run_lock ensures only one thread does this.
                writer = _LogWriter(log)
                with contextlib.redirect_stdout(writer):
                    ok = pipeline.run_pipeline(
                        config,
                        repo_dir,
                        log,
                        body.overrides,
                        body.id_overrides,
                        body.element_overrides,
                        yaml_path=body.yaml_file,
                    )
                writer.flush()
                loop.call_soon_threadsafe(queue.put_nowait, None)  # sentinel
                return ok

            future = loop.run_in_executor(None, _run)
            # Drain the queue until the worker posts the None sentinel.
            while True:
                line = await queue.get()
                if line is None:
                    break
                yield line
            ok = await future
            if not ok:
                yield "Pipeline completed with errors — skipping commit.\n"
                return
            # 4. Commit and push results
            yield "Committing results to forge...\n"
            try:
                committed = await loop.run_in_executor(
                    None,
                    forge.commit_and_push,
                    repo_dir,
                    "chore: ifccommit pipeline results [skip ci]",
                )
            except subprocess.CalledProcessError as exc:
                yield f"ERROR: git commit/push failed\n{exc.output}\n"
                return
            if committed:
                yield "Done — results committed and pushed.\n"
            else:
                yield "Done — no changes to commit (outputs unchanged).\n"
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)