Initial commit: MLflow dashboard project

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-19 17:21:51 +09:00
commit 15c2dc95f6
16 changed files with 536 additions and 0 deletions

0
routers/__init__.py Normal file
View File

21
routers/experiments.py Normal file
View File

@@ -0,0 +1,21 @@
from typing import List, Optional
from fastapi import APIRouter, Query
from schemas import ExperimentSummary, RunSummary
from services import mlflow_service
router = APIRouter()


@router.get("/experiments", response_model=List[ExperimentSummary])
def list_experiments(
    tracking_uri: Optional[str] = Query(None, description="MLflow Tracking URI"),
):
    """List all experiments known to the MLflow tracking server.

    Args:
        tracking_uri: Optional MLflow tracking server URI; the service layer
            decides the fallback when it is omitted.

    Returns:
        A list of ``ExperimentSummary`` objects.
    """
    experiments = mlflow_service.get_experiments(tracking_uri)
    return experiments
@router.get("/experiments/{exp_id}/runs", response_model=List[RunSummary])
def list_runs(
    exp_id: str,
    tracking_uri: Optional[str] = Query(None, description="MLflow Tracking URI"),
):
    """List the runs recorded under a single experiment.

    Args:
        exp_id: Identifier of the experiment whose runs are requested.
        tracking_uri: Optional MLflow tracking server URI.

    Returns:
        A list of ``RunSummary`` objects for the experiment.
    """
    runs = mlflow_service.get_runs(tracking_uri, exp_id)
    return runs

23
routers/runs.py Normal file
View File

@@ -0,0 +1,23 @@
from fastapi import APIRouter, Query
from typing import Optional
from schemas import RunDetail, MLflowLink
from services import mlflow_service
router = APIRouter()


@router.get("/runs/{run_id}", response_model=RunDetail)
def get_run(
    run_id: str,
    tracking_uri: Optional[str] = Query(None, description="MLflow Tracking URI"),
):
    """Fetch the full detail record for a single MLflow run.

    Args:
        run_id: Identifier of the run to look up.
        tracking_uri: Optional MLflow tracking server URI.

    Returns:
        A ``RunDetail`` describing the run.
    """
    detail = mlflow_service.get_run_detail(tracking_uri, run_id)
    return detail
@router.get("/runs/{run_id}/mlflow-link", response_model=MLflowLink)
def get_mlflow_link(
    run_id: str,
    tracking_uri: Optional[str] = Query(None, description="MLflow Tracking URI"),
):
    """Build a deep link into the MLflow UI for the given run.

    Args:
        run_id: Identifier of the run to link to.
        tracking_uri: Optional MLflow tracking server URI.

    Returns:
        A mapping with a single ``url`` key, validated as ``MLflowLink``.
    """
    link = mlflow_service.get_mlflow_link(tracking_uri, run_id)
    return {"url": link}

73
routers/serve.py Normal file
View File

@@ -0,0 +1,73 @@
import os
import subprocess
import uuid
from typing import Dict, List

from fastapi import APIRouter, HTTPException

from schemas import ServeRequest, ServeStatus
router = APIRouter()

# In-memory store for serving processes.
# Maps a short serve id -> dict with "id", "model_uri", "port", "pid" and the
# live subprocess.Popen handle under "process" (see start_serve below).
# NOTE(review): this state is per-process and lost on restart — presumably
# acceptable for a single-worker dev dashboard; confirm before scaling out.
_serving_processes: Dict[str, Dict] = {}  # type: Dict[str, Dict]
@router.post("/serve", response_model=ServeStatus)
def start_serve(req: ServeRequest):
    """Launch ``mlflow models serve`` for the requested model as a subprocess.

    Args:
        req: Serve request carrying ``model_uri``, ``port`` and an optional
            ``tracking_uri``.

    Returns:
        A ``ServeStatus`` for the newly started process (status "running").

    Raises:
        HTTPException: 500 if the ``mlflow`` CLI is not on PATH.
    """
    serve_id = uuid.uuid4().hex[:8]
    cmd = [
        "mlflow", "models", "serve",
        "-m", req.model_uri,
        "-p", str(req.port),
        # Serve from the local environment. "--no-conda" is deprecated and,
        # crucially, the old code appended "--env-manager local" *in addition*
        # to "--no-conda" when a tracking URI was given — the MLflow CLI
        # rejects that combination, so those requests always failed.
        "--env-manager", "local",
    ]
    # BUG FIX: the tracking URI was previously never forwarded to the child
    # process at all, so model URIs such as "runs:/..." could not be resolved.
    # Pass it through the MLFLOW_TRACKING_URI environment variable, which the
    # MLflow CLI honors.
    env = os.environ.copy()
    if req.tracking_uri:
        env["MLFLOW_TRACKING_URI"] = req.tracking_uri
    try:
        proc = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
        )
    except FileNotFoundError:
        raise HTTPException(status_code=500, detail="mlflow CLI not found")
    _serving_processes[serve_id] = {
        "id": serve_id,
        "model_uri": req.model_uri,
        "port": req.port,
        "pid": proc.pid,
        "process": proc,
    }
    return ServeStatus(
        id=serve_id,
        model_uri=req.model_uri,
        port=req.port,
        pid=proc.pid,
        status="running",
    )
@router.get("/serve", response_model=List[ServeStatus])
def list_serve():
    """List every serving process started through this API.

    A process whose ``poll()`` returns ``None`` is reported as "running";
    anything else (it has an exit code) is reported as "stopped".

    Returns:
        A list of ``ServeStatus`` objects, one per tracked process.
    """
    return [
        ServeStatus(
            id=serve_id,
            model_uri=entry["model_uri"],
            port=entry["port"],
            pid=entry["pid"],
            status="running" if entry["process"].poll() is None else "stopped",
        )
        for serve_id, entry in _serving_processes.items()
    ]
@router.delete("/serve/{serve_id}")
def stop_serve(serve_id: str):
    """Stop a serving subprocess previously started via POST /serve.

    Args:
        serve_id: Identifier returned by ``start_serve``.

    Returns:
        A confirmation message naming the stopped model URI.

    Raises:
        HTTPException: 404 if ``serve_id`` is unknown.
    """
    if serve_id not in _serving_processes:
        raise HTTPException(status_code=404, detail="Serve process not found")
    entry = _serving_processes.pop(serve_id)
    proc = entry["process"]
    if proc.poll() is None:
        proc.terminate()
        # BUG FIX: the old code never wait()ed on the terminated child, so it
        # lingered as a zombie until the server exited. Reap it, escalating to
        # SIGKILL if it ignores SIGTERM.
        try:
            proc.wait(timeout=5)
        except subprocess.TimeoutExpired:
            proc.kill()
            proc.wait()
    model_uri = entry["model_uri"]
    return {"message": f"Stopped serving {model_uri}"}

11
routers/train.py Normal file
View File

@@ -0,0 +1,11 @@
from fastapi import APIRouter, HTTPException
router = APIRouter()


@router.post("/train")
def start_train():
    """Placeholder endpoint for triggering a training job.

    Always fails with HTTP 501 until the integration with the training
    server is implemented.

    Raises:
        HTTPException: Always, with status 501.
    """
    not_implemented = HTTPException(
        status_code=501,
        detail="Train trigger is not implemented yet. Will be connected to training server.",
    )
    raise not_implemented