import os
import shutil
import uuid

from fastapi import APIRouter, Depends, File, UploadFile

from ...models import Job, JobStatus
from ...job_manager import save_job_metadata
from .models import JobParams

router = APIRouter()
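
# Container-local data directories (assumed to be bind-mounted volumes in the
# deployed image; get_job_manager() below ensures they exist on first use).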
UPLOAD_DIR = "/app/data/uploads"
OUTPUT_DIR = "/app/data/outputs"
JOB_QUEUE_DIR = "/app/data/job_queue"


def get_job_manager():
    # Placeholder for a more robust job manager; for now it just exposes
    # the directories the upload handler needs.
    # Ensure the directories exist so a first-run upload doesn't fail.
    for directory in (UPLOAD_DIR, OUTPUT_DIR, JOB_QUEUE_DIR):
        os.makedirs(directory, exist_ok=True)
    return {
        "upload_dir": UPLOAD_DIR,
        "output_dir": OUTPUT_DIR,
        "job_queue_dir": JOB_QUEUE_DIR,
    }
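
# In tests, the directory provider can be swapped out via FastAPI's
# dependency_overrides mechanism (a sketch; `app` and the temp paths here
# are assumptions, not part of this module):
#
#   app.dependency_overrides[get_job_manager] = lambda: {
#       "upload_dir": "/tmp/test/uploads",
#       "output_dir": "/tmp/test/outputs",
#       "job_queue_dir": "/tmp/test/job_queue",
#   }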


@router.post("/upload/")
async def upload_mesh_file(
    file: UploadFile = File(...),
    params: JobParams = Depends(),
    job_manager: dict = Depends(get_job_manager),
):
    """
    Accept an uploaded mesh file, create a queued job, and return its ID.

    The file is saved under the upload directory with the job ID prefixed
    to its name; a matching DXF output path is reserved for the worker.
    """
    job_id = uuid.uuid4()
    # Use only the basename so a crafted filename can't escape the upload dir.
    safe_name = os.path.basename(file.filename or "upload")
    input_path = os.path.join(job_manager["upload_dir"], f"{job_id}_{safe_name}")
    output_path = os.path.join(job_manager["output_dir"], f"{job_id}_curves.dxf")

    # Stream the upload to disk rather than buffering the whole file in memory.
    with open(input_path, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    # Create and persist the initial job metadata.
    job = Job(
        id=job_id,
        feature_id="dxf_layered_curves",
        filename=safe_name,
        input_path=input_path,
        output_path=output_path,
        params=params.dict(),  # Pydantic v1; use params.model_dump() on v2.
        status=JobStatus.QUEUED,
        message=f"File '{safe_name}' uploaded, job queued.",
    )
    save_job_metadata(job)

    # Drop a trigger file so the worker picks up the queued job.
    trigger_path = os.path.join(job_manager["job_queue_dir"], f"{job_id}.trigger")
    with open(trigger_path, "w") as f:
        f.write(str(job_id))

    return {"job_id": str(job.id), "filename": job.filename, "status": job.status.value}