Skip to content

Commit

Permalink
refactored dataset routes
Browse files Browse the repository at this point in the history
  • Loading branch information
VukW committed Oct 3, 2024
1 parent 04f8c11 commit d617a04
Show file tree
Hide file tree
Showing 6 changed files with 237 additions and 208 deletions.
2 changes: 1 addition & 1 deletion cli/medperf/web_ui/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from medperf.decorators import clean_except
from medperf.ui.web_ui_proxy import WebUIProxy
from medperf.web_ui.common import custom_exception_handler
from medperf.web_ui.datasets.routes import router as datasets_router
from medperf.web_ui.datasets import router as datasets_router
from medperf.web_ui.benchmarks.routes import router as benchmarks_router
from medperf.web_ui.mlcubes.routes import router as mlcubes_router
from medperf.web_ui.yaml_fetch.routes import router as yaml_fetch_router
Expand Down
12 changes: 12 additions & 0 deletions cli/medperf/web_ui/datasets/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""Dataset web-UI package: aggregates all dataset sub-routers into one router."""
from fastapi import APIRouter
from .routes_submit import router as submit_router
from .routes_prepare import router as prepare_router
from .routes_operational import router as operational_router
from .routes import router as ui_router

# Single entry-point router exposed to the application (mounted in web_ui/app.py).
router = APIRouter()

# Registration order defines route-matching precedence in FastAPI.
for _sub_router in (submit_router, prepare_router, operational_router, ui_router):
    router.include_router(_sub_router)
210 changes: 3 additions & 207 deletions cli/medperf/web_ui/datasets/routes.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,17 @@
import logging
import uuid
from dataclasses import dataclass
from typing import Dict, Optional
import asyncio as aio

import yaml
from fastapi import APIRouter, Form
from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse, StreamingResponse
from fastapi import Request
from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel
from fastapi.responses import HTMLResponse
from fastapi import Request, APIRouter

from medperf import config
from medperf.account_management import get_medperf_user_data
from medperf.commands.dataset.prepare import DataPreparation
from medperf.commands.dataset.set_operational import DatasetSetOperational
from medperf.commands.dataset.submit import DataCreation
from medperf.entities.cube import Cube
from medperf.entities.dataset import Dataset
from medperf.entities.benchmark import Benchmark
from medperf.utils import dict_pretty_format
from medperf.web_ui.common import templates, sort_associations_display

router = APIRouter()
logger = logging.getLogger(__name__)


@dataclass
class _DatasetDraft:
    """A not-yet-submitted dataset creation, held in memory between requests."""
    # CLI flow object that performs the actual upload once approved.
    preparation: DataCreation
    # Preview dict shown to the user before they confirm submission.
    submission_dict: dict
    # UUID string identifying this draft inside `_drafts`.
    draft_id: str


# stores some draft data for the dataset creation form
# NOTE(review): module-level state — lost on restart and not shared across
# worker processes; confirm single-worker deployment.
_drafts: Dict[str, _DatasetDraft] = {}
router = APIRouter()


@router.get("/ui", response_class=HTMLResponse)
Expand Down Expand Up @@ -76,184 +53,3 @@ def dataset_detail_ui(request: Request, dataset_id: int):
"benchmark_associations": benchmark_associations,
"benchmarks": benchmarks
})


@router.get("/ui/create", response_class=HTMLResponse)
def create_dataset_ui(request: Request):
    """Render the dataset-creation form."""
    # Benchmarks populate the dropdown used to pick the target benchmark.
    context = {"request": request, "benchmarks": Benchmark.all()}
    return templates.TemplateResponse("create_dataset.html", context)


@router.post("/draft/generate", response_class=JSONResponse)
async def generate_draft(
    benchmark: int = Form(...),
    name: str = Form(...),
    description: str = Form(...),
    location: str = Form(...),
    data_path: str = Form(...),
    labels_path: str = Form(...)
):
    """Build an unsaved dataset-submission draft and return its preview.

    The draft is kept in the module-level `_drafts` registry under a fresh
    UUID until it is submitted or declined.
    """
    new_draft_id = str(uuid.uuid4())
    # Reuse the CLI's DataCreation flow; approved=False so nothing is uploaded yet.
    creation = DataCreation(
        benchmark_uid=benchmark,
        prep_cube_uid=None,
        data_path=data_path,
        labels_path=labels_path,
        metadata_path=None,  # metadata is not collected through the web form
        name=name,
        description=description,
        location=location,
        approved=False,
        submit_as_prepared=False,
        for_test=False,
    )
    preview = creation.prepare_dict(False)
    _drafts[new_draft_id] = _DatasetDraft(
        preparation=creation,
        submission_dict=preview,
        draft_id=new_draft_id,
    )
    return {"data": preview, "draft_id": new_draft_id}


@router.get("/draft/submit", response_class=RedirectResponse)
async def submit_draft(
    draft_id: str,
):
    """Approve a pending draft, upload it, and redirect to the dataset page."""
    pending = _drafts[draft_id]  # KeyError (-> 500) if the draft id is unknown
    creation = pending.preparation
    creation.approved = True  # the user confirmed submission in the UI

    uploaded = creation.upload()
    creation.to_permanent_path(uploaded)
    creation.write(uploaded)
    return RedirectResponse(f"/datasets/ui/display/{uploaded['id']}")


@router.get("/draft/decline", response_class=RedirectResponse)
async def decline_draft(draft_id: str):
    """Discard a pending submission draft and return to the datasets list."""
    _drafts.pop(draft_id)  # KeyError (-> 500) if the draft id is unknown
    return RedirectResponse("/datasets/ui")


# In-progress DataPreparation objects, keyed by dataset id.
# NOTE(review): module-level state — lost on restart, not shared across workers.
_drafts_prepare: dict[int, DataPreparation] = {}


@router.get("/ui/prepare", response_class=HTMLResponse)
async def prepare_ui(dataset_id: int, request: Request):
    """Render the dataset-preparation page for the given dataset."""
    context = {"request": request, "dataset_id": dataset_id}
    return templates.TemplateResponse("dataset_prepare.html", context)


@router.get("/prepare_draft/generate", response_class=StreamingResponse)
async def prepare_generate(
    dataset_id: int
):
    """Start dataset-preparation setup and stream its log output as plain text.

    Creates a DataPreparation draft, stores it in `_drafts_prepare`, launches
    the cube download/setup in a background task and streams UI messages back.

    NOTE(review): this module-level name is redefined by the
    ask_send_approval handler below (F811 shadowing); the route still works
    because FastAPI registers handlers at decoration time.
    """
    preparation = DataPreparation(dataset_id, approve_sending_reports=False)
    _drafts_prepare[dataset_id] = preparation

    preparation.get_dataset()  # prints nothing
    preparation.validate()  # may run Invalid Exception

    async def run_preparation():
        with preparation.ui.proxy():
            await run_in_threadpool(preparation.get_prep_cube)  # docker pull logs
            preparation.setup_parameters()  # Prints nothing

    # NOTE(review): the task reference is discarded; asyncio keeps only a weak
    # reference, so the task may be garbage-collected before it finishes.
    _ = aio.create_task(run_preparation())

    def message_stream():
        for msg in preparation.ui.get_message_generator():
            yield msg + "\n"  # Yield each message as a chunk

    # if preparation.should_prompt_for_report_sending_approval():
    #     preparation.prompt_for_report_sending_approval()  # Asks for an approval and stores it in the preparation object
    return StreamingResponse(message_stream(), media_type="text/plain")


class ReportSendApprovalRequest(BaseModel):
    """Response model: whether the UI must ask the user to approve report sending."""
    # Dataset this approval question refers to.
    dataset_id: int
    # True when the user must be prompted before reports are sent.
    ask_for_approval: bool
    # Prompt text shown to the user; None when no approval is needed.
    message_to_user: Optional[str]


@router.get("/prepare_draft/ask_send_approval", response_model=ReportSendApprovalRequest)
async def prepare_ask_approval(
    dataset_id: int
):
    """Report whether the user must approve sending preparation reports.

    Renamed from `prepare_generate`, which silently shadowed the handler of
    the same name registered for /prepare_draft/generate (F811 redefinition).
    The HTTP route is unchanged.
    """
    preparation = _drafts_prepare[dataset_id]  # KeyError (-> 500) if no prep was started
    msg = None
    ask_for_approval = preparation.should_prompt_for_report_sending_approval()
    if ask_for_approval:
        # Private DataPreparation helper builds the user-facing prompt text.
        msg = preparation._report_sending_approval_msg()
    return ReportSendApprovalRequest(dataset_id=dataset_id,
                                     ask_for_approval=ask_for_approval,
                                     message_to_user=msg)


@router.get("/prepare_draft/run", response_class=StreamingResponse)
async def prepare_run(
    dataset_id: int,
    approved_sending_reports: bool
):
    """Run the full preparation pipeline for a dataset, streaming log output.

    Looks up the draft created by /prepare_draft/generate, records the user's
    report-sending decision, runs prepare / sanity-check / statistics in a
    background task, and streams UI messages back as plain-text chunks.
    """
    preparation = _drafts_prepare[dataset_id]  # KeyError (-> 500) if generate wasn't called first
    preparation.allow_sending_reports = approved_sending_reports

    async def run_preparation():
        with preparation.ui.proxy():
            if preparation.should_run_prepare():
                await run_in_threadpool(preparation.run_prepare)  # Prints docker run logs

            with preparation.ui.interactive():
                await run_in_threadpool(
                    preparation.run_sanity_check)  # Run a sanity-check task and prints docker run logs
                await run_in_threadpool(preparation.run_statistics)  # Run a statistics task and prints docker run logs
                await run_in_threadpool(preparation.mark_dataset_as_ready)

    # NOTE(review): the task reference is discarded; asyncio keeps only a weak
    # reference, so the task may be garbage-collected mid-run — confirm.
    _ = aio.create_task(run_preparation())

    def message_stream():
        for msg in preparation.ui.get_message_generator():
            yield msg + "\n"  # Yield each message as a chunk

    return StreamingResponse(message_stream(), media_type="text/plain")


# Pending "set operational" drafts, keyed by dataset id.
# NOTE(review): module-level state — lost on restart, not shared across workers.
_drafts_operational: dict[int, DatasetSetOperational] = {}


@router.post("/operational_draft/generate", response_class=JSONResponse)
async def set_operational(dataset_id: int):
    """Dry-run the 'set operational' flow and return its statistics as YAML."""
    draft = DatasetSetOperational(dataset_id, approved=False)
    _drafts_operational[dataset_id] = draft
    # Same sequence the CLI runs; approved=False keeps everything local
    # until /operational_draft/submit confirms it.
    draft.validate()
    draft.generate_uids()
    draft.set_statistics()
    draft.set_operational()
    stats = {key: value for key, value in draft.todict().items() if value is not None}
    return {"yaml_statistics": yaml.dump(stats)}


@router.post("/operational_draft/submit", response_class=JSONResponse)
async def submit_operational(dataset_id: int):
    """Confirm a pending operational draft and push the update to the server."""
    draft = _drafts_operational[dataset_id]  # KeyError (-> 500) if no draft exists
    try:
        draft.approved = True
        payload = draft.todict()
        config.comms.update_dataset(draft.dataset.id, payload)
        draft.write()
        return {"dataset_id": dataset_id}
    except Exception as e:
        # Surface the failure to the UI as a 400 rather than a bare 500.
        return JSONResponse({"error": f"Error moving to operational state: {str(e)}"}, 400)


@router.get("/operational_draft/decline", response_class=JSONResponse)
async def decline_operational(dataset_id: int):
    """Discard the pending operational draft for this dataset.

    Renamed from `decline_draft`, which shadowed the submission-draft handler
    of the same name defined earlier in this module (F811 redefinition). The
    HTTP route is unchanged.
    """
    # KeyError (-> 500) if no operational draft exists for this id.
    del _drafts_operational[dataset_id]
    return {"dataset_id": dataset_id, "op_declined": True}
42 changes: 42 additions & 0 deletions cli/medperf/web_ui/datasets/routes_operational.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import yaml
from fastapi import APIRouter
from starlette.responses import JSONResponse

from medperf import config
from medperf.commands.dataset.set_operational import DatasetSetOperational

# Pending "set operational" drafts, keyed by dataset id.
# NOTE(review): module-level state — lost on restart, not shared across workers.
_drafts_operational: dict[int, DatasetSetOperational] = {}

router = APIRouter()


@router.post("/operational_draft/generate", response_class=JSONResponse)
async def set_operational(dataset_id: int):
    """Prepare a 'set operational' draft and return its statistics as YAML."""
    draft = DatasetSetOperational(dataset_id, approved=False)
    _drafts_operational[dataset_id] = draft
    # Mirrors the CLI sequence; approved=False defers the server update
    # until /operational_draft/submit is called.
    draft.validate()
    draft.generate_uids()
    draft.set_statistics()
    draft.set_operational()
    stats = {key: value for key, value in draft.todict().items() if value is not None}
    return {"yaml_statistics": yaml.dump(stats)}


@router.post("/operational_draft/submit", response_class=JSONResponse)
async def submit_operational(dataset_id: int):
    """Approve the pending operational draft and send the update to the server."""
    draft = _drafts_operational[dataset_id]  # KeyError (-> 500) if no draft exists
    try:
        draft.approved = True
        payload = draft.todict()
        config.comms.update_dataset(draft.dataset.id, payload)
        draft.write()
        return {"dataset_id": dataset_id}
    except Exception as e:
        # Report the failure to the UI as a 400 with the error text.
        return JSONResponse({"error": f"Error moving to operational state: {str(e)}"}, 400)


@router.get("/operational_draft/decline", response_class=JSONResponse)
async def decline_operational(dataset_id: int):
    """Drop the pending operational draft for this dataset."""
    _drafts_operational.pop(dataset_id)  # KeyError (-> 500) if unknown id
    return {"dataset_id": dataset_id, "op_declined": True}
92 changes: 92 additions & 0 deletions cli/medperf/web_ui/datasets/routes_prepare.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
import asyncio as aio
from typing import Optional

from fastapi import APIRouter
from starlette.concurrency import run_in_threadpool
from starlette.requests import Request
from starlette.responses import HTMLResponse, StreamingResponse
from pydantic import BaseModel

from medperf.commands.dataset.prepare import DataPreparation
from medperf.web_ui.common import templates

router = APIRouter()

# In-progress DataPreparation drafts, keyed by dataset id.
# NOTE(review): module-level state — lost on restart, not shared across workers.
_drafts_prepare: dict[int, DataPreparation] = {}


@router.get("/ui/prepare", response_class=HTMLResponse)
async def prepare_ui(dataset_id: int, request: Request):
    """Render the dataset-preparation page for the given dataset."""
    context = {"request": request, "dataset_id": dataset_id}
    return templates.TemplateResponse("dataset_prepare.html", context)


@router.get("/prepare_draft/generate", response_class=StreamingResponse)
async def prepare_generate(
    dataset_id: int
):
    """Start dataset-preparation setup and stream its log output as plain text.

    Registers a DataPreparation draft in `_drafts_prepare`, launches the prep
    cube download/setup in a background task, and streams UI messages back to
    the browser as newline-delimited chunks.
    """
    preparation = DataPreparation(dataset_id, approve_sending_reports=False)
    _drafts_prepare[dataset_id] = preparation

    preparation.get_dataset()  # prints nothing
    preparation.validate()  # may raise a validation error for a bad dataset

    async def run_preparation():
        with preparation.ui.proxy():
            await run_in_threadpool(preparation.get_prep_cube)  # docker pull logs
            preparation.setup_parameters()  # prints nothing

    # Keep a strong reference to the task: the event loop only holds a weak
    # reference to tasks, so an un-referenced task may be garbage-collected
    # before it finishes (see asyncio.create_task docs).
    preparation._setup_task = aio.create_task(run_preparation())

    def message_stream():
        for msg in preparation.ui.get_message_generator():
            yield msg + "\n"  # each UI message is sent as one chunk

    return StreamingResponse(message_stream(), media_type="text/plain")


class ReportSendApprovalRequest(BaseModel):
    """Response model: whether the UI must ask the user to approve report sending."""

    # Dataset this approval question refers to.
    dataset_id: int
    # True when the user must be prompted before reports are sent.
    ask_for_approval: bool
    # Prompt text shown to the user; None when no approval is needed.
    # Explicit default keeps the field optional under pydantic v2, where a
    # bare `Optional[str]` annotation is still a required field.
    message_to_user: Optional[str] = None


@router.get("/prepare_draft/ask_send_approval", response_model=ReportSendApprovalRequest)
async def prepare_ask_approval(
    dataset_id: int
):
    """Say whether the user must approve report sending, with the prompt text."""
    preparation = _drafts_prepare[dataset_id]  # KeyError (-> 500) if no prep started
    should_ask = preparation.should_prompt_for_report_sending_approval()
    # The prompt text comes from a private DataPreparation helper.
    prompt = preparation._report_sending_approval_msg() if should_ask else None
    return ReportSendApprovalRequest(dataset_id=dataset_id,
                                     ask_for_approval=should_ask,
                                     message_to_user=prompt)


@router.get("/prepare_draft/run", response_class=StreamingResponse)
async def prepare_run(
    dataset_id: int,
    approved_sending_reports: bool
):
    """Run the full preparation pipeline for a dataset, streaming log output.

    Looks up the draft created by /prepare_draft/generate, records the user's
    report-sending decision, runs prepare / sanity-check / statistics in a
    background task, and streams UI messages back as plain-text chunks.
    """
    preparation = _drafts_prepare[dataset_id]  # KeyError (-> 500) if generate wasn't called first
    preparation.allow_sending_reports = approved_sending_reports

    async def run_preparation():
        with preparation.ui.proxy():
            if preparation.should_run_prepare():
                await run_in_threadpool(preparation.run_prepare)  # Prints docker run logs

            with preparation.ui.interactive():
                await run_in_threadpool(
                    preparation.run_sanity_check)  # Run a sanity-check task and prints docker run logs
                await run_in_threadpool(preparation.run_statistics)  # Run a statistics task and prints docker run logs
                await run_in_threadpool(preparation.mark_dataset_as_ready)

    # NOTE(review): the task reference is discarded; asyncio keeps only a weak
    # reference, so the task may be garbage-collected mid-run — confirm.
    _ = aio.create_task(run_preparation())

    def message_stream():
        for msg in preparation.ui.get_message_generator():
            yield msg + "\n"  # Yield each message as a chunk

    return StreamingResponse(message_stream(), media_type="text/plain")
Loading

0 comments on commit d617a04

Please sign in to comment.