Skip to content

Commit

Permalink
refactor: split API into dedicated routers (ports, vessels, zones)
Browse files Browse the repository at this point in the history
  • Loading branch information
RV committed Oct 7, 2024
1 parent d381cd5 commit e9c7c2b
Show file tree
Hide file tree
Showing 6 changed files with 381 additions and 275 deletions.
8 changes: 7 additions & 1 deletion backend/bloom/domain/api.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,22 @@
from fastapi import Request
from fastapi import Request, HTTPException
from pydantic import BaseModel, ConfigDict, Field,conint
from typing import Generic,TypeVar, List
from typing_extensions import Annotated, Literal, Optional
from datetime import datetime, timedelta
from enum import Enum
from pydantic.generics import GenericModel
from fastapi.security import APIKeyHeader
from bloom.config import settings

## Reference for pagination design
## https://jayhawk24.hashnode.dev/how-to-implement-pagination-in-fastapi-feat-sqlalchemy
X_API_KEY_HEADER=APIKeyHeader(name="x-key")

def check_apikey(key:str):
    """Validate an API key taken from the ``x-key`` request header.

    Raises:
        HTTPException: 401 when the key does not match ``settings.api_key``.

    Returns:
        True when the key is valid.
    """
    import hmac
    # Constant-time comparison: a plain `!=` short-circuits on the first
    # differing byte and can leak key prefixes through response timing.
    if not hmac.compare_digest(str(key).encode(), str(settings.api_key).encode()):
        raise HTTPException(status_code=401, detail="Unauthorized")
    return True

class DatetimeRangeRequest(BaseModel):
start_at: datetime = datetime.now()-timedelta(days=7)
end_at: datetime = datetime.now()
Expand Down
8 changes: 4 additions & 4 deletions backend/bloom/routers/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

@router.get("/metrics/vessels-in-activity",
response_model=list[ResponseMetricsVesselInActiviySchema],
tags=['metrics'])
tags=['Metrics'])
def read_metrics_vessels_in_activity_total(request: Request,
datetime_range: DatetimeRangeRequest = Depends(),
#pagination: PageParams = Depends(),
Expand Down Expand Up @@ -73,7 +73,7 @@ def read_metrics_vessels_in_activity_total(request: Request,

@router.get("/metrics/zone-visited",
response_model=list[ResponseMetricsZoneVisitedSchema],
tags=['metrics'] )
tags=['Metrics'] )
def read_metrics_vessels_in_activity_total(datetime_range: DatetimeRangeRequest = Depends(),
pagination: PaginatedRequest = Depends(),
auth: str = Depends(X_API_KEY_HEADER),):
Expand Down Expand Up @@ -101,7 +101,7 @@ def read_metrics_vessels_in_activity_total(datetime_range: DatetimeRangeRequest

@router.get("/metrics/zones/{zone_id}/visiting-time-by-vessel",
response_model=list[ResponseMetricsZoneVisitingTimeByVesselSchema],
tags=['metrics'])
tags=['Metrics'])
def read_metrics_zone_visiting_time_by_vessel(
datetime_range: Annotated[DatetimeRangeRequest,Body()],
zone_id: int,
Expand Down Expand Up @@ -133,7 +133,7 @@ def read_metrics_zone_visiting_time_by_vessel(



@router.get("/metrics/vessels/{vessel_id}/visits/{visit_type}", tags=['metrics'])
@router.get("/metrics/vessels/{vessel_id}/visits/{visit_type}", tags=['Metrics'])
def read_metrics_vessels_visits_by_visit_type(
vessel_id: int,
visit_type: str,
Expand Down
63 changes: 63 additions & 0 deletions backend/bloom/routers/ports.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
from fastapi import APIRouter, Depends, HTTPException, Request
from redis import Redis
from bloom.config import settings
from bloom.container import UseCases
from pydantic import BaseModel, Field
from typing_extensions import Annotated, Literal, Optional
from datetime import datetime, timedelta
import time
import redis
import json
from sqlalchemy import select, func, and_, or_
from bloom.infra.database import sql_model
from bloom.infra.repositories.repository_segment import SegmentRepository
from bloom.config import settings
from bloom.container import UseCases
from bloom.domain.vessel import Vessel
from bloom.logger import logger
from bloom.domain.metrics import (ResponseMetricsVesselInActiviySchema,
ResponseMetricsZoneVisitedSchema,
ResponseMetricsZoneVisitingTimeByVesselSchema)
from bloom.domain.api import ( DatetimeRangeRequest,
PaginatedRequest,OrderByRequest,OrderByEnum,
paginate,PagedResponseSchema,PageParams,
X_API_KEY_HEADER,check_apikey)
from bloom.config import settings

router = APIRouter()
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)

@router.get("/ports",
            tags=['Ports'])
async def list_ports(request:Request,nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)):
    """Return all ports as JSON, served from the Redis cache when fresh.

    Pass ``nocache=True`` to bypass the cache and rebuild it from the DB.
    """
    check_apikey(key)
    endpoint = "/ports"  # plain string: no placeholder, f-string was unnecessary
    cache = rd.get(endpoint)
    start = time.time()
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
        return payload
    else:
        use_cases = UseCases()
        port_repository = use_cases.port_repository()
        db = use_cases.db()
        with db.session() as session:
            json_data = [json.loads(p.model_dump_json() if p else "{}")
                         for p in port_repository.get_all_ports(session)]
            # Set the value and its TTL atomically: a separate expire() call
            # could be skipped on a crash, leaving a stale key that never expires.
            rd.set(endpoint, json.dumps(json_data), ex=settings.redis_cache_expiration)
            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
            return json_data


@router.get("/ports/{port_id}",
            tags=['Ports'])
async def get_port(port_id:int,key: str = Depends(X_API_KEY_HEADER)):
    """Look up a single port by its database id."""
    check_apikey(key)
    use_cases = UseCases()
    db = use_cases.db()
    repo = use_cases.port_repository()
    with db.session() as session:
        return repo.get_port_by_id(session, port_id)
166 changes: 166 additions & 0 deletions backend/bloom/routers/vessels.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
from fastapi import APIRouter, Depends, HTTPException
from redis import Redis
from bloom.config import settings
from bloom.container import UseCases
from pydantic import BaseModel, Field
from typing_extensions import Annotated, Literal, Optional
from datetime import datetime, timedelta
import time
import redis
import json
from sqlalchemy import select, func, and_, or_
from bloom.infra.database import sql_model
from bloom.infra.repositories.repository_segment import SegmentRepository
from bloom.config import settings
from bloom.container import UseCases
from bloom.domain.vessel import Vessel
from bloom.logger import logger
from bloom.domain.metrics import (ResponseMetricsVesselInActiviySchema,
ResponseMetricsZoneVisitedSchema,
ResponseMetricsZoneVisitingTimeByVesselSchema)
from bloom.domain.api import ( DatetimeRangeRequest,
PaginatedRequest,OrderByRequest,OrderByEnum,
paginate,PagedResponseSchema,PageParams,
X_API_KEY_HEADER,check_apikey)

router = APIRouter()
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)

@router.get("/vessels",
            tags=['Vessels'])
async def list_vessels(nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)):
    """Return all vessels as JSON, served from the Redis cache when fresh.

    Pass ``nocache=True`` to bypass the cache and rebuild it from the DB.
    """
    # NOTE: removed a leftover `print(f"KEY:{key}")` — it wrote the API key
    # to stdout/logs, leaking a secret.
    check_apikey(key)
    endpoint = "/vessels"  # plain string: no placeholder, f-string was unnecessary
    cache = rd.get(endpoint)
    start = time.time()
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
        return payload
    else:
        use_cases = UseCases()
        vessel_repository = use_cases.vessel_repository()
        db = use_cases.db()
        with db.session() as session:
            json_data = [json.loads(v.model_dump_json() if v else "{}")
                         for v in vessel_repository.get_vessels_list(session)]
            # Set the value and its TTL atomically (single round-trip, and no
            # window where the key exists without an expiration).
            rd.set(endpoint, json.dumps(json_data), ex=settings.redis_cache_expiration)
            # Log elapsed time on the miss path too, consistent with the
            # other cached endpoints in this router.
            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
            return json_data

@router.get("/vessels/{vessel_id}",
            tags=['Vessels'])
async def get_vessel(vessel_id: int,key: str = Depends(X_API_KEY_HEADER)):
    """Look up a single vessel by its database id."""
    check_apikey(key)
    use_cases = UseCases()
    db = use_cases.db()
    repo = use_cases.vessel_repository()
    with db.session() as session:
        return repo.get_vessel_by_id(session, vessel_id)

@router.get("/vessels/all/positions/last",
            tags=['Vessels'])
async def list_all_vessel_last_position(nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)):
    """Return the last known position of every vessel, Redis-cached.

    Pass ``nocache=True`` to bypass the cache and rebuild it from the DB.
    """
    check_apikey(key)
    endpoint = "/vessels/all/positions/last"  # plain string: no placeholder needed
    cache = rd.get(endpoint)
    start = time.time()
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
        return payload
    else:
        use_cases = UseCases()
        segment_repository = use_cases.segment_repository()
        db = use_cases.db()
        with db.session() as session:
            json_data = [json.loads(p.model_dump_json() if p else "{}")
                         for p in segment_repository.get_all_vessels_last_position(session)]
            # Set the value and its TTL atomically: a separate expire() call
            # could be skipped on a crash, leaving a key that never expires.
            rd.set(endpoint, json.dumps(json_data), ex=settings.redis_cache_expiration)
            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
            return json_data

@router.get("/vessels/{vessel_id}/positions/last",
            tags=['Vessels'])
async def get_vessel_last_position(vessel_id: int, nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)):
    """Return the last known position of one vessel, Redis-cached per vessel.

    Pass ``nocache=True`` to bypass the cache and rebuild it from the DB.
    """
    check_apikey(key)
    endpoint=f"/vessels/{vessel_id}/positions/last"
    cache = rd.get(endpoint)
    start = time.time()
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
        return payload
    else:
        use_cases = UseCases()
        segment_repository = use_cases.segment_repository()
        db = use_cases.db()
        with db.session() as session:
            result = segment_repository.get_vessel_last_position(session, vessel_id)
            # "{}" fallback keeps the cached payload valid JSON when no
            # position exists for this vessel.
            json_data = json.loads(result.model_dump_json() if result else "{}")
            # Set the value and its TTL atomically: a separate expire() call
            # could be skipped on a crash, leaving a key that never expires.
            rd.set(endpoint, json.dumps(json_data), ex=settings.redis_cache_expiration)
            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
            return json_data

@router.get("/vessels/{vessel_id}/excursions",
            tags=['Vessels'])
async def list_vessel_excursions(vessel_id: int, nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)):
    """Return all excursions of one vessel, Redis-cached per vessel.

    Pass ``nocache=True`` to bypass the cache and rebuild it from the DB.
    """
    check_apikey(key)
    endpoint=f"/vessels/{vessel_id}/excursions"
    cache = rd.get(endpoint)
    start = time.time()
    if cache and not nocache:
        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
        payload = json.loads(cache)
        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
        return payload
    else:
        use_cases = UseCases()
        excursion_repository = use_cases.excursion_repository()
        db = use_cases.db()
        with db.session() as session:
            json_data = [json.loads(p.model_dump_json() if p else "{}")
                         for p in excursion_repository.get_excursions_by_vessel_id(session, vessel_id)]
            # Set the value and its TTL atomically: a separate expire() call
            # could be skipped on a crash, leaving a key that never expires.
            rd.set(endpoint, json.dumps(json_data), ex=settings.redis_cache_expiration)
            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
            return json_data


@router.get("/vessels/{vessel_id}/excursions/{excursions_id}",
            tags=['Vessels'])
async def get_vessel_excursion(vessel_id: int,excursions_id: int,key: str = Depends(X_API_KEY_HEADER)):
    """Look up one excursion of a vessel by its id."""
    check_apikey(key)
    use_cases = UseCases()
    db = use_cases.db()
    repo = use_cases.excursion_repository()
    with db.session() as session:
        return repo.get_vessel_excursion_by_id(session, vessel_id, excursions_id)


@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments",
            tags=['Vessels'])
async def list_vessel_excursion_segments(vessel_id: int,excursions_id: int,key: str = Depends(X_API_KEY_HEADER)):
    """List every segment belonging to one excursion of a vessel."""
    check_apikey(key)
    use_cases = UseCases()
    db = use_cases.db()
    repo = use_cases.segment_repository()
    with db.session() as session:
        return repo.list_vessel_excursion_segments(session, vessel_id, excursions_id)

@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}",
            tags=['Vessels'])
async def get_vessel_excursion_segment(vessel_id: int,excursions_id: int, segment_id:int,key: str = Depends(X_API_KEY_HEADER)):
    """Look up one segment of one excursion of a vessel."""
    check_apikey(key)
    use_cases = UseCases()
    db = use_cases.db()
    repo = use_cases.segment_repository()
    with db.session() as session:
        return repo.get_vessel_excursion_segment_by_id(session, vessel_id, excursions_id, segment_id)
Loading

0 comments on commit e9c7c2b

Please sign in to comment.