diff --git a/backend/bloom/domain/api.py b/backend/bloom/domain/api.py
new file mode 100644
index 00000000..baff6beb
--- /dev/null
+++ b/backend/bloom/domain/api.py
@@ -0,0 +1,89 @@
+from fastapi import Request, HTTPException
+from pydantic import BaseModel, ConfigDict, Field, conint
+from typing import Generic, TypeVar, List
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+from enum import Enum
+import redis
+from fastapi.security import APIKeyHeader
+from bloom.config import settings
+
+## Reference for pagination design
+## https://jayhawk24.hashnode.dev/how-to-implement-pagination-in-fastapi-feat-sqlalchemy
+X_API_KEY_HEADER = APIKeyHeader(name="x-key")
+
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+class CachedRequest(BaseModel):
+    nocache: bool = False
+
+def check_apikey(key: str):
+    if key != settings.api_key:
+        raise HTTPException(status_code=401, detail="Unauthorized")
+    return True
+
+def check_cache(request: Request):
+    return rd.get(request.url.path)
+
+class DatetimeRangeRequest(BaseModel):
+    # default_factory so the defaults are evaluated per request,
+    # not once at import time
+    start_at: datetime = Field(default_factory=lambda: datetime.now() - timedelta(days=7))
+    end_at: datetime = Field(default_factory=datetime.now)
+
+class OrderByEnum(str, Enum):
+    ascending = "ASC"
+    descending = "DESC"
+
+
+class TotalTimeActivityTypeEnum(str, Enum):
+    total_time_at_sea: str = "Total Time at Sea"
+    total_time_in_amp: str = "Total Time in AMP"
+    total_time_in_territorial_waters: str = "Total Time in Territorial Waters"
+    total_time_in_coastal_waters: str = "Total Time in Coastal Waters"
+    total_time_fishing: str = "Total Time Fishing"
+    total_time_fishing_in_amp: str = "Total Time Fishing in AMP"
+    total_time_fishing_in_territorial_waters: str = "Total Time Fishing in Territorial Waters"
+    total_time_fishing_in_coastal_waters: str = "Total Time Fishing in Coastal Waters"
+    total_time_fishing_in_extincting_amp: str = "Total Time Fishing in Extincting AMP"
+
+class TotalTimeActivityTypeRequest(BaseModel):
+    type: TotalTimeActivityTypeEnum
+
+class OrderByRequest(BaseModel):
+    order: OrderByEnum = OrderByEnum.ascending
+
+class PaginatedRequest(BaseModel):
+    offset: int | None = 0
+    limit: int | None = 100
+    order_by: OrderByEnum = OrderByEnum.ascending
+
+
+class PageParams(BaseModel):
+    """Request query params for paginated API."""
+    offset: conint(ge=0) = 0
+    limit: conint(ge=1, le=100000) = 100
+
+T = TypeVar("T")
+
+# pydantic v2: parametrize BaseModel directly,
+# pydantic.generics.GenericModel is deprecated
+class PagedResponseSchema(BaseModel, Generic[T]):
+    total: int
+    limit: int
+    offset: int
+    next: str | None
+    previous: str | None
+    results: List[T]
+
+def paginate(request: Request, page_params: PageParams, query, ResponseSchema: BaseModel) -> PagedResponseSchema[T]:
+    """Paginate the query."""
+    # offset is a row offset (see PageParams), so it is applied as-is
+    paginated_query = query.offset(page_params.offset).limit(page_params.limit).all()
+
+    return PagedResponseSchema(
+        total=query.count(),
+        offset=page_params.offset,
+        limit=page_params.limit,
+        next="",
+        previous="",
+        results=[ResponseSchema.model_validate(item) for item in paginated_query],
+    )
\ No newline at end of file
diff --git a/backend/bloom/domain/metrics.py b/backend/bloom/domain/metrics.py
new file mode 100644
index 00000000..54be1c94
--- /dev/null
+++ b/backend/bloom/domain/metrics.py
@@ -0,0 +1,49 @@
+from pydantic import BaseModel, ConfigDict
+from typing import Generic, TypeVar, List
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+from enum import Enum
+from bloom.domain.vessel import Vessel
+
+class ResponseMetricsVesselInActivitySchema(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+    vessel_id: Optional[int] = None
+    vessel_mmsi: int
+    vessel_ship_name: str
+    vessel_width: Optional[float] = None
+    vessel_length: Optional[float] = None
+    vessel_country_iso3: Optional[str] = None
+    vessel_type: Optional[str] = None
+    vessel_imo: Optional[int] = None
+    vessel_cfr: Optional[str] = None
+    vessel_external_marking: Optional[str] = None
+    vessel_ircs: Optional[str] = None
+    vessel_home_port_id: Optional[int] = None
+    vessel_details: Optional[str] = None
+    vessel_tracking_activated: Optional[bool] = None
+    vessel_tracking_status: Optional[str] = None
+    vessel_length_class: Optional[str] = None
+    vessel_check: Optional[str] = None
+    total_time_at_sea: Optional[timedelta] = None
+
+class ResponseMetricsZoneVisitedSchema(BaseModel):
+    zone_id: int
+    zone_category: Optional[str] = None
+    zone_sub_category: Optional[str] = None
+    zone_name: str
+    visiting_duration: timedelta
+
+class ResponseMetricsZoneVisitingTimeByVesselSchema(BaseModel):
+    zone_id: int
+    zone_category: str
+    zone_sub_category: Optional[str] = None
+    zone_name: str
+    vessel_id: int
+    vessel_name: str
+    vessel_type: Optional[str] = None
+    vessel_length_class: Optional[str] = None
+    zone_visiting_time_by_vessel: timedelta
+
+class ResponseMetricsVesselTotalTimeActivityByActivityTypeSchema(BaseModel):
+    vessel_id: int
+    total_activity_time: timedelta
\ No newline at end of file
diff --git a/backend/bloom/infra/database/sql_model.py b/backend/bloom/infra/database/sql_model.py
index 22e17ca7..91652f13 100644
--- a/backend/bloom/infra/database/sql_model.py
+++ b/backend/bloom/infra/database/sql_model.py
@@ -13,7 +13,10 @@
     PrimaryKeyConstraint
 )
 from sqlalchemy.dialects.postgresql import JSONB
-from sqlalchemy.sql import func
+from sqlalchemy.sql import func, select
+from sqlalchemy.orm import mapped_column, Mapped, relationship
+from typing_extensions import Annotated, Literal, Optional
+from datetime import timedelta
 
 
 class Vessel(Base):
@@ -235,3 +238,21 @@ class RelSegmentZone(Base):
     segment_id = Column("segment_id", Integer, ForeignKey("fct_segment.id"), nullable=False)
     zone_id = Column("zone_id", Integer, ForeignKey("dim_zone.id"), nullable=False)
     created_at = Column("created_at", DateTime(timezone=True), server_default=func.now())
+
+vessel_in_activity_request = (
+    select(
+        Vessel.id,
+        Excursion.vessel_id,
+        func.sum(Excursion.total_time_at_sea).label("total_time_at_sea")
+    )
+    .select_from(Segment)
+    .join(Excursion, Segment.excursion_id == Excursion.id)
+    .join(Vessel, Excursion.vessel_id == Vessel.id)
+    # group only on the vessel keys; grouping on the summed column as well
+    # would defeat the aggregation
+    .group_by(Vessel.id, Excursion.vessel_id)
+    .subquery())
+
+class MetricsVesselInActivity(Base):
+    __table__ = vessel_in_activity_request
+    #vessel_id: Mapped[Optional[int]]
+    #total_time_at_sea: Mapped[Optional[timedelta]]
+
diff --git a/backend/bloom/routers/metrics.py b/backend/bloom/routers/metrics.py
new file mode 100644
index 00000000..d599c36e
--- /dev/null
+++ b/backend/bloom/routers/metrics.py
@@ -0,0 +1,250 @@
+from fastapi import APIRouter, Depends, Query, Body, Request
+from redis import Redis
+from bloom.config import settings
+from bloom.container import UseCases
+from bloom.logger import logger
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+from sqlalchemy import select, func, and_, or_, text, literal_column, Row
+from bloom.infra.database import sql_model
+from bloom.infra.repositories.repository_segment import SegmentRepository
+from sqlalchemy.ext.serializer import loads, dumps
+import json
+import time
+from bloom.infra.database.database_manager import Base
+from bloom.domain.metrics import (ResponseMetricsVesselInActivitySchema,
+                                  ResponseMetricsZoneVisitedSchema,
+                                  ResponseMetricsZoneVisitingTimeByVesselSchema,
+                                  ResponseMetricsVesselTotalTimeActivityByActivityTypeSchema)
+from bloom.domain.api import (DatetimeRangeRequest,
+                              PaginatedRequest, OrderByRequest, OrderByEnum,
+                              paginate, PagedResponseSchema, PageParams,
+                              X_API_KEY_HEADER, check_apikey, CachedRequest,
+                              TotalTimeActivityTypeRequest)
+
+router = APIRouter()
+rd = Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+@router.get("/metrics/vessels-in-activity",
+            response_model=list[ResponseMetricsVesselInActivitySchema],
+            tags=['Metrics'])
+def read_metrics_vessels_in_activity_total(request: Request,
+                                           datetime_range: DatetimeRangeRequest = Depends(),
+                                           pagination: PageParams = Depends(),
+                                           order: OrderByRequest = Depends(),
+                                           caching: CachedRequest = Depends(),
+                                           key: str = Depends(X_API_KEY_HEADER),
+                                           ):
+    check_apikey(key)
+    use_cases = UseCases()
+    db = use_cases.db()
+    cache_key = f"{request.url.path}?{request.query_params}"
+    cache_payload = rd.get(cache_key)
+    start = time.time()
+    payload = []
+    if cache_payload and not caching.nocache:
+        logger.debug(f"{cache_key} cached ({settings.redis_cache_expiration})s")
+        payload = loads(cache_payload)
+    else:
+        with db.session() as session:
+            stmt = select(sql_model.Vessel.id.label("vessel_id"),
+                          sql_model.Vessel.mmsi.label("vessel_mmsi"),
+                          sql_model.Vessel.ship_name.label("vessel_ship_name"),
+                          sql_model.Vessel.width.label("vessel_width"),
+                          sql_model.Vessel.length.label("vessel_length"),
+                          sql_model.Vessel.country_iso3.label("vessel_country_iso3"),
+                          sql_model.Vessel.type.label("vessel_type"),
+                          sql_model.Vessel.imo.label("vessel_imo"),
+                          sql_model.Vessel.cfr.label("vessel_cfr"),
+                          sql_model.Vessel.external_marking.label("vessel_external_marking"),
+                          sql_model.Vessel.ircs.label("vessel_ircs"),
+                          sql_model.Vessel.home_port_id.label("vessel_home_port_id"),
+                          sql_model.Vessel.details.label("vessel_details"),
+                          sql_model.Vessel.tracking_activated.label("vessel_tracking_activated"),
+                          sql_model.Vessel.tracking_status.label("vessel_tracking_status"),
+                          sql_model.Vessel.length_class.label("vessel_length_class"),
+                          sql_model.Vessel.check.label("vessel_check"),
+                          func.sum(sql_model.Excursion.total_time_at_sea).label("total_time_at_sea")
+                          )\
+                .select_from(sql_model.Segment)\
+                .join(sql_model.Excursion, sql_model.Segment.excursion_id == sql_model.Excursion.id)\
+                .join(sql_model.Vessel, sql_model.Excursion.vessel_id == sql_model.Vessel.id)\
+                .where(
+                    or_(
+                        sql_model.Excursion.arrival_at.between(datetime_range.start_at, datetime_range.end_at),
+                        and_(sql_model.Excursion.departure_at <= datetime_range.end_at,
+                             sql_model.Excursion.arrival_at.is_(None)))
+                )\
+                .group_by(sql_model.Vessel.id)
+            # order on the aggregate, then paginate
+            stmt = stmt.order_by(func.sum(sql_model.Excursion.total_time_at_sea).asc())\
+                if order.order == OrderByEnum.ascending \
+                else stmt.order_by(func.sum(sql_model.Excursion.total_time_at_sea).desc())
+            stmt = stmt.offset(pagination.offset) if pagination.offset is not None else stmt
+            stmt = stmt.limit(pagination.limit) if pagination.limit is not None else stmt
+            payload = session.execute(stmt).all()
+            serialized = dumps(payload)
+            rd.set(cache_key, serialized)
+            rd.expire(cache_key, settings.redis_cache_expiration)
+    logger.debug(f"{cache_key} elapsed Time: {time.time()-start}")
+    return payload
+
+@router.get("/metrics/zone-visited",
+            response_model=list[ResponseMetricsZoneVisitedSchema],
+            tags=['Metrics'])
+def read_metrics_zone_visited_total(request: Request,
+                                    datetime_range: DatetimeRangeRequest = Depends(),
+                                    pagination: PageParams = Depends(),
+                                    order: OrderByRequest = Depends(),
+                                    caching: CachedRequest = Depends(),
+                                    key: str = Depends(X_API_KEY_HEADER),):
+    check_apikey(key)
+    cache_key = f"{request.url.path}?{request.query_params}"
+    cache_payload = rd.get(cache_key)
+    start = time.time()
+    payload = []
+    if cache_payload and not caching.nocache:
+        logger.debug(f"{cache_key} cached ({settings.redis_cache_expiration})s")
+        payload = loads(cache_payload)
+    else:
+        use_cases = UseCases()
+        db = use_cases.db()
+        with db.session() as session:
+            stmt = select(
+                sql_model.Zone.id.label("zone_id"),
+                sql_model.Zone.category.label("zone_category"),
+                sql_model.Zone.sub_category.label("zone_sub_category"),
+                sql_model.Zone.name.label("zone_name"),
+                func.sum(sql_model.Segment.segment_duration).label("visiting_duration")
+            )\
+                .select_from(sql_model.Zone)\
+                .join(sql_model.RelSegmentZone, sql_model.RelSegmentZone.zone_id == sql_model.Zone.id)\
+                .join(sql_model.Segment, sql_model.RelSegmentZone.segment_id == sql_model.Segment.id)\
+                .where(
+                    or_(
+                        sql_model.Segment.timestamp_start.between(datetime_range.start_at, datetime_range.end_at),
+                        sql_model.Segment.timestamp_end.between(datetime_range.start_at, datetime_range.end_at),)
+                )\
+                .group_by(sql_model.Zone.id)
+            stmt = stmt.order_by(func.sum(sql_model.Segment.segment_duration).asc())\
+                if order.order == OrderByEnum.ascending \
+                else stmt.order_by(func.sum(sql_model.Segment.segment_duration).desc())
+            stmt = stmt.offset(pagination.offset) if pagination.offset is not None else stmt
+            stmt = stmt.limit(pagination.limit) if pagination.limit is not None else stmt
+            payload = session.execute(stmt).all()
+            serialized = dumps(payload)
+            rd.set(cache_key, serialized)
+            rd.expire(cache_key, settings.redis_cache_expiration)
+    logger.debug(f"{cache_key} elapsed Time: {time.time()-start}")
+    return payload
+
+@router.get("/metrics/zones/{zone_id}/visiting-time-by-vessel",
+            response_model=list[ResponseMetricsZoneVisitingTimeByVesselSchema],
+            tags=['Metrics'])
+def read_metrics_zone_visiting_time_by_vessel(request: Request,
+                                              zone_id: int,
+                                              datetime_range: DatetimeRangeRequest = Depends(),
+                                              pagination: PageParams = Depends(),
+                                              order: OrderByRequest = Depends(),
+                                              caching: CachedRequest = Depends(),
+                                              key: str = Depends(X_API_KEY_HEADER),):
+    check_apikey(key)
+    cache_key = f"{request.url.path}?{request.query_params}"
+    cache_payload = rd.get(cache_key)
+    start = time.time()
+    payload = []
+    if cache_payload and not caching.nocache:
+        logger.debug(f"{cache_key} cached ({settings.redis_cache_expiration})s")
+        payload = loads(cache_payload)
+    else:
+        use_cases = UseCases()
+        db = use_cases.db()
+        with db.session() as session:
+            stmt = select(
+                sql_model.Zone.id.label("zone_id"),
+                sql_model.Zone.category.label("zone_category"),
+                sql_model.Zone.sub_category.label("zone_sub_category"),
+                sql_model.Zone.name.label("zone_name"),
+                sql_model.Vessel.id.label("vessel_id"),
+                sql_model.Vessel.ship_name.label("vessel_name"),
+                sql_model.Vessel.type.label("vessel_type"),
+                sql_model.Vessel.length_class.label("vessel_length_class"),
+                func.sum(sql_model.Segment.segment_duration).label("zone_visiting_time_by_vessel")
+            )\
+                .select_from(sql_model.Zone)\
+                .join(sql_model.RelSegmentZone, sql_model.RelSegmentZone.zone_id == sql_model.Zone.id)\
+                .join(sql_model.Segment, sql_model.RelSegmentZone.segment_id == sql_model.Segment.id)\
+                .join(sql_model.Excursion, sql_model.Excursion.id == sql_model.Segment.excursion_id)\
+                .join(sql_model.Vessel, sql_model.Excursion.vessel_id == sql_model.Vessel.id)\
+                .where(
+                    and_(sql_model.Zone.id == zone_id,
+                         or_(
+                             sql_model.Segment.timestamp_start.between(datetime_range.start_at, datetime_range.end_at),
+                             sql_model.Segment.timestamp_end.between(datetime_range.start_at, datetime_range.end_at),))
+                )\
+                .group_by(sql_model.Zone.id, sql_model.Vessel.id)
+
+            stmt = stmt.order_by(func.sum(sql_model.Segment.segment_duration).asc())\
+                if order.order == OrderByEnum.ascending \
+                else stmt.order_by(func.sum(sql_model.Segment.segment_duration).desc())
+            stmt = stmt.offset(pagination.offset) if pagination.offset is not None else stmt
+            stmt = stmt.limit(pagination.limit) if pagination.limit is not None else stmt
+
+            payload = session.execute(stmt).all()
+            serialized = dumps(payload)
+            rd.set(cache_key, serialized)
+            rd.expire(cache_key, settings.redis_cache_expiration)
+    logger.debug(f"{cache_key} elapsed Time: {time.time()-start}")
+    return payload
+
+
+@router.get("/metrics/vessels/{vessel_id}/activity/{activity_type}",
+            response_model=ResponseMetricsVesselTotalTimeActivityByActivityTypeSchema,
+            tags=['Metrics'])
+def read_metrics_vessels_visits_by_activity_type(request: Request,
+                                                 vessel_id: int,
+                                                 activity_type: TotalTimeActivityTypeRequest = Depends(),
+                                                 datetime_range: DatetimeRangeRequest = Depends(),
+                                                 #pagination: PageParams = Depends(),
+                                                 #order: OrderByRequest = Depends(),
+                                                 caching: CachedRequest = Depends(),
+                                                 key: str = Depends(X_API_KEY_HEADER),):
+    check_apikey(key)
+    cache_key = f"{request.url.path}?{request.query_params}"
+    cache_payload = rd.get(cache_key)
+    start = time.time()
+    payload = []
+    if cache_payload and not caching.nocache:
+        logger.debug(f"{cache_key} cached ({settings.redis_cache_expiration})s")
+        payload = loads(cache_payload)
+    else:
+        use_cases = UseCases()
+        db = use_cases.db()
+        with db.session() as session:
+            stmt = select(sql_model.Excursion.vessel_id,
+                          literal_column(f"'{activity_type.type.value}'").label('activity'),
+                          func.sum(sql_model.Excursion.total_time_at_sea).label("total_activity_time")
+                          )\
+                .select_from(sql_model.Excursion)\
+                .where(
+                    and_(sql_model.Excursion.vessel_id == vessel_id,
+                         or_(
+                             sql_model.Excursion.departure_at.between(datetime_range.start_at, datetime_range.end_at),
+                             sql_model.Excursion.arrival_at.between(datetime_range.start_at, datetime_range.end_at),))
+                )\
+                .group_by(sql_model.Excursion.vessel_id)\
+                .union(select(
+                    # fallback row when the vessel has no matching excursion;
+                    # literal_column() expects SQL text, hence str() and a
+                    # PostgreSQL interval literal
+                    literal_column(str(vessel_id)),
+                    literal_column(f"'{activity_type.type.value}'"),
+                    literal_column("INTERVAL '0 seconds'"),
+                ))
+            payload = session.execute(stmt.limit(1)).all()[0]
+            serialized = dumps(payload)
+            rd.set(cache_key, serialized)
+            rd.expire(cache_key, settings.redis_cache_expiration)
+
+    logger.debug(f"{cache_key} elapsed Time: {time.time()-start}")
+    return payload
\ No newline at end of file
diff --git a/backend/bloom/routers/ports.py b/backend/bloom/routers/ports.py
new file mode 100644
index 00000000..4397f2f5
--- /dev/null
+++ b/backend/bloom/routers/ports.py
@@ -0,0 +1,63 @@
+from fastapi import APIRouter, Depends, HTTPException, Request
+from redis import Redis
+from bloom.config import settings
+from bloom.container import UseCases
+from bloom.domain.vessel import Vessel
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+import time
+import redis
+import json
+from sqlalchemy import select, func, and_, or_
+from bloom.infra.database import sql_model
+from bloom.infra.repositories.repository_segment import SegmentRepository
+from bloom.logger import logger
+from bloom.domain.api import (DatetimeRangeRequest,
+                              PaginatedRequest, OrderByRequest, OrderByEnum,
+                              paginate, PagedResponseSchema, PageParams,
+                              X_API_KEY_HEADER, check_apikey, CachedRequest)
+
+router = APIRouter()
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+@router.get("/ports",
+            tags=['Ports'])
+async def list_ports(request: Request,
+                     caching: CachedRequest = Depends(),
+                     key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = "/ports"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not caching.nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        port_repository = use_cases.port_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(p.model_dump_json() if p else "{}")
+                         for p in port_repository.get_all_ports(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+
+@router.get("/ports/{port_id}",
+            tags=['Ports'])
+async def get_port(port_id: int,
+                   key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    port_repository = use_cases.port_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return port_repository.get_port_by_id(session, port_id)
\ No newline at end of file
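Reviewer note: a quick way to exercise the cached endpoints above, sketched with `requests`. The base URL and API key value are assumptions, not part of this diff.

```python
# Hypothetical smoke test for the cached /ports endpoint.
# BASE_URL and API_KEY are assumptions, not part of this diff.
import requests

BASE_URL = "http://localhost:8000"   # assumed dev server
API_KEY = "change-me"                # must match settings.api_key

# First call populates the Redis cache under the "/ports" key ...
r = requests.get(f"{BASE_URL}/ports", headers={"x-key": API_KEY})
print(r.status_code, len(r.json()))

# ... and nocache=true bypasses the cached payload and rebuilds it.
r = requests.get(f"{BASE_URL}/ports",
                 headers={"x-key": API_KEY},
                 params={"nocache": "true"})
print(r.status_code)
```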
diff --git a/backend/bloom/routers/vessels.py b/backend/bloom/routers/vessels.py
new file mode 100644
index 00000000..e052119d
--- /dev/null
+++ b/backend/bloom/routers/vessels.py
@@ -0,0 +1,171 @@
+from fastapi import APIRouter, Depends, HTTPException
+from redis import Redis
+from bloom.config import settings
+from bloom.container import UseCases
+from bloom.domain.vessel import Vessel
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+import time
+import redis
+import json
+from sqlalchemy import select, func, and_, or_
+from bloom.infra.database import sql_model
+from bloom.infra.repositories.repository_segment import SegmentRepository
+from bloom.logger import logger
+from bloom.domain.api import (DatetimeRangeRequest,
+                              PaginatedRequest, OrderByRequest, OrderByEnum,
+                              paginate, PagedResponseSchema, PageParams,
+                              X_API_KEY_HEADER, check_apikey)
+
+router = APIRouter()
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+@router.get("/vessels",
+            tags=['Vessels'])
+async def list_vessels(nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = "/vessels"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        vessel_repository = use_cases.vessel_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(v.model_dump_json() if v else "{}")
+                         for v in vessel_repository.get_vessels_list(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            return json_data
+
+@router.get("/vessels/{vessel_id}",
+            tags=['Vessels'])
+async def get_vessel(vessel_id: int, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    vessel_repository = use_cases.vessel_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return vessel_repository.get_vessel_by_id(session, vessel_id)
+
+@router.get("/vessels/all/positions/last",
+            tags=['Vessels'])
+async def list_all_vessel_last_position(nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = "/vessels/all/positions/last"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        segment_repository = use_cases.segment_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(p.model_dump_json() if p else "{}")
+                         for p in segment_repository.get_all_vessels_last_position(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/vessels/{vessel_id}/positions/last",
+            tags=['Vessels'])
+async def get_vessel_last_position(vessel_id: int, nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = f"/vessels/{vessel_id}/positions/last"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        segment_repository = use_cases.segment_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            result = segment_repository.get_vessel_last_position(session, vessel_id)
+            json_data = json.loads(result.model_dump_json() if result else "{}")
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/vessels/{vessel_id}/excursions",
+            tags=['Vessels'])
+async def list_vessel_excursions(vessel_id: int, nocache: bool = False,
+                                 datetime_range: DatetimeRangeRequest = Depends(),
+                                 pagination: PageParams = Depends(),
+                                 order: OrderByRequest = Depends(),
+                                 key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = f"/vessels/{vessel_id}/excursions"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        excursion_repository = use_cases.excursion_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(p.model_dump_json() if p else "{}")
+                         for p in excursion_repository.get_excursions_by_vessel_id(session, vessel_id)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}",
+            tags=['Vessels'])
+async def get_vessel_excursion(vessel_id: int, excursions_id: int, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    excursion_repository = use_cases.excursion_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return excursion_repository.get_vessel_excursion_by_id(session, vessel_id, excursions_id)
+
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments",
+            tags=['Vessels'])
+async def list_vessel_excursion_segments(vessel_id: int,
+                                         excursions_id: int,
+                                         key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    segment_repository = use_cases.segment_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return segment_repository.list_vessel_excursion_segments(session, vessel_id, excursions_id)
+
+@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}",
+            tags=['Vessels'])
+async def get_vessel_excursion_segment(vessel_id: int,
+                                       excursions_id: int,
+                                       segment_id: int,
+                                       key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    segment_repository = use_cases.segment_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return segment_repository.get_vessel_excursion_segment_by_id(session, vessel_id, excursions_id, segment_id)
\ No newline at end of file
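Reviewer note: every list endpoint in these routers repeats the same get-from-Redis-or-compute sequence. A possible follow-up refactor (not in this diff) could hoist it into a helper along these lines, with `rd` and `settings.redis_cache_expiration` as defined above:

```python
# Hypothetical helper, not part of this diff, factoring out the
# Redis get-or-compute pattern repeated across the routers.
import json
import time
from typing import Callable

def cached_json(rd, key: str, nocache: bool, compute: Callable[[], list], ttl: int):
    """Return the cached JSON payload for `key`, or compute, cache and return it."""
    start = time.time()
    cache = rd.get(key)
    if cache and not nocache:
        payload = json.loads(cache)
    else:
        payload = compute()
        rd.set(key, json.dumps(payload))
        rd.expire(key, ttl)
    print(f"{key} elapsed: {time.time() - start}")  # the routers use logger.debug here
    return payload
```

An endpoint body would then reduce to building the `compute` closure and returning, e.g., `cached_json(rd, "/vessels", nocache, build_vessel_list, settings.redis_cache_expiration)`.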
diff --git a/backend/bloom/routers/zones.py b/backend/bloom/routers/zones.py
new file mode 100644
index 00000000..4f6bce34
--- /dev/null
+++ b/backend/bloom/routers/zones.py
@@ -0,0 +1,130 @@
+from fastapi import APIRouter, Depends, HTTPException, Request
+from redis import Redis
+from bloom.config import settings
+from bloom.container import UseCases
+from bloom.domain.vessel import Vessel
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated, Literal, Optional
+from datetime import datetime, timedelta
+import time
+import redis
+import json
+from sqlalchemy import select, func, and_, or_
+from bloom.infra.database import sql_model
+from bloom.infra.repositories.repository_segment import SegmentRepository
+from bloom.logger import logger
+from bloom.domain.api import (DatetimeRangeRequest,
+                              PaginatedRequest, OrderByRequest, OrderByEnum,
+                              paginate, PagedResponseSchema, PageParams,
+                              X_API_KEY_HEADER, check_apikey)
+
+router = APIRouter()
+rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
+
+@router.get("/zones",
+            tags=["Zones"])
+async def list_zones(request: Request, nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = "/zones"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zones(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/zones/all/categories",
+            tags=["Zones"])
+async def list_zone_categories(request: Request, nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = "/zones/all/categories"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zone_categories(session)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/zones/by-category/{category}/by-sub-category/{sub}",
+            tags=["Zones"])
+async def get_zone_all_by_category_and_subcategory(category: str = "all", sub: Optional[str] = None,
+                                                   nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = f"/zones/by-category/{category}/by-sub-category/{sub}"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zones_by_category(session, category if category != 'all' else None, sub)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/zones/by-category/{category}",
+            tags=["Zones"])
+async def get_zone_all_by_category(category: str = "all", nocache: bool = False, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    endpoint = f"/zones/by-category/{category}"
+    cache = rd.get(endpoint)
+    start = time.time()
+    if cache and not nocache:
+        logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s")
+        payload = json.loads(cache)
+        logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+        return payload
+    else:
+        use_cases = UseCases()
+        zone_repository = use_cases.zone_repository()
+        db = use_cases.db()
+        with db.session() as session:
+            json_data = [json.loads(z.model_dump_json() if z else "{}")
+                         for z in zone_repository.get_all_zones_by_category(session, category if category != 'all' else None)]
+            rd.set(endpoint, json.dumps(json_data))
+            rd.expire(endpoint, settings.redis_cache_expiration)
+            logger.debug(f"{endpoint} elapsed Time: {time.time()-start}")
+            return json_data
+
+@router.get("/zones/{zones_id}",
+            tags=["Zones"])
+async def get_zone(zones_id: int, key: str = Depends(X_API_KEY_HEADER)):
+    check_apikey(key)
+    use_cases = UseCases()
+    zone_repository = use_cases.zone_repository()
+    db = use_cases.db()
+    with db.session() as session:
+        return zone_repository.get_zone_by_id(session, zones_id)
\ No newline at end of file
-@app.get("/vessels/all/positions/last") -async def list_all_vessel_last_position(nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/vessels/all/positions/last" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in segment_repository.get_all_vessels_last_position(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/vessels/{vessel_id}/positions/last") -async def get_vessel_last_position(vessel_id: int, nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/vessels/{vessel_id}/positions/last" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - result=segment_repository.get_vessel_last_position(session,vessel_id) - json_data = json.loads(result.model_dump_json() if result else "{}") - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/vessels/{vessel_id}/excursions") -async def list_vessel_excursions(vessel_id: int, nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/vessels/{vessel_id}/excursions" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - excursion_repository = use_cases.excursion_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in excursion_repository.get_excursions_by_vessel_id(session,vessel_id)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - - -@app.get("/vessels/{vessel_id}/excursions/{excursions_id}") -async def get_vessel_excursion(vessel_id: int,excursions_id: int,key: str = Depends(header_scheme)): - check_apikey(key) - use_cases = UseCases() - excursion_repository = use_cases.excursion_repository() - db = use_cases.db() - with db.session() as session: - return excursion_repository.get_vessel_excursion_by_id(session,vessel_id,excursions_id) - - -@app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments") -async def list_vessel_excursion_segments(vessel_id: int,excursions_id: int,key: str = Depends(header_scheme)): - check_apikey(key) - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as 
session: - return segment_repository.list_vessel_excursion_segments(session,vessel_id,excursions_id) - -@app.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}") -async def get_vessel_excursion_segment(vessel_id: int,excursions_id: int, segment_id:int,key: str = Depends(header_scheme)): - check_apikey(key) - use_cases = UseCases() - segment_repository = use_cases.segment_repository() - db = use_cases.db() - with db.session() as session: - return segment_repository.get_vessel_excursion_segment_by_id(session,vessel_id,excursions_id,segment_id) - -@app.get("/ports") -async def list_ports(request:Request,nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/ports" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - port_repository = use_cases.port_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(p.model_dump_json() if p else "{}") - for p in port_repository.get_all_ports(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - - -@app.get("/ports/{port_id}") -async def get_port(port_id:int,key: str = Depends(header_scheme)): - check_apikey(key) - use_cases = UseCases() - port_repository = use_cases.port_repository() - db = use_cases.db() - with db.session() as session: - return port_repository.get_port_by_id(session,port_id) - -@app.get("/zones") -async def list_zones(request:Request,nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/zones" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/all/categories") -async def list_zone_categories(request:Request,nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/zones/all/categories" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zone_categories(session)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/by-category/{category}/by-sub-category/{sub}") -async def 
get_zone_all_by_category(category:str="all",sub:str=None,nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/zones/by-category/{category}/by-sub-category/{sub}" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None,sub)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/by-category/{category}") -async def get_zone_all_by_category(category:str="all",nocache:bool=False,key: str = Depends(header_scheme)): - check_apikey(key) - endpoint=f"/zones/by-category/{category}" - cache= rd.get(endpoint) - start = time.time() - if cache and not nocache: - logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") - payload=json.loads(cache) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return payload - else: - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - json_data = [json.loads(z.model_dump_json() if z else "{}") - for z in zone_repository.get_all_zones_by_category(session,category if category != 'all' else None)] - rd.set(endpoint, json.dumps(json_data)) - rd.expire(endpoint,settings.redis_cache_expiration) - logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") - return json_data - -@app.get("/zones/{zones_id}") -async def get_zone(zones_id:int,key: str = Depends(header_scheme)): - check_apikey(key) - use_cases = UseCases() - zone_repository = use_cases.zone_repository() - db = use_cases.db() - with db.session() as session: - return zone_repository.get_zone_by_id(session,zones_id) - @app.get("/") async def root(request:Request): return {