-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
refactor: ports, vessels, zones routers
- Loading branch information
RV
committed
Oct 7, 2024
1 parent
d381cd5
commit e9c7c2b
Showing
6 changed files
with
381 additions
and
275 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
from fastapi import APIRouter, Depends, HTTPException, Request | ||
from redis import Redis | ||
from bloom.config import settings | ||
from bloom.container import UseCases | ||
from pydantic import BaseModel, Field | ||
from typing_extensions import Annotated, Literal, Optional | ||
from datetime import datetime, timedelta | ||
import time | ||
import redis | ||
import json | ||
from sqlalchemy import select, func, and_, or_ | ||
from bloom.infra.database import sql_model | ||
from bloom.infra.repositories.repository_segment import SegmentRepository | ||
from bloom.config import settings | ||
from bloom.container import UseCases | ||
from bloom.domain.vessel import Vessel | ||
from bloom.logger import logger | ||
from bloom.domain.metrics import (ResponseMetricsVesselInActiviySchema, | ||
ResponseMetricsZoneVisitedSchema, | ||
ResponseMetricsZoneVisitingTimeByVesselSchema) | ||
from bloom.domain.api import ( DatetimeRangeRequest, | ||
PaginatedRequest,OrderByRequest,OrderByEnum, | ||
paginate,PagedResponseSchema,PageParams, | ||
X_API_KEY_HEADER,check_apikey) | ||
from bloom.config import settings | ||
|
||
# Router for the port endpoints; responses are cached in a module-level Redis client.
router = APIRouter()
# NOTE(review): db=0 is hard-coded — presumably shared with the other routers; confirm.
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
|
||
@router.get("/ports", | ||
tags=['Ports']) | ||
async def list_ports(request:Request,nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
endpoint=f"/ports" | ||
cache= rd.get(endpoint) | ||
start = time.time() | ||
if cache and not nocache: | ||
logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") | ||
payload=json.loads(cache) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return payload | ||
else: | ||
use_cases = UseCases() | ||
port_repository = use_cases.port_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
json_data = [json.loads(p.model_dump_json() if p else "{}") | ||
for p in port_repository.get_all_ports(session)] | ||
rd.set(endpoint, json.dumps(json_data)) | ||
rd.expire(endpoint,settings.redis_cache_expiration) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return json_data | ||
|
||
|
||
@router.get("/ports/{port_id}", | ||
tags=['Ports']) | ||
async def get_port(port_id:int,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
use_cases = UseCases() | ||
port_repository = use_cases.port_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
return port_repository.get_port_by_id(session,port_id) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,166 @@ | ||
from fastapi import APIRouter, Depends, HTTPException | ||
from redis import Redis | ||
from bloom.config import settings | ||
from bloom.container import UseCases | ||
from pydantic import BaseModel, Field | ||
from typing_extensions import Annotated, Literal, Optional | ||
from datetime import datetime, timedelta | ||
import time | ||
import redis | ||
import json | ||
from sqlalchemy import select, func, and_, or_ | ||
from bloom.infra.database import sql_model | ||
from bloom.infra.repositories.repository_segment import SegmentRepository | ||
from bloom.config import settings | ||
from bloom.container import UseCases | ||
from bloom.domain.vessel import Vessel | ||
from bloom.logger import logger | ||
from bloom.domain.metrics import (ResponseMetricsVesselInActiviySchema, | ||
ResponseMetricsZoneVisitedSchema, | ||
ResponseMetricsZoneVisitingTimeByVesselSchema) | ||
from bloom.domain.api import ( DatetimeRangeRequest, | ||
PaginatedRequest,OrderByRequest,OrderByEnum, | ||
paginate,PagedResponseSchema,PageParams, | ||
X_API_KEY_HEADER,check_apikey) | ||
|
||
# Router for the vessel endpoints; responses are cached in a module-level Redis client.
router = APIRouter()
# NOTE(review): db=0 is hard-coded — presumably shared with the other routers; confirm.
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0)
|
||
@router.get("/vessels", | ||
tags=['Vessels']) | ||
async def list_vessels(nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)): | ||
print(f"KEY:{key}") | ||
check_apikey(key) | ||
endpoint=f"/vessels" | ||
cache= rd.get(endpoint) | ||
start = time.time() | ||
if cache and not nocache: | ||
logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") | ||
payload=json.loads(cache) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return payload | ||
else: | ||
use_cases = UseCases() | ||
vessel_repository = use_cases.vessel_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
|
||
json_data = [json.loads(v.model_dump_json() if v else "{}") | ||
for v in vessel_repository.get_vessels_list(session)] | ||
rd.set(endpoint, json.dumps(json_data)) | ||
rd.expire(endpoint,settings.redis_cache_expiration) | ||
return json_data | ||
|
||
@router.get("/vessels/{vessel_id}", | ||
tags=['Vessels']) | ||
async def get_vessel(vessel_id: int,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
use_cases = UseCases() | ||
vessel_repository = use_cases.vessel_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
return vessel_repository.get_vessel_by_id(session,vessel_id) | ||
|
||
@router.get("/vessels/all/positions/last", | ||
tags=['Vessels']) | ||
async def list_all_vessel_last_position(nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
endpoint=f"/vessels/all/positions/last" | ||
cache= rd.get(endpoint) | ||
start = time.time() | ||
if cache and not nocache: | ||
logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") | ||
payload=json.loads(cache) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return payload | ||
else: | ||
use_cases = UseCases() | ||
segment_repository = use_cases.segment_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
json_data = [json.loads(p.model_dump_json() if p else "{}") | ||
for p in segment_repository.get_all_vessels_last_position(session)] | ||
rd.set(endpoint, json.dumps(json_data)) | ||
rd.expire(endpoint,settings.redis_cache_expiration) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return json_data | ||
|
||
@router.get("/vessels/{vessel_id}/positions/last", | ||
tags=['Vessels']) | ||
async def get_vessel_last_position(vessel_id: int, nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
endpoint=f"/vessels/{vessel_id}/positions/last" | ||
cache= rd.get(endpoint) | ||
start = time.time() | ||
if cache and not nocache: | ||
logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") | ||
payload=json.loads(cache) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return payload | ||
else: | ||
use_cases = UseCases() | ||
segment_repository = use_cases.segment_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
result=segment_repository.get_vessel_last_position(session,vessel_id) | ||
json_data = json.loads(result.model_dump_json() if result else "{}") | ||
rd.set(endpoint, json.dumps(json_data)) | ||
rd.expire(endpoint,settings.redis_cache_expiration) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return json_data | ||
|
||
@router.get("/vessels/{vessel_id}/excursions", | ||
tags=['Vessels']) | ||
async def list_vessel_excursions(vessel_id: int, nocache:bool=False,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
endpoint=f"/vessels/{vessel_id}/excursions" | ||
cache= rd.get(endpoint) | ||
start = time.time() | ||
if cache and not nocache: | ||
logger.debug(f"{endpoint} cached ({settings.redis_cache_expiration})s") | ||
payload=json.loads(cache) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return payload | ||
else: | ||
use_cases = UseCases() | ||
excursion_repository = use_cases.excursion_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
json_data = [json.loads(p.model_dump_json() if p else "{}") | ||
for p in excursion_repository.get_excursions_by_vessel_id(session,vessel_id)] | ||
rd.set(endpoint, json.dumps(json_data)) | ||
rd.expire(endpoint,settings.redis_cache_expiration) | ||
logger.debug(f"{endpoint} elapsed Time: {time.time()-start}") | ||
return json_data | ||
|
||
|
||
@router.get("/vessels/{vessel_id}/excursions/{excursions_id}", | ||
tags=['Vessels']) | ||
async def get_vessel_excursion(vessel_id: int,excursions_id: int,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
use_cases = UseCases() | ||
excursion_repository = use_cases.excursion_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
return excursion_repository.get_vessel_excursion_by_id(session,vessel_id,excursions_id) | ||
|
||
|
||
@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments", | ||
tags=['Vessels']) | ||
async def list_vessel_excursion_segments(vessel_id: int,excursions_id: int,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
use_cases = UseCases() | ||
segment_repository = use_cases.segment_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
return segment_repository.list_vessel_excursion_segments(session,vessel_id,excursions_id) | ||
|
||
@router.get("/vessels/{vessel_id}/excursions/{excursions_id}/segments/{segment_id}", | ||
tags=['Vessels']) | ||
async def get_vessel_excursion_segment(vessel_id: int,excursions_id: int, segment_id:int,key: str = Depends(X_API_KEY_HEADER)): | ||
check_apikey(key) | ||
use_cases = UseCases() | ||
segment_repository = use_cases.segment_repository() | ||
db = use_cases.db() | ||
with db.session() as session: | ||
return segment_repository.get_vessel_excursion_segment_by_id(session,vessel_id,excursions_id,segment_id) |
Oops, something went wrong.