Skip to content

Commit

Permalink
Add Line and Points
Browse files Browse the repository at this point in the history
  • Loading branch information
CannonLock committed Mar 19, 2024
1 parent 54251a7 commit 7706c25
Show file tree
Hide file tree
Showing 6 changed files with 336 additions and 223 deletions.
168 changes: 5 additions & 163 deletions api/app.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,21 @@
import secrets
import urllib.parse
from contextlib import asynccontextmanager
from typing import List

import starlette.requests
import uvicorn
from fastapi import FastAPI, HTTPException, Response, status, Depends
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy import select, func
from sqlalchemy.exc import NoResultFound, NoSuchTableError

import dotenv
dotenv.load_dotenv()

import api.routes.security
import api.database as db
from api.database import (
connect_engine,
dispose_engine,
get_async_session,
get_engine,
patch_sources_sub_table,
select_sources_sub_table,
dispose_engine
)
from api.models.geometries import PolygonModel, PolygonRequestModel, PolygonResponseModel, CopyColumnRequest
from api.models.source import Sources
from api.query_parser import ParserException
from api.routes.security import has_access

from api.routes.object import router as object_router
from api.routes.ingest import router as ingest_router
from api.routes.sources import router as sources_router

import api.schemas as schemas

@asynccontextmanager
async def setup_engine(a: FastAPI):
Expand Down Expand Up @@ -63,150 +48,7 @@ async def setup_engine(a: FastAPI):
app.include_router(api.routes.security.router)
app.include_router(object_router)
app.include_router(ingest_router)


@app.get("/sources")
async def get_sources(response: Response, page: int = 0, page_size: int = 100, include_geom: bool = False) -> List[Sources]:
    """Return one page of source rows.

    Args:
        response: FastAPI response object used to attach the pagination header.
        page: Zero-based page index.
        page_size: Number of rows per page.
        include_geom: When False (default) the large geometry columns are
            stripped from each row to keep the payload small.

    Returns:
        The requested page of ``Sources`` rows.
    """
    async_session = get_async_session(get_engine())
    sources = await db.get_sources(async_session, page, page_size)

    # Geometries are large; drop them unless the caller explicitly asked.
    if not include_geom:
        for source in sources:
            del source.rgeom
            del source.web_geom

    # Advertise the next page. BUG FIX: the original passed the *values* of
    # page/page_size as the dict keys (producing e.g. "0=1&100=100") and
    # omitted the "?" separator; the keys must be the literal parameter names.
    response.headers["Link"] = "/sources?" + urllib.parse.urlencode(
        {"page": page + 1, "page_size": page_size}
    )

    return sources


@app.get("/sources/{source_id}")
async def get_source(source_id: int, include_geom: bool = False) -> Sources:
    """Fetch a single source row by id, excluding the geometry columns.

    Raises:
        HTTPException: 404 if no row matches ``source_id``.
    """
    # NOTE(review): include_geom is currently ignored here — the geometry
    # columns are always excluded. Confirm whether it should be honored.

    engine = get_engine()
    async_session = get_async_session(engine)

    async with async_session() as session:

        # Select every column except the (large) geometry columns.
        select_stmt = select(
            *[c for c in schemas.Sources.__table__.c if c.name not in ['rgeom', 'web_geom']]
        ).where(schemas.Sources.source_id == source_id)

        results = await session.execute(select_stmt)

        models = db.results_to_model(results, Sources)

        # BUG FIX: execute() never returns None, so the original None check
        # could never fire; test for an empty result set instead. Also report
        # the actual source_id (the original f-string interpolated the `id`
        # builtin, printing "<built-in function id>").
        if not models:
            raise HTTPException(status_code=404, detail=f"Object with id ({source_id}) not found")

        return models[0]


@app.get("/sources/{table_id}/polygons", response_model=List[PolygonResponseModel])
async def get_sub_sources(
    response: Response,
    request: starlette.requests.Request,
    table_id: int,
    page: int = 0,
    page_size: int = 100
):
    """Return one page of rows from a source's polygon sub-table.

    Any query-string parameter other than ``page``/``page_size`` is treated
    as a column filter and forwarded to the query parser. The total row
    count (after filtering) is exposed via the ``X-Total-Count`` header.

    Raises:
        HTTPException: 400 on a malformed filter or unknown source table.
    """

    try:
        # Everything except the pagination params is a column filter.
        filter_query_params = [
            item for item in request.query_params.multi_items()
            if item[0] not in ("page", "page_size")
        ]
        result = await select_sources_sub_table(
            engine=get_engine(),
            table_id=table_id,
            page=page,
            page_size=page_size,
            query_params=filter_query_params
        )

        # Add metadata to the response
        response.headers["X-Total-Count"] = str(
            await db.get_sources_sub_table_count(
                engine=get_engine(),
                query_params=filter_query_params,
                table_id=table_id)
        )

        return result.to_dict()

    except ParserException as e:
        # BUG FIX: detail must be JSON-serializable; pass the message,
        # not the exception object.
        raise HTTPException(status_code=400, detail=str(e))

    except NoSuchTableError:
        raise HTTPException(status_code=400, detail=f"Source table with id ({table_id}) not found")


@app.patch("/sources/{table_id}/polygons", response_model=List[PolygonModel])
async def patch_sub_sources(
    request: starlette.requests.Request,
    table_id: int,
    polygon_updates: PolygonRequestModel,
    user_has_access: bool = Depends(has_access),
):
    """Apply ``polygon_updates`` to every polygon row matching the
    query-string filters.

    Raises:
        HTTPException: 401 without access, 400 on a bad filter, unknown
            table, or when no rows matched.
    """
    # NOTE(review): the decorator declares response_model=List[PolygonModel]
    # but the handler returns 204 No Content — confirm which is intended.

    if not user_has_access:
        raise HTTPException(status_code=401, detail="User does not have access to patch object")

    try:
        result = await patch_sources_sub_table(
            engine=get_engine(),
            table_id=table_id,
            update_values=polygon_updates.model_dump(exclude_none=True),
            query_params=request.query_params.multi_items()
        )

    except ParserException as e:
        # BUG FIX: detail must be JSON-serializable; pass the message,
        # not the exception object.
        raise HTTPException(status_code=400, detail=str(e))

    except NoSuchTableError:
        raise HTTPException(status_code=400, detail=f"Source table with id ({table_id}) not found")

    if result.rowcount == 0:
        raise HTTPException(status_code=400, detail="No rows patched, if this is unexpected please report as bug")

    return Response(status_code=status.HTTP_204_NO_CONTENT)


@app.patch("/sources/{table_id}/polygons/{target_column}", response_model=List[PolygonModel])
async def patch_sub_sources(
    request: starlette.requests.Request,
    target_column: str,
    table_id: int,
    copy_column: CopyColumnRequest,
    user_has_access: bool = Depends(has_access),
):
    """Set ``target_column`` equal to ``copy_column.source_column`` for every
    polygon row matching the query-string filters.

    NOTE(review): this redefines the module-level name ``patch_sub_sources``
    from the plain polygons PATCH route above. FastAPI registers both routes
    at decoration time so both work, but the name should be unique.

    Raises:
        HTTPException: 401 without access, 400 on a bad filter, unknown
            table, or when no rows matched.
    """

    if not user_has_access:
        raise HTTPException(status_code=401, detail="User does not have access to patch object")

    try:
        result = await db.patch_sources_sub_table_set_columns_equal(
            engine=get_engine(),
            table_id=table_id,
            source_column=copy_column.source_column,
            target_column=target_column,
            query_params=request.query_params.multi_items()
        )

    except ParserException as e:
        # BUG FIX: detail must be JSON-serializable; pass the message,
        # not the exception object.
        raise HTTPException(status_code=400, detail=str(e))

    except NoSuchTableError:
        # BUG FIX: CopyColumnRequest has no `table_id` attribute (only
        # `source_column`); the original f-string would raise AttributeError
        # while building the 400 response. Use the path parameter instead.
        raise HTTPException(status_code=400, detail=f"Source table with id ({table_id}) not found")

    if result.rowcount == 0:
        raise HTTPException(status_code=400, detail="No rows patched, if this is unexpected please report as bug")

    return Response(status_code=status.HTTP_204_NO_CONTENT)





app.include_router(sources_router)

# Development entry point. X-Total-Count is exposed via CORS headers so that
# browser clients can read the pagination count on cross-origin responses.
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000, headers=[("Access-Control-Expose-Headers", "X-Total-Count")])
87 changes: 46 additions & 41 deletions api/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
#
import datetime
from os import environ
from typing import Type, List
from typing import Type, List, Literal

from pydantic import BaseModel
from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine
Expand Down Expand Up @@ -58,10 +58,11 @@ def get_async_session(engine: AsyncEngine, **kwargs) -> async_sessionmaker[Async
return async_sessionmaker(engine, **kwargs)


async def source_id_to_primary_table(
async_session: async_sessionmaker[AsyncSession], source_id: id
async def source_id_to_slug(
async_engine: AsyncEngine,
source_id: id
):
async with async_session() as session:
async with get_async_session(async_engine)() as session:
stmt = select(schemas.Sources).where(schemas.Sources.source_id == source_id)
result = await session.scalar(stmt)

Expand All @@ -70,7 +71,7 @@ async def source_id_to_primary_table(
f"Could not find primary_table corresponding with source_id: {source_id}"
)

return result.primary_table
return result.slug


async def get_sources(
Expand Down Expand Up @@ -163,23 +164,29 @@ def to_dict(self):
return l


async def get_polygon_table_name(engine: AsyncEngine, table_id: int) -> str:
session = get_async_session(engine)
try:
primary_table = await source_id_to_primary_table(session, table_id)
return f"{primary_table}"
except NoResultFound as e:
raise NoSuchTableError(e)


async def get_sources_sub_table_count(engine: AsyncEngine, table_id: int, query_params: list = None) -> int:
async def get_table(
    conn,
    table_id: int,
    geometry_type: Literal["polygons", "points", "linestrings"]
) -> Table:
    """Reflect the ``<slug>_<geometry_type>`` table from the ``sources`` schema.

    Args:
        conn: An open SQLAlchemy AsyncConnection on the application engine.
        table_id: source_id used to look up the table slug.
        geometry_type: Which geometry sub-table of the source to load.

    Returns:
        The reflected SQLAlchemy ``Table`` object.
    """
    metadata = MetaData(schema="sources")
    # BUG FIX: the original referenced an undefined name `engine` (the only
    # parameter is `conn`), raising NameError at runtime. Derive the async
    # engine from the connection instead.
    table_slug = await source_id_to_slug(conn.engine, table_id)
    table_name = f"{table_slug}_{geometry_type}"
    # Table reflection is synchronous, so run it on the sync connection.
    table = await conn.run_sync(
        lambda sync_conn: Table(table_name, metadata, autoload_with=sync_conn)
    )
    return table


async def get_sources_sub_table_count(
engine: AsyncEngine,
table_id: int,
geometry_type: Literal["polygons", "points", "linestrings"],
query_params: list = None
) -> int:
async with engine.begin() as conn:
# Grabbing a table from the database as it is
metadata = MetaData(schema="sources")
polygon_table = await get_polygon_table_name(engine, table_id)
table = await conn.run_sync(
lambda sync_conn: Table(polygon_table, metadata, autoload_with=sync_conn)
)

table = await get_table(conn, table_id, geometry_type)

# Extract filters from the query parameters
query_parser = QueryParser(columns=table.columns, query_params=query_params)
Expand Down Expand Up @@ -213,18 +220,14 @@ async def get_sources_sub_table_count(engine: AsyncEngine, table_id: int, query_
async def select_sources_sub_table(
engine: AsyncEngine,
table_id: int,
geometry_type: Literal["polygons", "points", "linestrings"],
page: int = 0,
page_size: int = 100,
query_params: list = None,
) -> SQLResponse:
async with engine.begin() as conn:

# Grabbing a table from the database as it is
metadata = MetaData(schema="sources")
polygon_table = await get_polygon_table_name(engine, table_id)
table = await conn.run_sync(
lambda sync_conn: Table(polygon_table, metadata, autoload_with=sync_conn)
)
table = await get_table(conn, table_id, geometry_type)

# Strip out the unwanted columns
ignored_columns = ["geom", "geometry"] # No reason that this moment to pass this through
Expand Down Expand Up @@ -265,16 +268,16 @@ async def select_sources_sub_table(


async def patch_sources_sub_table(
engine: AsyncEngine, table_id: int, update_values: dict, query_params: list = None
engine: AsyncEngine,
table_id: int,
geometry_type: Literal["polygons", "points", "linestrings"],
update_values: dict,
query_params: list = None
) -> CursorResult:

async with engine.begin() as conn:
# Grabbing a table from the database as it is
metadata = MetaData(schema="sources")
polygon_table = await get_polygon_table_name(engine, table_id)
table = await conn.run_sync(
lambda sync_conn: Table(polygon_table, metadata, autoload_with=sync_conn)
)

table = await get_table(conn, table_id, geometry_type)

# Extract filters from the query parameters
query_parser = QueryParser(columns=table.columns, query_params=query_params)
Expand All @@ -291,16 +294,18 @@ async def patch_sources_sub_table(

return result


async def patch_sources_sub_table_set_columns_equal(
engine: AsyncEngine, table_id: int, target_column: str, source_column: str, query_params: list = None
engine: AsyncEngine,
table_id: int,
geometry_type: Literal["polygons", "points", "lines"],
target_column: str,
source_column: str,
query_params: list = None
) -> CursorResult:
async with engine.begin() as conn:
# Grabbing a table from the database as it is
metadata = MetaData(schema="sources")
polygon_table = await get_polygon_table_name(engine, table_id)
table = await conn.run_sync(
lambda sync_conn: Table(polygon_table, metadata, autoload_with=sync_conn)
)

table = get_table(conn, table_id, geometry_type)

# Extract filters from the query parameters
query_parser = QueryParser(columns=table.columns, query_params=query_params)
Expand Down
24 changes: 17 additions & 7 deletions api/models/geometries.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,23 @@ class CommonModel(BaseModel):
source_id: Optional[Union[int | str]] = None
orig_id: Optional[Union[int | str]] = None
descrip: Optional[str] = None
ready: Optional[Union[bool | str]] = None
omit: Optional[Union[bool | str]] = None


class LineStringModel(CommonModel):
    """Editable attributes of a row in a ``*_linestrings`` sub-table."""

    name: Optional[str] = None
    # NOTE: `descrip` is inherited from CommonModel; the redundant
    # re-declaration of it here has been removed (schema is unchanged).
    type: Optional[str] = None
    direction: Optional[str] = None


class PointModel(CommonModel):
    """Editable attributes of a row in a ``*_points`` sub-table."""

    # Structural-geology measurements; int-or-str because upstream data is
    # not consistently typed — TODO confirm against the ingest pipeline.
    strike: Optional[Union[int | str]] = None
    dip: Optional[Union[int | str]] = None
    dip_dir: Optional[Union[int | str]] = None
    point_type: Optional[str] = None
    certainty: Optional[str] = None
    comments: Optional[str] = None


class PolygonModel(CommonModel):
Expand Down Expand Up @@ -42,11 +58,5 @@ def change_nan_to_none(cls, v):
return v


class LineworkModel(CommonModel):
name: Optional[str] = None
type: Optional[str] = None
direction: Optional[str] = None


class CopyColumnRequest(BaseModel):
source_column: str
Loading

0 comments on commit 7706c25

Please sign in to comment.