Skip to content

Commit

Permalink
Thread include_data_sources through client and server.
Browse files Browse the repository at this point in the history
  • Loading branch information
danielballan committed Feb 6, 2024
1 parent 623d025 commit 83f1585
Show file tree
Hide file tree
Showing 6 changed files with 83 additions and 18 deletions.
30 changes: 28 additions & 2 deletions tiled/client/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,12 +83,21 @@ def delete_revision(self, n):


class BaseClient:
def __init__(self, context, *, item, structure_clients, structure=None):
def __init__(
self,
context,
*,
item,
structure_clients,
structure=None,
include_data_sources=False,
):
self._context = context
self._item = item
self._cached_len = None # a cache just for __len__
self.structure_clients = structure_clients
self._metadata_revisions = None
self._include_data_sources = include_data_sources
attributes = self.item["attributes"]
structure_family = attributes["structure_family"]
if structure is not None:
Expand Down Expand Up @@ -190,17 +199,34 @@ def structure_family(self):

@property
def data_sources(self):
    """
    Return the data source information fetched with this item.

    Raises RuntimeError if the client was constructed without requesting
    data sources; use include_data_sources() to obtain a client that does.
    """
    if self._include_data_sources:
        return self.item["attributes"].get("data_sources")
    raise RuntimeError(
        "Data Sources were not fetched. Use include_data_sources()"
    )

def new_variation(self, structure_clients=UNCHANGED, **kwargs):
def include_data_sources(self):
    """
    Return a fresh client like this one that also fetches data source info.

    The new client is refreshed so its cached item includes the
    data_sources attribute from the server.
    """
    variation = self.new_variation(self.context, include_data_sources=True)
    return variation.refresh()

def new_variation(
    self,
    context,
    structure_clients=UNCHANGED,
    include_data_sources=UNCHANGED,
    **kwargs,
):
    """
    This is intended primarily for internal use and use by subclasses.

    Construct a client of the same type for the same item, carrying over
    any settings that are left as the UNCHANGED sentinel.
    """
    # Resolve each UNCHANGED sentinel to this instance's current setting.
    resolved_structure_clients = (
        self.structure_clients
        if structure_clients is UNCHANGED
        else structure_clients
    )
    resolved_include_data_sources = (
        self._include_data_sources
        if include_data_sources is UNCHANGED
        else include_data_sources
    )
    return type(self)(
        context,
        item=self._item,
        structure_clients=resolved_structure_clients,
        include_data_sources=resolved_include_data_sources,
        **kwargs,
    )

Expand Down
21 changes: 18 additions & 3 deletions tiled/client/constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def from_uri(
prompt_for_reauthentication=UNSET,
headers=None,
timeout=None,
include_data_sources=False,
):
"""
Connect to a Node on a local or remote server.
Expand Down Expand Up @@ -57,6 +58,8 @@ def from_uri(
timeout : httpx.Timeout, optional
If None, use Tiled default settings.
(To disable timeouts, use httpx.Timeout(None)).
include_data_sources : bool, optional
Default False. If True, fetch information about underlying data sources.
"""
context, node_path_parts = Context.from_any_uri(
uri,
Expand All @@ -73,6 +76,7 @@ def from_uri(
username=username,
auth_provider=auth_provider,
node_path_parts=node_path_parts,
include_data_sources=include_data_sources,
)


Expand All @@ -83,6 +87,7 @@ def from_context(
username=UNSET,
auth_provider=UNSET,
node_path_parts=None,
include_data_sources=False,
):
"""
Advanced: Connect to a Node using a custom instance of httpx.Client or httpx.AsyncClient.
Expand Down Expand Up @@ -132,7 +137,11 @@ def from_context(
item_uri = f"{context.api_uri}metadata/{'/'.join(node_path_parts)}"
try:
content = handle_error(
context.http_client.get(item_uri, headers={"Accept": MSGPACK_MIME_TYPE})
context.http_client.get(
item_uri,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"include_data_sources": include_data_sources},
)
).json()
except ClientError as err:
if (
Expand All @@ -142,12 +151,18 @@ def from_context(
):
context.authenticate()
content = handle_error(
context.http_client.get(item_uri, headers={"Accept": MSGPACK_MIME_TYPE})
context.http_client.get(
item_uri,
headers={"Accept": MSGPACK_MIME_TYPE},
params={"include_data_sources": include_data_sources},
)
).json()
else:
raise
item = content["data"]
return client_for_item(context, structure_clients, item)
return client_for_item(
context, structure_clients, item, include_data_sources=include_data_sources
)


def from_profile(name, structure_clients=None, **kwargs):
Expand Down
27 changes: 24 additions & 3 deletions tiled/client/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ def __init__(
queries=None,
sorting=None,
structure=None,
include_data_sources=False,
):
"This is not user-facing. Use Node.from_uri."

Expand Down Expand Up @@ -108,6 +109,7 @@ def __init__(
context=context,
item=item,
structure_clients=structure_clients,
include_data_sources=include_data_sources,
)

def __repr__(self):
Expand Down Expand Up @@ -252,6 +254,7 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
self.item["links"]["search"],
headers={"Accept": MSGPACK_MIME_TYPE},
params={
"include_data_sources": self._include_data_sources,
**_queries_to_params(KeyLookup(key)),
**self._queries_as_params,
**self._sorting_params,
Expand All @@ -269,7 +272,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
len(data) == 1
), "The key lookup query must never result more than one result."
(item,) = data
result = client_for_item(self.context, self.structure_clients, item)
result = client_for_item(
self.context,
self.structure_clients,
item,
include_data_sources=self._include_data_sources,
)
if tail:
result = result[tail]
else:
Expand Down Expand Up @@ -301,6 +309,9 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
self.context.http_client.get(
self_link + "".join(f"/{key}" for key in keys[i:]),
headers={"Accept": MSGPACK_MIME_TYPE},
params={
"include_data_sources": self._include_data_sources
},
)
).json()
except ClientError as err:
Expand All @@ -313,7 +324,12 @@ def __getitem__(self, keys, _ignore_inlined_contents=False):
raise
item = content["data"]
break
result = client_for_item(self.context, self.structure_clients, item)
result = client_for_item(
self.context,
self.structure_clients,
item,
include_data_sources=self._include_data_sources,
)
return result

def delete(self, key):
Expand Down Expand Up @@ -401,7 +417,12 @@ def _items_slice(self, start, stop, direction, _ignore_inlined_contents=False):
self.context.http_client.get(
next_page_url,
headers={"Accept": MSGPACK_MIME_TYPE},
params={**self._queries_as_params, **sorting_params},
params={
"include_data_sources": self._include_data_sources,
**self._queries_as_params,
**sorting_params,
},
include_data_sources=self._include_data_sources,
)
).json()
self._cached_len = (
Expand Down
5 changes: 4 additions & 1 deletion tiled/client/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,9 @@ def export_util(file, format, get, link, params):
file.write(content)


def client_for_item(context, structure_clients, item, structure=None):
def client_for_item(
context, structure_clients, item, structure=None, include_data_sources=False
):
"""
Create an instance of the appropriate client class for an item.
Expand Down Expand Up @@ -129,6 +131,7 @@ def client_for_item(context, structure_clients, item, structure=None):
item=item,
structure_clients=structure_clients,
structure=structure,
include_data_sources=include_data_sources,
)


Expand Down
10 changes: 5 additions & 5 deletions tiled/server/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ async def construct_entries_response(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
filters,
sort,
base_url,
Expand Down Expand Up @@ -230,7 +230,7 @@ async def construct_entries_response(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
media_type,
max_depth=max_depth,
)
Expand Down Expand Up @@ -398,14 +398,14 @@ async def construct_resource(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
media_type,
max_depth,
depth=0,
):
path_str = "/".join(path_parts)
attributes = {"ancestors": path_parts[:-1]}
if show_sources and hasattr(entry, "data_sources"):
if include_data_sources and hasattr(entry, "data_sources"):
attributes["data_sources"] = entry.data_sources
if schemas.EntryFields.metadata in fields:
if select_metadata is not None:
Expand Down Expand Up @@ -461,7 +461,7 @@ async def construct_resource(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
media_type,
max_depth,
depth=1 + depth,
Expand Down
8 changes: 4 additions & 4 deletions tiled/server/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ async def search(
sort: Optional[str] = Query(None),
max_depth: Optional[int] = Query(None, ge=0, le=DEPTH_LIMIT),
omit_links: bool = Query(False),
show_sources: bool = Query(False),
include_data_sources: bool = Query(False),
entry: Any = SecureEntry(scopes=["read:metadata"]),
query_registry=Depends(get_query_registry),
principal: str = Depends(get_current_principal),
Expand All @@ -172,7 +172,7 @@ async def search(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
filters,
sort,
get_base_url(request),
Expand Down Expand Up @@ -306,7 +306,7 @@ async def metadata(
select_metadata: Optional[str] = Query(None),
max_depth: Optional[int] = Query(None, ge=0, le=DEPTH_LIMIT),
omit_links: bool = Query(False),
show_sources: bool = Query(False),
include_data_sources: bool = Query(False),
entry: Any = SecureEntry(scopes=["read:metadata"]),
root_path: bool = Query(False),
):
Expand All @@ -323,7 +323,7 @@ async def metadata(
fields,
select_metadata,
omit_links,
show_sources,
include_data_sources,
resolve_media_type(request),
max_depth=max_depth,
)
Expand Down

0 comments on commit 83f1585

Please sign in to comment.