Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin' into retry
Browse files Browse the repository at this point in the history
  • Loading branch information
bdpedigo committed Sep 9, 2024
2 parents 2f246e8 + dc6f24b commit 5b0c666
Show file tree
Hide file tree
Showing 11 changed files with 459 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 5.25.2
current_version = 5.26.0
commit = True
tag = True

Expand Down
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -118,3 +118,5 @@ notebooks/
*.mp4
*.tiff
.idea/


2 changes: 1 addition & 1 deletion caveclient/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "5.25.2"
__version__ = "5.26.0"

from .frameworkclient import CAVEclient
from .session_config import SESSION_DEFAULTS
Expand Down
65 changes: 54 additions & 11 deletions caveclient/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import os
import urllib
import webbrowser
from typing import Optional

import requests

Expand Down Expand Up @@ -44,13 +45,21 @@ def write_token(token, filepath, key, overwrite=True):
json.dump(secrets, f)


def server_token_filename(server_address):
    """Return the expanded path of the CAVE secret file for a server.

    The filename is derived from the network location (host[:port]) of
    ``server_address`` and placed under ``default_token_location``.
    """
    netloc = urllib.parse.urlparse(server_address).netloc
    secret_name = f"{netloc}-cave-secret.json"
    return os.path.expanduser(os.path.join(default_token_location, secret_name))


class AuthClient(object):
def __init__(
self,
token_file=None,
token_key=None,
token=None,
server_address=default_global_server_address,
local_server=None,
):
"""Client to find and use auth tokens to access the dynamic annotation framework services.
Expand All @@ -71,11 +80,11 @@ def __init__(
server_address : str, optional,
URL to the auth server. By default, uses a default server address.
"""
self._server_address = server_address
self._local_server = local_server

if token_file is None:
server = urllib.parse.urlparse(server_address).netloc
server_file = server + "-cave-secret.json"
self._server_file_path = os.path.join(default_token_location, server_file)
self._server_file_path = os.path.expanduser(self._server_file_path)
self._server_file_path = server_token_filename(self._server_address)
if os.path.isfile(self._server_file_path):
token_file = self._server_file_path
else:
Expand All @@ -102,7 +111,6 @@ def __init__(
break
self._token = token

self._server_address = server_address
self._default_endpoint_mapping = {"auth_server_address": self._server_address}

@property
Expand Down Expand Up @@ -215,12 +223,12 @@ def get_new_token(self, open=False, no_text=False):

def save_token(
self,
token=None,
token_key=default_token_key,
overwrite=False,
token_file=None,
switch_token=True,
write_to_server_file=True,
token: Optional[str] = None,
token_key: str = default_token_key,
overwrite: bool = False,
token_file: Optional[str] = None,
switch_token: bool = True,
write_to_server_file: bool = True,
):
"""Conveniently save a token in the correct format.
Expand Down Expand Up @@ -313,3 +321,38 @@ def request_header(self):
return auth_header
else:
return {}

@property
def local_server(self):
    """The datastack-local server address associated with this auth client, if any."""
    return self._local_server

@local_server.setter
def local_server(self, value):
    # Record the new local server, then mirror the current token into
    # that server's secret file so both stay in sync.
    self._local_server = value
    self._synchronize_local_server_file()

@property
def local_server_filepath(self):
    """Path to the local server's token file, or ``None`` when no local server is set."""
    if not self.local_server:
        return None
    return server_token_filename(self.local_server)

def _synchronize_local_server_file(self):
    """Ensure the local server's token file holds the current token.

    No-op when no local server is configured. Otherwise the token file is
    (re)written whenever it is missing or stores a different token.
    """
    if not self.local_server:
        return
    filepath = self.local_server_filepath
    # Skip the write only when the file already holds the current token;
    # short-circuit avoids reading a file that does not exist.
    if os.path.exists(filepath) and self._load_token(filepath, self._token_key) == self.token:
        return
    self.save_token(
        token=self.token,
        token_file=filepath,
        overwrite=True,
    )
27 changes: 25 additions & 2 deletions caveclient/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,11 @@
+ "/aligned_volume/{aligned_volume_name}/table/{table_name}/count",
}

annotation_api_versions = {0: annotation_endpoints_legacy, 2: annotation_endpoints_v2}

# -------------------------------
# ------ MaterializationEngine endpoints
# -------------------------------

materialization_common = {
"get_api_versions": "{me_server_address}/materialize/api/versions"
Expand Down Expand Up @@ -94,8 +99,6 @@
3: materialization_endpoints_v3,
}

annotation_api_versions = {0: annotation_endpoints_legacy, 2: annotation_endpoints_v2}

# -------------------------------
# ------ Infoservice endpoints
# -------------------------------
Expand Down Expand Up @@ -284,3 +287,23 @@
"get_info": "{ngl_url}/version.json",
"fallback_ngl_url": fallback_ngl_endpoint,
}

# -------------------------------
# ------ Skeleton endpoints
# -------------------------------

# Endpoint templates for the SkeletonCache service; no endpoints are shared
# across API versions yet.
skeletonservice_common = {}

skeleton_common = "{skeleton_server_address}/skeletoncache/api"
skeleton_v1 = "{skeleton_server_address}/skeletoncache/api/v1"

# All v1 skeleton routes hang off the same datastack-scoped prefix.
_skeleton_v1_prefix = skeleton_v1 + "/{datastack_name}/precomputed/skeleton"

skeletonservice_endpoints_v1 = {
    "get_version": skeleton_common + "/version",
    "skeleton_info": _skeleton_v1_prefix + "/info",
    "get_skeleton_via_rid": _skeleton_v1_prefix + "/{root_id}",
    "get_skeleton_via_skvn_rid": _skeleton_v1_prefix + "/{skeleton_version}/{root_id}",
    "get_skeleton_via_skvn_rid_fmt": _skeleton_v1_prefix
    + "/{skeleton_version}/{root_id}/{output_format}",
}
skeletonservice_api_versions = {1: skeletonservice_endpoints_v1}
31 changes: 31 additions & 0 deletions caveclient/frameworkclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from .jsonservice import JSONService, JSONServiceV1
from .l2cache import L2CacheClient, L2CacheClientLegacy
from .materializationengine import MaterializationClient, MaterializationClientType
from .skeletonservice import SkeletonClient


class GlobalClientError(Exception):
Expand Down Expand Up @@ -52,6 +53,7 @@ def __new__(
- `client.info` is an `InfoServiceClient` (see [client.info](../client_api/info.md))
- `client.l2cache` is an `L2CacheClient` (see [client.l2cache](../client_api/l2cache.md))
- `client.materialize` is a `MaterializationClient` (see [client.materialize](../client_api/materialize.md))
- `client.skeleton` is a `SkeletonClient` (see [client.skeleton](../client_api/skeleton.md))
- `client.schema` is a `SchemaClient` (see [client.schema](../client_api/schema.md))
- `client.state` is a neuroglancer `JSONService` (see [client.state](../client_api/state.md))
Expand Down Expand Up @@ -312,6 +314,9 @@ def chunkedgraph(self) -> None:
def datastack_name(self) -> None:
return None

def __repr__(self):
    # Global clients are not bound to a datastack, so only the server
    # address is meaningful here.
    return "CAVEclient<datastack=None, server_address={}>".format(self.server_address)


class CAVEclientFull(CAVEclientGlobal):
def __init__(
Expand Down Expand Up @@ -346,6 +351,7 @@ def __init__(
- `client.info` is an `InfoServiceClient` (see [client.info](../client_api/info.md))
- `client.l2cache` is an `L2CacheClient` (see [client.l2cache](../client_api/l2cache.md))
- `client.materialize` is a `MaterializationClient` (see [client.materialize](../client_api/materialize.md))
- `client.skeleton` is a `SkeletonClient` (see [client.skeleton](../client_api/skeleton.md))
- `client.schema` is a `SchemaClient` (see [client.schema](../client_api/schema.md))
- `client.state` is a neuroglancer `JSONService` (see [client.state](../client_api/state.md))
Expand Down Expand Up @@ -399,9 +405,12 @@ def __init__(
self._chunkedgraph = None
self._annotation = None
self._materialize = None
self._skeleton = None
self._l2cache = None
self.desired_resolution = desired_resolution
self.local_server = self.info.local_server()
self.auth.local_server = self.local_server

av_info = self.info.get_aligned_volume_info()
self._aligned_volume_name = av_info["name"]

Expand Down Expand Up @@ -444,6 +453,7 @@ def _reset_services(self):
self._chunkedgraph = None
self._annotation = None
self._materialize = None
self._skeleton = None
self._l2cache = None

@property
Expand Down Expand Up @@ -513,6 +523,24 @@ def materialize(self) -> MaterializationClientType:
)
return self._materialize

@property
def skeleton(self) -> SkeletonClient:
    """
    A client for the skeleton service. See [client.skeleton](../client_api/skeleton.md)
    for more information.
    """
    # Lazily construct the client on first access and cache it.
    if self._skeleton is not None:
        return self._skeleton
    self._skeleton = SkeletonClient(
        server_address=self.local_server,
        auth_client=self.auth,
        datastack_name=self._datastack_name,
        max_retries=self._max_retries,
        pool_maxsize=self._pool_maxsize,
        pool_block=self._pool_block,
        over_client=self,
    )
    return self._skeleton

@property
def state(self) -> JSONServiceV1:
"""
Expand Down Expand Up @@ -553,3 +581,6 @@ def l2cache(self) -> L2CacheClientLegacy:
over_client=self,
)
return self._l2cache

def __repr__(self):
    # Full clients are bound to a datastack; surface both identifiers.
    return "CAVEclient<datastack_name={}, server_address={}>".format(
        self.datastack_name, self.server_address
    )
32 changes: 25 additions & 7 deletions caveclient/jsonservice.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import numbers
import os
import re
from typing import Optional

import numpy as np

Expand Down Expand Up @@ -251,7 +252,12 @@ def upload_state_json(self, json_state, state_id=None, timestamp=None):
return int(response_re.groups()[0])

@_check_version_compatibility(">=0.4.0")
def upload_property_json(self, property_json, state_id=None, timestamp=None):
def upload_property_json(
self,
property_json,
state_id=None,
max_size: Optional[int] = 2_500_000,
):
"""Upload a Neuroglancer JSON state
Parameters
Expand All @@ -261,8 +267,9 @@ def upload_property_json(self, property_json, state_id=None, timestamp=None):
state_id : int
ID of a JSON state uploaded to the state service.
Using a state_id is an admin feature.
timestamp: time.time
Timestamp for json state date. Requires state_id.
max_size: int
Maximum size in bytes for the data to upload. Default is 2.5MB. Set to None
for no limit.
Returns
-------
Expand All @@ -278,12 +285,23 @@ def upload_property_json(self, property_json, state_id=None, timestamp=None):
url_mapping["state_id"] = state_id
url = self._endpoints["upload_properties_w_id"].format_map(url_mapping)

data = json.dumps(
property_json,
default=neuroglancer_json_encoder,
)

# get size in bytes of data to upload
data_size = len(data.encode("utf-8"))

if max_size is not None and data_size > max_size:
msg = f"Data size {data_size} exceeds maximum size of {max_size} bytes. "
msg += "Please reduce the size of the data or increase the `max_size` "
msg += "if your state server can handle larger inputs."
raise ValueError(msg)

response = self.session.post(
url,
data=json.dumps(
property_json,
default=neuroglancer_json_encoder,
),
data=data,
)
handle_response(response, as_json=False)
response_re = re.search(".*\/(\d+)", str(response.content))
Expand Down
Loading

0 comments on commit 5b0c666

Please sign in to comment.