From 97be046ee6b6f4c86fd4dfdb65bc1af95986b9f1 Mon Sep 17 00:00:00 2001 From: Keith Wiley Date: Mon, 9 Sep 2024 12:36:26 -0700 Subject: [PATCH 1/4] Skeleton service incorporation into CAVEclient is working, at least in certain respects (#212) * Skeleton engine * Removed extraneous notebook * Client object builds in a notebook test, still pretty empty though. * Entry point added. * SkeletonClient now works in a basic test. * Ongoing skeleton work. * Skeleton service work. * Updates to PR 212 * Updates to PR 212 * Updates for PR 212 * Updates to PR 212 * Attempts to pacify lint checks for PR 212 * Updates toward PR 212 * Trying to appease Git lint formatter * fix import from type checking * tweak for linting * run ruff fix imports * Rebuilt endpoint map for PR 212 * Trying to appease lint tests for PR 212 * Testing formatting cleanup against lint. * Nope, lint insists on allowing ruff to butcher the formatting so it's much harder to visually comprehend. Is this really what we want? * Added 'verify' to super call. * Skeleton endpoint cleanup. * Praying to the lint gods. * Return type cleanup * Added an exception to handle a missing L2Cache. * Trying to make lint happy. * Wrestling with lint. * Wrestling with lint. * Wrestling with lint. * Made a few imports optional. * Wrestling with lint. * Added warnings on import failures. Changed default output from precomputed to none. * Made test function names consistent. * Version endpoint work. * string formatting cleanup. * Wrestling with lint. 
* removing keith dev --------- Co-authored-by: Casey Schneider-Mizell Co-authored-by: Forrest Collman --- .gitignore | 2 + caveclient/endpoints.py | 27 ++- caveclient/frameworkclient.py | 23 +++ caveclient/skeletonservice.py | 298 +++++++++++++++++++++++++++ docs/client_api/skeletonservice.md | 16 ++ docs/extended_api/skeletonservice.md | 5 + docs/tutorials/framework.md | 1 + 7 files changed, 370 insertions(+), 2 deletions(-) create mode 100644 caveclient/skeletonservice.py create mode 100644 docs/client_api/skeletonservice.md create mode 100644 docs/extended_api/skeletonservice.md diff --git a/.gitignore b/.gitignore index 8c573bc8..e8026b8c 100644 --- a/.gitignore +++ b/.gitignore @@ -118,3 +118,5 @@ notebooks/ *.mp4 *.tiff .idea/ + + diff --git a/caveclient/endpoints.py b/caveclient/endpoints.py index b871883f..044e5f6e 100644 --- a/caveclient/endpoints.py +++ b/caveclient/endpoints.py @@ -25,6 +25,11 @@ + "/aligned_volume/{aligned_volume_name}/table/{table_name}/count", } +annotation_api_versions = {0: annotation_endpoints_legacy, 2: annotation_endpoints_v2} + +# ------------------------------- +# ------ MaterializationEngine endpoints +# ------------------------------- materialization_common = { "get_api_versions": "{me_server_address}/materialize/api/versions" @@ -94,8 +99,6 @@ 3: materialization_endpoints_v3, } -annotation_api_versions = {0: annotation_endpoints_legacy, 2: annotation_endpoints_v2} - # ------------------------------- # ------ Infoservice endpoints # ------------------------------- @@ -284,3 +287,23 @@ "get_info": "{ngl_url}/version.json", "fallback_ngl_url": fallback_ngl_endpoint, } + +# ------------------------------- +# ------ Skeleton endpoints +# ------------------------------- + +skeletonservice_common = {} + +skeleton_common = "{skeleton_server_address}/skeletoncache/api" +skeleton_v1 = "{skeleton_server_address}/skeletoncache/api/v1" +skeletonservice_endpoints_v1 = { + "get_version": skeleton_common + "/version", + "skeleton_info": 
skeleton_v1 + "/{datastack_name}/precomputed/skeleton/info", + "get_skeleton_via_rid": skeleton_v1 + + "/{datastack_name}/precomputed/skeleton/{root_id}", + "get_skeleton_via_skvn_rid": skeleton_v1 + + "/{datastack_name}/precomputed/skeleton/{skeleton_version}/{root_id}", + "get_skeleton_via_skvn_rid_fmt": skeleton_v1 + + "/{datastack_name}/precomputed/skeleton/{skeleton_version}/{root_id}/{output_format}", +} +skeletonservice_api_versions = {1: skeletonservice_endpoints_v1} diff --git a/caveclient/frameworkclient.py b/caveclient/frameworkclient.py index 4af8baed..dd853d90 100644 --- a/caveclient/frameworkclient.py +++ b/caveclient/frameworkclient.py @@ -11,6 +11,7 @@ from .jsonservice import JSONService, JSONServiceV1 from .l2cache import L2CacheClient, L2CacheClientLegacy from .materializationengine import MaterializationClient, MaterializationClientType +from .skeletonservice import SkeletonClient DEFAULT_RETRIES = 3 @@ -53,6 +54,7 @@ def __new__( - `client.info` is an `InfoServiceClient` (see [client.info](../client_api/info.md)) - `client.l2cache` is an `L2CacheClient` (see [client.l2cache](../client_api/l2cache.md)) - `client.materialize` is a `MaterializationClient` (see [client.materialize](../client_api/materialize.md)) + - `client.skeleton` is a `SkeletonClient` (see [client.skeleton](../client_api/skeleton.md)) - `client.schema` is a `SchemaClient` (see [client.schema](../client_api/schema.md)) - `client.state` is a neuroglancer `JSONService` (see [client.state](../client_api/state.md)) @@ -339,6 +341,7 @@ def __init__( - `client.info` is an `InfoServiceClient` (see [client.info](../client_api/info.md)) - `client.l2cache` is an `L2CacheClient` (see [client.l2cache](../client_api/l2cache.md)) - `client.materialize` is a `MaterializationClient` (see [client.materialize](../client_api/materialize.md)) + - `client.skeleton` is a `SkeletonClient` (see [client.skeleton](../client_api/skeleton.md)) - `client.schema` is a `SchemaClient` (see 
[client.schema](../client_api/schema.md)) - `client.state` is a neuroglancer `JSONService` (see [client.state](../client_api/state.md)) @@ -391,6 +394,7 @@ def __init__( self._chunkedgraph = None self._annotation = None self._materialize = None + self._skeleton = None self._l2cache = None self.desired_resolution = desired_resolution self.local_server = self.info.local_server() @@ -436,6 +440,7 @@ def _reset_services(self): self._chunkedgraph = None self._annotation = None self._materialize = None + self._skeleton = None self._l2cache = None @property @@ -502,6 +507,24 @@ def materialize(self) -> MaterializationClientType: ) return self._materialize + @property + def skeleton(self) -> SkeletonClient: + """ + A client for the skeleton service. See [client.skeleton](../client_api/skeleton.md) + for more information. + """ + if self._skeleton is None: + self._skeleton = SkeletonClient( + server_address=self.local_server, + auth_client=self.auth, + datastack_name=self._datastack_name, + max_retries=self._max_retries, + pool_maxsize=self._pool_maxsize, + pool_block=self._pool_block, + over_client=self, + ) + return self._skeleton + @property def state(self) -> JSONServiceV1: """ diff --git a/caveclient/skeletonservice.py b/caveclient/skeletonservice.py new file mode 100644 index 00000000..6c91a068 --- /dev/null +++ b/caveclient/skeletonservice.py @@ -0,0 +1,298 @@ +from __future__ import annotations + +import logging +from io import BytesIO +from typing import Literal, Optional + +import pandas as pd +from packaging.version import Version + +try: + import cloudvolume + + logging.warning( + "cloudvolume not installed. Some output formats will not be available." + ) + + CLOUDVOLUME_AVAILABLE = True +except ImportError: + CLOUDVOLUME_AVAILABLE = False + +try: + import h5py + + logging.warning("h5py not installed. 
Some output formats will not be available.") + + H5PY_AVAILABLE = True +except ImportError: + H5PY_AVAILABLE = False + +try: + from cloudfiles import CloudFiles + + logging.warning( + "cloudfiles not installed. Some output formats will not be available." + ) + + CLOUDFILES_AVAILABLE = True +except ImportError: + CLOUDFILES_AVAILABLE = False + +from .auth import AuthClient +from .base import ClientBase, _api_endpoints +from .endpoints import skeletonservice_api_versions, skeletonservice_common + +SERVER_KEY = "skeleton_server_address" + + +""" +Usage +""" + + +class NoL2CacheException(Exception): + def __init__(self, value=""): + """ + Parameters: + value (str) [optional]: A more detailed description of the error, if desired. + """ + super().__init__(f"No L2Cache found. {value}".strip()) + + +class SkeletonClient(ClientBase): + def __init__( + self, + server_address: str, + datastack_name=None, + auth_client: Optional[AuthClient] = None, + api_version: str = "latest", + verify: bool = True, + max_retries: int = None, + pool_maxsize: int = None, + pool_block: bool = None, + over_client: Optional[CAVEclientFull] = None, # noqa: F821 # type: ignore + ): + if auth_client is None: + auth_client = AuthClient() + + auth_header = auth_client.request_header + endpoints, api_version = _api_endpoints( + api_version, + SERVER_KEY, + server_address, + skeletonservice_common, + skeletonservice_api_versions, + auth_header, + fallback_version=1, + verify=verify, + ) + + super(SkeletonClient, self).__init__( + server_address, + auth_header, + api_version, + endpoints, + SERVER_KEY, + verify=verify, + max_retries=max_retries, + pool_maxsize=pool_maxsize, + pool_block=pool_block, + over_client=over_client, + ) + + self._datastack_name = datastack_name + + def _test_get_version(self) -> Optional[Version]: + print("_test_get_version()") + endpoint_mapping = self.default_url_mapping + endpoint = self._endpoints.get("get_version_test", None) + print(f"endpoint: {endpoint}") + if endpoint 
is None: + return None + + url = endpoint.format_map(endpoint_mapping) + print(f"url: {url}") + response = self.session.get(url) + print(f"response: {response}") + if response.status_code == 404: # server doesn't have this endpoint yet + print("404") + return None + else: + version_str = response.json() + print(f"version_str: {type(version_str)} {version_str}") + version = Version(version_str) + print(f"version: {version}") + return version + + def _test_l2cache_exception(self): + raise NoL2CacheException( + "This is a test of SkeletonClient's behavior when no L2Cache is found." + ) + + def _test_endpoints(self): + def parse(url): + return url.split("/", 6)[-1] + + rid = 123456789 + ds = "test_datastack" + innards = "/precomputed/skeleton/" + + if self._datastack_name is not None: + # I could write a complicated test that confirms that an AssertionError is raised + # when datastack_name and self._datastack_name are both None, but I'm just don't want to at the moment. + # The combinatorial explosion of test varieties is getting out of hand. 
+ url = parse(self.build_endpoint(rid, None, None, "precomputed")) + assert url == f"{self._datastack_name}{innards}{rid}" + + url = parse(self.build_endpoint(rid, None, None, "json")) + assert url == f"{self._datastack_name}{innards}0/{rid}/json" + + url = parse(self.build_endpoint(rid, ds, None, "precomputed")) + assert url == f"{ds}{innards}{rid}" + + url = parse(self.build_endpoint(rid, ds, None, "json")) + assert url == f"{ds}{innards}0/{rid}/json" + + url = parse(self.build_endpoint(rid, ds, 0, "precomputed")) + assert url == f"{ds}{innards}0/{rid}" + + url = parse(self.build_endpoint(rid, ds, 0, "json")) + assert url == f"{ds}{innards}0/{rid}/json" + + url = parse(self.build_endpoint(rid, ds, 1, "precomputed")) + assert url == f"{ds}{innards}1/{rid}" + + url = parse(self.build_endpoint(rid, ds, 1, "json")) + assert url == f"{ds}{innards}1/{rid}/json" + + def build_endpoint( + self, + root_id: int, + datastack_name: str, + skeleton_version: int, + output_format: str, + ): + """ + Building the URL in a separate function facilitates testing + """ + if datastack_name is None: + datastack_name = self._datastack_name + assert datastack_name is not None + + endpoint_mapping = self.default_url_mapping + endpoint_mapping["datastack_name"] = datastack_name + endpoint_mapping["root_id"] = root_id + + if skeleton_version is None: + # Pylance incorrectly thinks that skeleton_version cannot be None here, + # but it most certainly can, and that is precisely how I intended it. + # Google searching revealed this as a known problem with Pylance and Selenium, + # but I have not been successful in solving it yet. 
+ if output_format == "precomputed": + endpoint = "get_skeleton_via_rid" + else: + # Note that there isn't currently an endpoint for this scenario, + # so we'll just use the skvn_rid_fmt endpoint with skvn set to the default value of 0 + endpoint_mapping["skeleton_version"] = 0 + endpoint_mapping["output_format"] = output_format + endpoint = "get_skeleton_via_skvn_rid_fmt" + else: + endpoint_mapping["skeleton_version"] = skeleton_version + if output_format == "precomputed": + endpoint = "get_skeleton_via_skvn_rid" + else: + endpoint_mapping["output_format"] = output_format + endpoint = "get_skeleton_via_skvn_rid_fmt" + + url = self._endpoints[endpoint].format_map(endpoint_mapping) + return url + + def get_skeleton( + self, + root_id: int, + datastack_name: Optional[str] = None, + skeleton_version: Optional[int] = None, + output_format: Literal[ + "none", "h5", "swc", "json", "arrays", "precomputed" + ] = "none", + ): + """Gets basic skeleton information for a datastack + + Parameters + ---------- + root_id : int + The root id of the skeleton to retrieve + datastack_name : str + The name of the datastack to check + skeleton_version : int + The skeleton version to generate and retrieve. Options are documented in SkeletonService. Use 0 for latest. + output_format : string + The format to retrieve. Options are 'none', 'h5', 'swc', 'json', 'arrays', 'precomputed' + + Returns + ------- + The return type will vary greatly depending on the output_format parameter. 
The options are: + - 'none': No return value (this can be used to generate a skeleton without retrieving it) + - 'precomputed': A cloudvolume.Skeleton object + - 'json': A dictionary + - 'arrays': A dictionary (literally a subset of the json response) + - 'swc': A pandas DataFrame + - 'h5': An h5py file object + """ + if not self.fc.l2cache.has_cache(): + raise NoL2CacheException("SkeletonClient requires an L2Cache.") + + url = self.build_endpoint( + root_id, datastack_name, skeleton_version, output_format + ) + + response = self.session.get(url) + + if output_format == "none": + return + if output_format == "precomputed": + if not CLOUDVOLUME_AVAILABLE: + raise ImportError( + "'precomputed' output format requires cloudvolume, which is not available." + ) + return cloudvolume.Skeleton.from_precomputed(response.content) + if output_format == "json": + return response.json() + if output_format == "arrays": + return response.json() + if output_format == "swc": + if not CLOUDFILES_AVAILABLE: + raise ImportError( + "'swc' output format requires cloudvolume, which is not available." + ) + # Curiously, the response is quoted and contains a terminal endline. Sigh. + parts = response.text.strip()[1:-1].split("/") + dir_, filename = "/".join(parts[0:-1]), parts[-1] + cf = CloudFiles(dir_) + skeleton_bytes = cf.get(filename) + arr = [ + [float(v) for v in row.split()] + for row in skeleton_bytes.decode().split("\n") + ] + # I got the SWC column header from skeleton_plot.skel_io.py + df = pd.DataFrame( + arr, columns=["id", "type", "x", "y", "z", "radius", "parent"] + ) + return df + if output_format == "h5": + if not CLOUDFILES_AVAILABLE: + raise ImportError( + "'h5' output format requires cloudvolume, which is not available." + ) + if not H5PY_AVAILABLE: + raise ImportError( + "'h5' output format requires h5py, which is not available." 
+ ) + parts = response.text.strip()[1:-1].split("/") + dir_, filename = "/".join(parts[0:-1]), parts[-1] + cf = CloudFiles(dir_) + skeleton_bytes = cf.get(filename) + skeleton_bytesio = BytesIO(skeleton_bytes) + return h5py.File(skeleton_bytesio, "r") + + raise ValueError(f"Unknown output format: {output_format}") diff --git a/docs/client_api/skeletonservice.md b/docs/client_api/skeletonservice.md new file mode 100644 index 00000000..9fb9f8a6 --- /dev/null +++ b/docs/client_api/skeletonservice.md @@ -0,0 +1,16 @@ +--- +title: client.skeletonservice +--- + +!!! note + The functionality described here will be accurate if the datastack you are using is using the most up-to-date version of the skeleton service. If something seems wrong with the documentation here, try checking the version of the skeleton service returned by your client: + ```python + type(client.skeletonservice) + ``` + Extended documentation for all versions of the skeleton service client can be found + [here](../extended_api/skeletonservice.md). 
+ +::: caveclient.skeletonservice.SkeletonClient + options: + heading_level: 2 + show_bases: false diff --git a/docs/extended_api/skeletonservice.md b/docs/extended_api/skeletonservice.md new file mode 100644 index 00000000..031a548a --- /dev/null +++ b/docs/extended_api/skeletonservice.md @@ -0,0 +1,5 @@ +--- +title: skeletonservice +--- + +::: caveclient.skeletonservice diff --git a/docs/tutorials/framework.md b/docs/tutorials/framework.md index 34e2e9f2..ab0eec8b 100644 --- a/docs/tutorials/framework.md +++ b/docs/tutorials/framework.md @@ -93,3 +93,4 @@ is named `client`, the subclients for each service are: - InfoService : `client.info` - EM Annotation Schemas : `client.schemas` - JSON Neuroglancer State Service : `client.state` +- Skeleton Service : `client.skeletonservice` From 34e7ed2a3f3d28c7b26b37bcd135309cf21c5df6 Mon Sep 17 00:00:00 2001 From: Forrest Collman Date: Mon, 9 Sep 2024 12:36:46 -0700 Subject: [PATCH 2/4] =?UTF-8?q?Bump=20version:=205.25.2=20=E2=86=92=205.26?= =?UTF-8?q?.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- caveclient/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6a5d505e..9961c7f2 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.25.2 +current_version = 5.26.0 commit = True tag = True diff --git a/caveclient/__init__.py b/caveclient/__init__.py index 7d9d28f8..896b92ec 100644 --- a/caveclient/__init__.py +++ b/caveclient/__init__.py @@ -1,4 +1,4 @@ -__version__ = "5.25.2" +__version__ = "5.26.0" from .frameworkclient import CAVEclient From 52632be33246814d1cbc8162781bdc12a0d75b77 Mon Sep 17 00:00:00 2001 From: Casey Schneider-Mizell Date: Mon, 9 Sep 2024 20:37:42 +0100 Subject: [PATCH 3/4] Local server auth quality of life (#214) * prototype mesh client * tweak downloading parameters * small improvements to download meshes * auth 
qol + repr * remove unneeded file * import linting * fix extra self --- caveclient/auth.py | 65 +++++++++++++++++++++++++++++------ caveclient/frameworkclient.py | 8 +++++ 2 files changed, 62 insertions(+), 11 deletions(-) diff --git a/caveclient/auth.py b/caveclient/auth.py index bf7ca469..52829c32 100644 --- a/caveclient/auth.py +++ b/caveclient/auth.py @@ -3,6 +3,7 @@ import os import urllib import webbrowser +from typing import Optional import requests @@ -44,6 +45,13 @@ def write_token(token, filepath, key, overwrite=True): json.dump(secrets, f) +def server_token_filename(server_address): + server = urllib.parse.urlparse(server_address).netloc + server_file = server + "-cave-secret.json" + server_file_path = os.path.join(default_token_location, server_file) + return os.path.expanduser(server_file_path) + + class AuthClient(object): def __init__( self, @@ -51,6 +59,7 @@ def __init__( token_key=None, token=None, server_address=default_global_server_address, + local_server=None, ): """Client to find and use auth tokens to access the dynamic annotation framework services. @@ -71,11 +80,11 @@ def __init__( server_address : str, optional, URL to the auth server. By default, uses a default server address. 
""" + self._server_address = server_address + self._local_server = local_server + if token_file is None: - server = urllib.parse.urlparse(server_address).netloc - server_file = server + "-cave-secret.json" - self._server_file_path = os.path.join(default_token_location, server_file) - self._server_file_path = os.path.expanduser(self._server_file_path) + self._server_file_path = server_token_filename(self._server_address) if os.path.isfile(self._server_file_path): token_file = self._server_file_path else: @@ -102,7 +111,6 @@ def __init__( break self._token = token - self._server_address = server_address self._default_endpoint_mapping = {"auth_server_address": self._server_address} @property @@ -215,12 +223,12 @@ def get_new_token(self, open=False, no_text=False): def save_token( self, - token=None, - token_key=default_token_key, - overwrite=False, - token_file=None, - switch_token=True, - write_to_server_file=True, + token: Optional[str] = None, + token_key: str = default_token_key, + overwrite: bool = False, + token_file: Optional[str] = None, + switch_token: bool = True, + write_to_server_file: bool = True, ): """Conveniently save a token in the correct format. 
@@ -313,3 +321,38 @@ def request_header(self): return auth_header else: return {} + + @property + def local_server(self): + return self._local_server + + @local_server.setter + def local_server(self, new_val): + self._local_server = new_val + self._synchronize_local_server_file() + + @property + def local_server_filepath(self): + if self.local_server: + return server_token_filename(self.local_server) + else: + return None + + def _synchronize_local_server_file(self): + if self.local_server: + if os.path.exists(self.local_server_filepath): + local_token = self._load_token( + self.local_server_filepath, self._token_key + ) + if local_token != self.token: + self.save_token( + token=self.token, + token_file=self.local_server_filepath, + overwrite=True, + ) + else: + self.save_token( + token=self.token, + token_file=self.local_server_filepath, + overwrite=True, + ) diff --git a/caveclient/frameworkclient.py b/caveclient/frameworkclient.py index dd853d90..00d94301 100644 --- a/caveclient/frameworkclient.py +++ b/caveclient/frameworkclient.py @@ -308,6 +308,9 @@ def chunkedgraph(self) -> None: def datastack_name(self) -> None: return None + def __repr__(self): + return f"CAVEclient" + class CAVEclientFull(CAVEclientGlobal): def __init__( @@ -398,6 +401,8 @@ def __init__( self._l2cache = None self.desired_resolution = desired_resolution self.local_server = self.info.local_server() + self.auth.local_server = self.local_server + av_info = self.info.get_aligned_volume_info() self._aligned_volume_name = av_info["name"] @@ -563,3 +568,6 @@ def l2cache(self) -> L2CacheClientLegacy: over_client=self, ) return self._l2cache + + def __repr__(self): + return f"CAVEclient" From dc6f24b2a0a9a3dd8b59c9bf9c55b7f9b21d31ba Mon Sep 17 00:00:00 2001 From: Ben Pedigo Date: Mon, 9 Sep 2024 12:39:13 -0700 Subject: [PATCH 4/4] shell of logic (#216) --- caveclient/jsonservice.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git 
a/caveclient/jsonservice.py b/caveclient/jsonservice.py index 4265662e..98e5e281 100644 --- a/caveclient/jsonservice.py +++ b/caveclient/jsonservice.py @@ -2,6 +2,7 @@ import numbers import os import re +from typing import Optional import numpy as np @@ -247,7 +248,12 @@ def upload_state_json(self, json_state, state_id=None, timestamp=None): return int(response_re.groups()[0]) @_check_version_compatibility(">=0.4.0") - def upload_property_json(self, property_json, state_id=None, timestamp=None): + def upload_property_json( + self, + property_json, + state_id=None, + max_size: Optional[int] = 2_500_000, + ): """Upload a Neuroglancer JSON state Parameters @@ -257,8 +263,9 @@ def upload_property_json(self, property_json, state_id=None, timestamp=None): state_id : int ID of a JSON state uploaded to the state service. Using a state_id is an admin feature. - timestamp: time.time - Timestamp for json state date. Requires state_id. + max_size: int + Maximum size in bytes for the data to upload. Default is 2.5MB. Set to None + for no limit. Returns ------- @@ -274,12 +281,23 @@ def upload_property_json(self, property_json, state_id=None, timestamp=None): url_mapping["state_id"] = state_id url = self._endpoints["upload_properties_w_id"].format_map(url_mapping) + data = json.dumps( + property_json, + default=neuroglancer_json_encoder, + ) + + # get size in bytes of data to upload + data_size = len(data.encode("utf-8")) + + if max_size is not None and data_size > max_size: + msg = f"Data size {data_size} exceeds maximum size of {max_size} bytes. " + msg += "Please reduce the size of the data or increase the `max_size` " + msg += "if your state server can handle larger inputs." + raise ValueError(msg) + response = self.session.post( url, - data=json.dumps( - property_json, - default=neuroglancer_json_encoder, - ), + data=data, ) handle_response(response, as_json=False) response_re = re.search(".*\/(\d+)", str(response.content))