Skip to content

Commit

Permalink
checkpoint
Browse files Browse the repository at this point in the history
  • Loading branch information
mavaylon1 committed Sep 16, 2023
1 parent f7669dc commit eb9cb1f
Show file tree
Hide file tree
Showing 4 changed files with 220 additions and 103 deletions.
27 changes: 22 additions & 5 deletions docs/gallery/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,15 +30,32 @@
from pynwb import NWBHDF5IO
from hdmf_zarr.nwb import NWBZarrIO

# Convert the file from HDF5 to Zarr and back again, then verify the result
# is still readable. Debug leftovers (breakpoint() calls and commented-out
# exploratory reads) removed: breakpoint() would hang the gallery build.

with NWBHDF5IO(filename, 'r', load_namespaces=False) as read_io:  # Create HDF5 IO object for read
    with NWBZarrIO(zarr_filename, mode='w') as export_io:  # Create Zarr IO object for write
        export_io.export(src_io=read_io, write_args=dict(link_data=False))  # Export from HDF5 to Zarr

with NWBZarrIO(zarr_filename, mode='r') as read_io:  # Create Zarr IO object for read
    with NWBHDF5IO(hdf_filename, 'w') as export_io:  # Create HDF5 IO object for write
        export_io.export(src_io=read_io, write_args=dict(link_data=False))  # Export from Zarr to HDF5

###############################################################################
# Read the new HDF5 file back
# ---------------------------
#
# Now our file has been converted from HDF5 to Zarr and back again to HDF5.
# Here we check that we can still read that file.

with NWBHDF5IO(hdf_filename, 'r') as hr:
    hf = hr.read()
    # Access a column that contains object references to confirm that links
    # survived the round-trip conversion.
    hf.electrodes.group.data
14 changes: 11 additions & 3 deletions src/hdmf_zarr/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,6 +300,7 @@ def get_builder_disk_path(self, **kwargs):
def write_builder(self, **kwargs):
"""Write a builder to disk"""
f_builder, link_data, exhaust_dci = getargs('builder', 'link_data', 'exhaust_dci', kwargs)
breakpoint()
for name, gbldr in f_builder.groups.items():
self.write_group(parent=self.__file,
builder=gbldr,
Expand Down Expand Up @@ -530,10 +531,14 @@ def resolve_ref(self, zarr_ref):
else:
target_name = ROOT_NAME
target_zarr_obj = zarr.open(source_file, mode='r')
# if target_name=='baz0':
# breakpoint()
if object_path is not None:
try:
target_zarr_obj = target_zarr_obj[object_path]
except Exception:
# breakpoint()

raise ValueError("Found bad link to object %s in file %s" % (object_path, source_file))
# Return the create path
return target_name, target_zarr_obj
Expand Down Expand Up @@ -699,6 +704,7 @@ def __setup_chunked_dataset__(cls, parent, name, data, options=None):
returns='the Zarr array that was created', rtype=Array)
def write_dataset(self, **kwargs): # noqa: C901
parent, builder, link_data, exhaust_dci = getargs('parent', 'builder', 'link_data', 'exhaust_dci', kwargs)
# breakpoint()
force_data = getargs('force_data', kwargs)
if self.get_written(builder):
return None
Expand Down Expand Up @@ -1086,8 +1092,8 @@ def __read_group(self, zarr_obj, name=None):
ret.set_group(sub_builder)
# breakpoint()
# read sub datasets
# breakpoint()
for sub_name, sub_array in zarr_obj.arrays():
# breakpoint()
sub_builder = self.__read_dataset(sub_array, sub_name)
ret.set_dataset(sub_builder)

Expand Down Expand Up @@ -1125,6 +1131,7 @@ def __read_links(self, zarr_obj, parent):
def __read_dataset(self, zarr_obj, name):
# breakpoint()
ret = self.__get_built(zarr_obj)
# breakpoint()
if ret is not None:
return ret

Expand Down Expand Up @@ -1156,7 +1163,6 @@ def __read_dataset(self, zarr_obj, name):
reg_refs = False
has_reference = False
if isinstance(dtype, list):
breakpoint()
# compound data type
obj_refs = list()
reg_refs = list()
Expand All @@ -1176,10 +1182,12 @@ def __read_dataset(self, zarr_obj, name):
data = BuilderZarrTableDataset(zarr_obj, self, retrieved_dtypes)
# d = BuilderH5TableDataset(zarr_obj, self, dtype)
elif self.__is_ref(dtype):
# breakpoint()
# reference array
has_reference = True
has_reference = True #TODO: REMOVE
if dtype == 'object': # wrap with dataset ref
# obj_refs = True
breakpoint()
data = BuilderZarrReferenceDataset(data, self)
elif dtype == 'region':
reg_refs = True
Expand Down
Loading

0 comments on commit eb9cb1f

Please sign in to comment.