From aa0b8f4697a0b96642b3048dc9934fa946cb728f Mon Sep 17 00:00:00 2001 From: Luigi Pertoldi Date: Mon, 1 Jan 2024 21:59:13 +0100 Subject: [PATCH 1/3] Update to latest pydataobj changes --- setup.cfg | 4 +- .../buffer_processor/buffer_processor.py | 4 +- .../buffer_processor/lh5_buffer_processor.py | 24 +- src/daq2lh5/build_raw.py | 6 +- src/daq2lh5/data_decoder.py | 22 +- src/daq2lh5/fc/fc_config_decoder.py | 2 +- src/daq2lh5/raw_buffer.py | 18 +- .../buffer_processor/test_buffer_processor.py | 289 ++++++++---------- .../test_lh5_buffer_processor.py | 219 ++++++------- tests/fc/test_fc_event_decoder.py | 2 +- tests/test_build_raw.py | 28 +- tests/test_daq_to_raw.py | 10 +- 12 files changed, 281 insertions(+), 347 deletions(-) diff --git a/setup.cfg b/setup.cfg index c137458..9d808f0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,10 +31,10 @@ classifiers = [options] packages = find: install_requires = - dspeed>=1.1 + dspeed>=1.3.0a3 h5py>=3.2.0 hdf5plugin - legend-pydataobj>=1.4.1 + legend-pydataobj>=1.5.0a1 numpy>=1.21 pyfcutils tqdm>=4.27 diff --git a/src/daq2lh5/buffer_processor/buffer_processor.py b/src/daq2lh5/buffer_processor/buffer_processor.py index 3a27285..8824df3 100644 --- a/src/daq2lh5/buffer_processor/buffer_processor.py +++ b/src/daq2lh5/buffer_processor/buffer_processor.py @@ -51,14 +51,14 @@ def buffer_processor(rb: RawBuffer) -> Table: ``"compression": {"lgdo": "codec_name" [, ...]}`` `(dict)` Updates the `compression` attribute of `lgdo` to `codec_name`. The attribute sets the compression algorithm applied by - :func:`~.lgdo.lh5_store.LH5Store.read_object` before writing `lgdo` to + :func:`~.lgdo.lh5.LH5Store.read` before writing `lgdo` to disk. Can be used to apply custom waveform compression algorithms from :mod:`lgdo.compression`. ``"hdf5_settings": {"lgdo": { }}`` `(dict)` Updates the `hdf5_settings` attribute of `lgdo`. The attribute sets the HDF5 dataset options applied by - :func:`~.lgdo.lh5_store.LH5Store.read_object` before writing `lgdo` to + :func:`~.lgdo.lh5.LH5Store.read` before writing `lgdo` to disk. 
Parameters diff --git a/src/daq2lh5/buffer_processor/lh5_buffer_processor.py b/src/daq2lh5/buffer_processor/lh5_buffer_processor.py index e855c0a..49c0362 100644 --- a/src/daq2lh5/buffer_processor/lh5_buffer_processor.py +++ b/src/daq2lh5/buffer_processor/lh5_buffer_processor.py @@ -6,7 +6,7 @@ import h5py import lgdo -from lgdo import LH5Store +from lgdo import lh5 from ..buffer_processor.buffer_processor import buffer_processor from ..raw_buffer import RawBuffer, RawBufferLibrary @@ -54,14 +54,14 @@ def lh5_buffer_processor( """ # Initialize the input raw file - raw_store = LH5Store() + raw_store = lh5.LH5Store() lh5_file = raw_store.gimme_file(lh5_raw_file_in, "r") if lh5_file is None: raise ValueError(f"input file not found: {lh5_raw_file_in}") return # List the groups in the raw file - lh5_groups = lgdo.ls(lh5_raw_file_in) + lh5_groups = lh5.ls(lh5_raw_file_in) lh5_tables = [] # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw @@ -69,21 +69,19 @@ def lh5_buffer_processor( # Make sure that the upper level key isn't a dataset if isinstance(lh5_file[tb], h5py.Dataset): lh5_tables.append(f"{tb}") - elif "raw" not in tb and lgdo.ls(lh5_file, f"{tb}/raw"): + elif "raw" not in tb and lh5.ls(lh5_file, f"{tb}/raw"): lh5_tables.append(f"{tb}/raw") # Look one layer deeper for a :meth:`lgdo.Table` if necessary - elif lgdo.ls(lh5_file, f"{tb}"): + elif lh5.ls(lh5_file, f"{tb}"): # Check to make sure that this isn't a table itself - maybe_table, _ = raw_store.read_object(f"{tb}", lh5_file) + maybe_table, _ = raw_store.read(f"{tb}", lh5_file) if isinstance(maybe_table, lgdo.Table): lh5_tables.append(f"{tb}") del maybe_table # otherwise, go deeper else: - for sub_table in lgdo.ls(lh5_file, f"{tb}"): - maybe_table, _ = raw_store.read_object( - f"{tb}/{sub_table}", lh5_file - ) + for sub_table in lh5.ls(lh5_file, f"{tb}"): + maybe_table, _ = raw_store.read(f"{tb}/{sub_table}", lh5_file) if isinstance(maybe_table, lgdo.Table): lh5_tables.append(f"{tb}/{sub_table}") del maybe_table @@ -114,7 +112,7 @@ def lh5_buffer_processor( # Write everything in the raw file to the new file, check for proc_spec under either the group name, out_name, or the name for tb in lh5_tables: - lgdo_obj, _ = raw_store.read_object(f"{tb}", lh5_file) + lgdo_obj, _ = raw_store.read(f"{tb}", lh5_file) # Find the out_name. # If the top level group has an lgdo table in it, then the out_name is group @@ -198,6 +196,4 @@ def lh5_buffer_processor( pass # Write the (possibly processed) lgdo_obj to a file - raw_store.write_object( - lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name - ) + raw_store.write(lgdo_obj, out_name, lh5_file=proc_file_name, group=group_name) diff --git a/src/daq2lh5/build_raw.py b/src/daq2lh5/build_raw.py index 8e23edb..12fcbaf 100644 --- a/src/daq2lh5/build_raw.py +++ b/src/daq2lh5/build_raw.py @@ -6,8 +6,8 @@ import os import time -import lgdo import numpy as np +from lgdo import lh5 from tqdm.auto import tqdm from .compass.compass_streamer import CompassStreamer @@ -77,7 +77,7 @@ def build_raw( hdf5_settings keyword arguments (as a dict) forwarded to - :meth:`~.lgdo.lh5_store.LH5Store.write_object`. + :meth:`~.lgdo.lh5.LH5Store.write`. **kwargs sent to :class:`.RawBufferLibrary` generation as `kw_dict` argument. 
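The `hdf5_settings` mapping documented above is forwarded unchanged to the renamed :meth:`lgdo.lh5.LH5Store.write`. A minimal sketch of calling `build_raw` with such settings follows; the file names and the particular compression options are illustrative assumptions, not part of this patch::

    from daq2lh5 import build_raw

    build_raw(
        in_stream="run0-daq.fcio",   # hypothetical DAQ input file
        out_spec="run0-raw.lh5",     # hypothetical LH5 output file
        # forwarded as HDF5 dataset options to LH5Store.write()
        hdf5_settings={"compression": "gzip", "shuffle": True},
        overwrite=True,
    )
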
@@ -224,7 +224,7 @@ def build_raw( os.remove(out_file_glob[0]) # Write header data - lh5_store = lgdo.LH5Store(keep_open=True) + lh5_store = lh5.LH5Store(keep_open=True) write_to_lh5_and_clear(header_data, lh5_store, **hdf5_settings) # Now loop through the data diff --git a/src/daq2lh5/data_decoder.py b/src/daq2lh5/data_decoder.py index fb3aa67..1e4d471 100644 --- a/src/daq2lh5/data_decoder.py +++ b/src/daq2lh5/data_decoder.py @@ -3,13 +3,10 @@ """ from __future__ import annotations -from typing import Union - import lgdo import numpy as np -from lgdo import LH5Store - -LGDO = Union[lgdo.Scalar, lgdo.Struct, lgdo.Array, lgdo.VectorOfVectors] +from lgdo import LGDO +from lgdo.lh5 import LH5Store class DataDecoder: @@ -18,15 +15,16 @@ class DataDecoder: Most decoders will repeatedly decode the same set of values from each packet. The values that get decoded need to be described by a dict stored in `self.decoded_values` that helps determine how to set up the buffers and - write them to file as :class:`~.lgdo.LGDO`\ s. :class:`~.lgdo.table.Table`\ s - are made whose columns correspond to the elements of `decoded_values`, and - packet data gets pushed to the end of the table one row at a time. + write them to file as :class:`~.lgdo.types.lgdo.LGDO`\ s. + :class:`~.lgdo.types.table.Table`\ s are made whose columns correspond to + the elements of `decoded_values`, and packet data gets pushed to the end of + the table one row at a time. Any key-value entry in a configuration dictionary attached to an element of `decoded_values` is typically interpreted as an attribute to be attached to the corresponding LGDO. This feature can be for example exploited to specify HDF5 dataset settings used by - :meth:`~.lgdo.lh5_store.LH5Store.write_object` to write LGDOs to disk. + :meth:`~.lgdo.lh5.LH5Store.write` to write LGDOs to disk. For example :: @@ -119,7 +117,7 @@ def make_lgdo(self, key: int | str = None, size: int = None) -> LGDO: """Make an LGDO for this :class:`DataDecoder` to fill. This default version of this function allocates a - :class:`~.lgdo.table.Table` using the `decoded_values` for key. If a + :class:`~.lgdo.types.table.Table` using the `decoded_values` for key. If a different type of LGDO object is required for this decoder, overload this function. 
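Throughout this patch, `LH5Store.read_object`/`write_object` become `LH5Store.read`/`write`, module-level helpers such as `ls` move to `lgdo.lh5`, and tabular LGDOs are viewed through `view_as`. A short sketch of the renamed interface, with placeholder file and group names::

    from lgdo import lh5

    store = lh5.LH5Store()

    # list the groups in an LH5 file
    print(lh5.ls("geds-raw.lh5"))

    # read() returns the LGDO object together with the number of rows read
    table, n_rows = store.read("ch0/raw", "geds-raw.lh5")

    # tabular LGDOs can be viewed as a pandas DataFrame
    df = table.view_as("pd")

    # write the (possibly modified) object back out under a group
    store.write(table, "raw", lh5_file="geds-proc.lh5", group="ch0")
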
@@ -207,7 +205,7 @@ def make_lgdo(self, key: int | str = None, size: int = None) -> LGDO: continue # Parse datatype for remaining lgdos - datatype, shape, elements = lgdo.lgdo_utils.parse_datatype(datatype) + datatype, shape, elements = lgdo.lh5.utils.parse_datatype(datatype) # ArrayOfEqualSizedArrays if datatype == "array_of_equalsized_arrays": @@ -258,7 +256,7 @@ def write_out_garbage( n_rows = self.garbage_table.loc if n_rows == 0: return - lh5_store.write_object( + lh5_store.write( self.garbage_table, "garbage", filename, group, n_rows=n_rows, append=True ) self.garbage_table.clear() diff --git a/src/daq2lh5/fc/fc_config_decoder.py b/src/daq2lh5/fc/fc_config_decoder.py index 05190eb..ef8317e 100644 --- a/src/daq2lh5/fc/fc_config_decoder.py +++ b/src/daq2lh5/fc/fc_config_decoder.py @@ -28,7 +28,7 @@ class FCConfigDecoder(DataDecoder): >>> decoder = FCConfigDecoder() >>> config = decoder.decode_config(fc) >>> type(config) - lgdo.struct.Struct + lgdo.types.struct.Struct """ def __init__(self, *args, **kwargs) -> None: diff --git a/src/daq2lh5/raw_buffer.py b/src/daq2lh5/raw_buffer.py index c1d7971..da50f41 100644 --- a/src/daq2lh5/raw_buffer.py +++ b/src/daq2lh5/raw_buffer.py @@ -65,21 +65,19 @@ from __future__ import annotations import os -from typing import Union import lgdo -from lgdo import LH5Store +from lgdo import LGDO +from lgdo.lh5 import LH5Store from .buffer_processor.buffer_processor import buffer_processor -LGDO = Union[lgdo.Scalar, lgdo.Struct, lgdo.Array, lgdo.VectorOfVectors] - class RawBuffer: r"""Base class to represent a buffer of raw data. A :class:`RawBuffer` is in essence a an LGDO object (typically a - :class:`~.lgdo.table.Table`) to which decoded data will be written, along + :class:`~.lgdo.types.table.Table`) to which decoded data will be written, along with some meta-data distinguishing what data goes into it, and where the LGDO gets written out. Also holds on to the current location in the buffer for writing. @@ -88,7 +86,7 @@ class RawBuffer: ---------- lgdo the LGDO used as the actual buffer. Typically a - :class:`~.lgdo.table.Table`. Set to ``None`` upon creation so that the + :class:`~.lgdo.types.table.Table`. Set to ``None`` upon creation so that the user or a decoder can initialize it later. key_list a list of keys (e.g. channel numbers) identifying data to be written @@ -107,7 +105,7 @@ class RawBuffer: proc_spec a dictionary containing the following: - a DSP config file, passed as a dictionary, or as a path to a JSON file - - an array containing: the name of an :class:`~.lgdo` object stored in the :class:`.RawBuffer` to be sliced, + - an array containing: the name of an LGDO object stored in the :class:`.RawBuffer` to be sliced, the start and end indices of the slice, and the new name for the sliced object - a dictionary of fields to drop - a dictionary of new fields and their return datatype @@ -440,11 +438,11 @@ def write_to_lh5_and_clear( files (saves some time opening / closing files). **kwargs keyword-arguments forwarded to - :meth:`.lgdo.lh5_store.LH5Store.write_object`. + :meth:`.lgdo.lh5.LH5Store.write`. See Also -------- - .lgdo.lh5_store.LH5Store.write_object + .lgdo.lh5.LH5Store.write """ if lh5_store is None: lh5_store = lgdo.LH5Store() @@ -470,7 +468,7 @@ def write_to_lh5_and_clear( # write if requested... 
if filename != "": - lh5_store.write_object( + lh5_store.write( lgdo_to_write, rb.out_name, filename, diff --git a/tests/buffer_processor/test_buffer_processor.py b/tests/buffer_processor/test_buffer_processor.py index 124ace1..5d01361 100644 --- a/tests/buffer_processor/test_buffer_processor.py +++ b/tests/buffer_processor/test_buffer_processor.py @@ -6,6 +6,7 @@ import lgdo import numpy as np from dspeed import build_processing_chain as bpc +from lgdo import lh5 from lgdo.compression import RadwareSigcompress, ULEB128ZigZagDiff from daq2lh5.build_raw import build_raw @@ -45,20 +46,18 @@ def test_buffer_processor_packet_ids(lgnd_test_data, tmptestdir): raw_file = f"{tmptestdir}/L200-comm-20220519-phy-geds.lh5" - sto = lgdo.LH5Store() + sto = lh5.LH5Store() raw_group = "ORFlashCamADCWaveform" - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( - str(raw_group) + "/packet_id", processed_file - ) + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", processed_file) assert np.array_equal(raw_packet_ids.nda, processed_packet_ids.nda) - processed_presummed_wfs, _ = sto.read_object( + processed_presummed_wfs, _ = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - raw_wfs, _ = sto.read_object(str(raw_group) + "/waveform/values", raw_file) + raw_wfs, _ = sto.read(str(raw_group) + "/waveform/values", raw_file) assert processed_presummed_wfs.nda[0][0] == np.sum(raw_wfs.nda[0][:4]) @@ -121,12 +120,12 @@ def test_buffer_processor_waveform_lengths(lgnd_test_data, tmptestdir): build_raw(in_stream=daq_file, out_spec=copy_out_spec, overwrite=True) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = dsp_config @@ -143,16 +142,16 @@ def test_buffer_processor_waveform_lengths(lgnd_test_data, tmptestdir): window_start_index = window_config[1] window_end_index = window_config[2] - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -169,21 +168,17 @@ def test_buffer_processor_waveform_lengths(lgnd_test_data, tmptestdir): == raw_packet_waveform_values[0].dtype ) - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - 
presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) - windowed_packet_waveform_dts, _ = sto.read_object( + windowed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/windowed_waveform/dt", processed_file ) @@ -205,7 +200,7 @@ def test_buffer_processor_waveform_lengths(lgnd_test_data, tmptestdir): == raw_packet_waveform_t0s.attrs["units"] ) - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) @@ -219,7 +214,7 @@ def test_buffer_processor_waveform_lengths(lgnd_test_data, tmptestdir): ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -274,19 +269,19 @@ def test_buffer_processor_file_size_decrease(lgnd_test_data, tmptestdir): build_raw(in_stream=daq_file, out_spec=proc_out_spec, overwrite=True) build_raw(in_stream=daq_file, out_spec=raw_out_spec, overwrite=True) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] - sto = lgdo.LH5Store() + sto = lh5.LH5Store() wf_size = 0 for raw_group in lh5_tables: wf_size += sys.getsizeof( - sto.read_object(str(raw_group) + "/waveform/values", raw_file)[0].nda + sto.read(str(raw_group) + "/waveform/values", raw_file)[0].nda ) # Make sure we are taking up less space than a file that has two copies of the waveform table in it @@ -389,23 +384,23 @@ def test_buffer_processor_separate_name_tables(lgnd_test_data, tmptestdir): # build the unprocessed raw file build_raw(in_stream=daq_file, out_spec=copy_out_spec, overwrite=True) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -424,13 +419,13 @@ def test_buffer_processor_separate_name_tables(lgnd_test_data, tmptestdir): window_start_index = int(jsonfile[group_name]["window"][1]) window_end_index = int(jsonfile[group_name]["window"][2]) - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file 
) @@ -447,17 +442,13 @@ def test_buffer_processor_separate_name_tables(lgnd_test_data, tmptestdir): == raw_packet_waveform_values[0].dtype ) - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -477,7 +468,7 @@ def test_buffer_processor_separate_name_tables(lgnd_test_data, tmptestdir): == raw_packet_waveform_t0s.attrs["units"] ) - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) @@ -488,7 +479,7 @@ def test_buffer_processor_separate_name_tables(lgnd_test_data, tmptestdir): ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -593,22 +584,22 @@ def test_proc_geds_no_proc_spms(lgnd_test_data, tmptestdir): # Do the unprocessed build raw build_raw(in_stream=daq_file, out_spec=copy_out_spec, overwrite=True) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -639,21 +630,21 @@ def test_proc_geds_no_proc_spms(lgnd_test_data, tmptestdir): window_end_index = int(jsonfile[group_name]["window"][2]) # Read in the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) if pass_flag: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) else: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -669,25 +660,21 @@ def test_proc_geds_no_proc_spms(lgnd_test_data, tmptestdir): == raw_packet_waveform_values[0].dtype ) - 
raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) if pass_flag: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) else: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -708,16 +695,16 @@ def test_proc_geds_no_proc_spms(lgnd_test_data, tmptestdir): ) if pass_flag: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/waveform/dt", processed_file ) else: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -729,19 +716,15 @@ def test_proc_geds_no_proc_spms(lgnd_test_data, tmptestdir): # check that the t_lo_sat and t_sat_hi are correct if not pass_flag: - wf_table, _ = sto.read_object(str(raw_group), raw_file) + wf_table, _ = sto.read(str(raw_group), raw_file) pc, _, wf_out = bpc(wf_table, json.loads(raw_dsp_config)) pc.execute() raw_sat_lo = wf_out["t_sat_lo"] raw_sat_hi = wf_out["t_sat_hi"] - proc_sat_lo, _ = sto.read_object( - str(raw_group) + "/t_sat_lo", processed_file - ) + proc_sat_lo, _ = sto.read(str(raw_group) + "/t_sat_lo", processed_file) - proc_sat_hi, _ = sto.read_object( - str(raw_group) + "/t_sat_hi", processed_file - ) + proc_sat_hi, _ = sto.read(str(raw_group) + "/t_sat_hi", processed_file) assert np.array_equal(raw_sat_lo.nda, proc_sat_lo.nda) assert np.array_equal(raw_sat_hi.nda, proc_sat_hi.nda) @@ -849,22 +832,22 @@ def test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): # Build the unprocessed raw file for comparison build_raw(in_stream=daq_file, out_spec=copy_out_spec, overwrite=True) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -896,21 +879,21 @@ def 
test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): window_end_index = int(jsonfile[group_name]["window"][2]) # Read in the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) if pass_flag: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) else: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -931,20 +914,20 @@ def test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): # Check that the waveforms match # These are the channels that should be unprocessed if group_name == "chan1028803" or group_name == "chan1028804": - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", processed_file ) assert np.array_equal( raw_packet_waveform_values.nda, windowed_packet_waveform_values.nda ) else: - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) assert np.array_equal( @@ -953,25 +936,21 @@ def test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): ) # Check the t0 and dts are what we expect - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) if pass_flag: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) else: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -992,16 +971,16 @@ def test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): ) if pass_flag: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/waveform/dt", processed_file ) else: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = 
sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -1013,19 +992,15 @@ def test_buffer_processor_multiple_keys(lgnd_test_data, tmptestdir): # check that the t_lo_sat and t_sat_hi are correct if not pass_flag: - wf_table, _ = sto.read_object(str(raw_group), raw_file) + wf_table, _ = sto.read(str(raw_group), raw_file) pc, _, wf_out = bpc(wf_table, json.loads(raw_dsp_config)) pc.execute() raw_sat_lo = wf_out["t_sat_lo"] raw_sat_hi = wf_out["t_sat_hi"] - proc_sat_lo, _ = sto.read_object( - str(raw_group) + "/t_sat_lo", processed_file - ) + proc_sat_lo, _ = sto.read(str(raw_group) + "/t_sat_lo", processed_file) - proc_sat_hi, _ = sto.read_object( - str(raw_group) + "/t_sat_hi", processed_file - ) + proc_sat_hi, _ = sto.read(str(raw_group) + "/t_sat_hi", processed_file) assert np.array_equal(raw_sat_lo.nda, proc_sat_lo.nda) assert np.array_equal(raw_sat_hi.nda, proc_sat_hi.nda) @@ -1059,11 +1034,11 @@ def test_buffer_processor_all_pass(lgnd_test_data, tmptestdir): build_raw(in_stream=daq_file, out_spec=raw_out_spec, overwrite=True) # assert filecmp.cmp(raw_file, processed_file, shallow=True) - sto = lgdo.LH5Store() - raw_tables = lgdo.ls(raw_file) + sto = lh5.LH5Store() + raw_tables = lh5.ls(raw_file) for tb in raw_tables: - raw, _ = sto.read_object(tb, raw_file) - proc, _ = sto.read_object(tb, processed_file) + raw, _ = sto.read(tb, raw_file) + proc, _ = sto.read(tb, processed_file) if isinstance(raw, lgdo.Scalar): raw_value = raw.value raw_attrs = raw.attrs @@ -1074,12 +1049,12 @@ def test_buffer_processor_all_pass(lgnd_test_data, tmptestdir): else: for obj in raw.keys(): if not isinstance(raw[obj], lgdo.Table): - raw_df = raw.get_dataframe([obj]) - proc_df = proc.get_dataframe([obj]) + raw_df = raw.view_as("pd", cols=[obj]) + proc_df = proc.view_as("pd", cols=[obj]) else: for sub_obj in raw[obj].keys(): - raw_df = raw[obj].get_dataframe([str(sub_obj)]) - proc_df = proc[obj].get_dataframe([str(sub_obj)]) + raw_df = raw[obj].view_as("pd", cols=[str(sub_obj)]) + proc_df = proc[obj].view_as("pd", cols=[str(sub_obj)]) assert raw_df.equals(proc_df) @@ -1185,22 +1160,22 @@ def test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) # Build the unprocessed raw file for comparison build_raw(in_stream=daq_file, out_spec=copy_out_spec, overwrite=True, buffer_size=2) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -1232,21 +1207,21 @@ def test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) window_end_index = int(jsonfile[group_name]["window"][2]) # Read in the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", 
raw_file ) if pass_flag: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) else: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -1267,20 +1242,20 @@ def test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) # Check that the waveforms match # These are the channels that should be unprocessed if group_name == "chan1028803" or group_name == "chan1028804": - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", processed_file ) assert np.array_equal( raw_packet_waveform_values.nda, windowed_packet_waveform_values.nda ) else: - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) assert np.array_equal( @@ -1289,25 +1264,21 @@ def test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) ) # Check the t0 and dts are what we expect - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) if pass_flag: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) else: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -1328,16 +1299,16 @@ def test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) ) if pass_flag: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/waveform/dt", processed_file ) else: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -1349,19 +1320,15 @@ def 
test_buffer_processor_drop_waveform_small_buffer(lgnd_test_data, tmptestdir) # check that the t_lo_sat and t_sat_hi are correct if not pass_flag: - wf_table, _ = sto.read_object(str(raw_group), raw_file) + wf_table, _ = sto.read(str(raw_group), raw_file) pc, _, wf_out = bpc(wf_table, json.loads(raw_dsp_config)) pc.execute() raw_sat_lo = wf_out["t_sat_lo"] raw_sat_hi = wf_out["t_sat_hi"] - proc_sat_lo, _ = sto.read_object( - str(raw_group) + "/t_sat_lo", processed_file - ) + proc_sat_lo, _ = sto.read(str(raw_group) + "/t_sat_lo", processed_file) - proc_sat_hi, _ = sto.read_object( - str(raw_group) + "/t_sat_hi", processed_file - ) + proc_sat_hi, _ = sto.read(str(raw_group) + "/t_sat_hi", processed_file) assert np.array_equal(raw_sat_lo.nda, proc_sat_lo.nda) assert np.array_equal(raw_sat_hi.nda, proc_sat_hi.nda) @@ -1419,11 +1386,11 @@ def test_buffer_processor_compression_settings(lgnd_test_data, tmptestdir): build_raw(in_stream=daq_file, out_spec=out_spec, overwrite=True) - sto = lgdo.LH5Store() - presum_wf, _ = sto.read_object( + sto = lh5.LH5Store() + presum_wf, _ = sto.read( "/ch0/raw/presummed_waveform/values", processed_file, decompress=False ) - window_wf, _ = sto.read_object( + window_wf, _ = sto.read( "/ch0/raw/windowed_waveform/values", processed_file, decompress=False ) diff --git a/tests/buffer_processor/test_lh5_buffer_processor.py b/tests/buffer_processor/test_lh5_buffer_processor.py index 74db90c..280fd75 100644 --- a/tests/buffer_processor/test_lh5_buffer_processor.py +++ b/tests/buffer_processor/test_lh5_buffer_processor.py @@ -7,6 +7,7 @@ import lgdo import numpy as np from dspeed import build_processing_chain as bpc +from lgdo import lh5 from daq2lh5.buffer_processor.lh5_buffer_processor import lh5_buffer_processor from daq2lh5.build_raw import build_raw @@ -35,20 +36,18 @@ def test_lh5_buffer_processor_packet_ids(lgnd_test_data): lh5_raw_file_in=raw_file, overwrite=True, out_spec=proc_out_spec ) - sto = lgdo.LH5Store() + sto = lh5.LH5Store() raw_group = "ORFlashCamADCWaveform" - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( - str(raw_group) + "/packet_id", processed_file - ) + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", processed_file) assert np.array_equal(raw_packet_ids.nda, processed_packet_ids.nda) - processed_presummed_wfs, _ = sto.read_object( + processed_presummed_wfs, _ = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - raw_wfs, _ = sto.read_object(str(raw_group) + "/waveform/values", raw_file) + raw_wfs, _ = sto.read(str(raw_group) + "/waveform/values", raw_file) assert processed_presummed_wfs.nda[0][0] == np.sum(raw_wfs.nda[0][:4]) @@ -114,12 +113,12 @@ def test_lh5_buffer_processor_waveform_lengths(lgnd_test_data): proc_file_name=processed_file, ) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] # Grab the proc_spec after keylist expansion from build_raw @@ -142,16 +141,16 @@ def test_lh5_buffer_processor_waveform_lengths(lgnd_test_data): window_start_index = jsonfile["ch0"]["window"][1] window_end_index = 
jsonfile["ch0"]["window"][2] - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -165,17 +164,13 @@ def test_lh5_buffer_processor_waveform_lengths(lgnd_test_data): ) + np.abs(window_start_index) + np.abs(window_end_index) assert isinstance(windowed_packet_waveform_values[0].nda[0][0], np.uint16) - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -197,7 +192,7 @@ def test_lh5_buffer_processor_waveform_lengths(lgnd_test_data): == raw_packet_waveform_t0s.attrs["units"] ) - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) @@ -264,29 +259,29 @@ def test_lh5_buffer_processor_file_size_decrease(lgnd_test_data): proc_file_name=processed_file, ) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] - sto = lgdo.LH5Store() + sto = lh5.LH5Store() wf_size = 0 for raw_group in lh5_tables: wf_size += sys.getsizeof( - sto.read_object(str(raw_group) + "/waveform/values", raw_file)[0].nda + sto.read(str(raw_group) + "/waveform/values", raw_file)[0].nda ) # Make sure that we are actually processing the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -399,12 +394,12 @@ def test_lh5_buffer_processor_separate_name_tables(lgnd_test_data): proc_file_name=processed_file, ) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] # Grab the proc_spec after keylist expansion from 
build_raw @@ -415,12 +410,12 @@ def test_lh5_buffer_processor_separate_name_tables(lgnd_test_data): jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -439,13 +434,13 @@ def test_lh5_buffer_processor_separate_name_tables(lgnd_test_data): window_start_index = int(jsonfile[group_name]["window"][1]) window_end_index = int(jsonfile[group_name]["window"][2]) - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -459,17 +454,13 @@ def test_lh5_buffer_processor_separate_name_tables(lgnd_test_data): ) + np.abs(window_start_index) + np.abs(window_end_index) assert isinstance(windowed_packet_waveform_values[0].nda[0][0], np.uint16) - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -489,7 +480,7 @@ def test_lh5_buffer_processor_separate_name_tables(lgnd_test_data): == raw_packet_waveform_t0s.attrs["units"] ) - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) @@ -594,12 +585,12 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): lh5_raw_file_in=raw_file, overwrite=True, out_spec=proc_out_spec ) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] # Grab the proc_spec after keylist expansion from build_raw @@ -610,12 +601,12 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -646,21 +637,21 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): window_end_index = 
int(jsonfile[group_name]["window"][2]) # Read in the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) if pass_flag: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) else: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -673,25 +664,21 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): ) + np.abs(window_start_index) + np.abs(window_end_index) assert isinstance(windowed_packet_waveform_values[0].nda[0][0], np.uint16) - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) if pass_flag: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) else: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -712,11 +699,11 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): ) if pass_flag: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/waveform/dt", processed_file ) else: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) # Check that the dts match what we expect, with the correct units @@ -727,19 +714,15 @@ def test_raw_geds_no_proc_spms(lgnd_test_data): # check that the t_lo_sat and t_sat_hi are correct if not pass_flag: - wf_table, _ = sto.read_object(str(raw_group), raw_file) + wf_table, _ = sto.read(str(raw_group), raw_file) pc, _, wf_out = bpc(wf_table, json.loads(raw_dsp_config)) pc.execute() raw_sat_lo = wf_out["t_sat_lo"] raw_sat_hi = wf_out["t_sat_hi"] - proc_sat_lo, _ = sto.read_object( - str(raw_group) + "/t_sat_lo", processed_file - ) + proc_sat_lo, _ = sto.read(str(raw_group) + "/t_sat_lo", processed_file) - proc_sat_hi, _ = sto.read_object( - str(raw_group) + "/t_sat_hi", processed_file - ) + proc_sat_hi, _ = sto.read(str(raw_group) + "/t_sat_hi", processed_file) assert np.array_equal(raw_sat_lo.nda, proc_sat_lo.nda) assert np.array_equal(raw_sat_hi.nda, proc_sat_hi.nda) @@ -853,22 +836,22 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): "proc_spec" ) - lh5_tables = lgdo.ls(raw_file) + lh5_tables = lh5.ls(raw_file) # check if group points to raw data; sometimes 'raw' 
is nested, e.g g024/raw for i, tb in enumerate(lh5_tables): - if "raw" not in tb and lgdo.ls(raw_file, f"{tb}/raw"): + if "raw" not in tb and lh5.ls(raw_file, f"{tb}/raw"): lh5_tables[i] = f"{tb}/raw" - elif not lgdo.ls(raw_file, tb): + elif not lh5.ls(raw_file, tb): del lh5_tables[i] jsonfile = proc_spec - sto = lgdo.LH5Store() + sto = lh5.LH5Store() for raw_group in lh5_tables: # First, check the packet ids - raw_packet_ids, _ = sto.read_object(str(raw_group) + "/packet_id", raw_file) - processed_packet_ids, _ = sto.read_object( + raw_packet_ids, _ = sto.read(str(raw_group) + "/packet_id", raw_file) + processed_packet_ids, _ = sto.read( str(raw_group) + "/packet_id", processed_file ) @@ -900,21 +883,21 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): window_end_index = int(jsonfile[group_name]["window"][2]) # Read in the waveforms - raw_packet_waveform_values = sto.read_object( + raw_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", raw_file ) if pass_flag: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/waveform/values", processed_file ) else: - presummed_packet_waveform_values = sto.read_object( + presummed_packet_waveform_values = sto.read( str(raw_group) + "/presummed_waveform/values", processed_file ) - windowed_packet_waveform_values = sto.read_object( + windowed_packet_waveform_values = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) @@ -932,20 +915,20 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): # Check that the waveforms match # These are the channels that should be unprocessed if group_name == "chan1028803" or group_name == "chan1028804": - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", processed_file ) assert np.array_equal( raw_packet_waveform_values.nda, windowed_packet_waveform_values.nda ) else: - raw_packet_waveform_values, _ = sto.read_object( + raw_packet_waveform_values, _ = sto.read( str(raw_group) + "/waveform/values", raw_file ) - windowed_packet_waveform_values, _ = sto.read_object( + windowed_packet_waveform_values, _ = sto.read( str(raw_group) + "/windowed_waveform/values", processed_file ) assert np.array_equal( @@ -954,25 +937,21 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): ) # Check the t0 and dts are what we expect - raw_packet_waveform_t0s, _ = sto.read_object( - str(raw_group) + "/waveform/t0", raw_file - ) - raw_packet_waveform_dts, _ = sto.read_object( - str(raw_group) + "/waveform/dt", raw_file - ) + raw_packet_waveform_t0s, _ = sto.read(str(raw_group) + "/waveform/t0", raw_file) + raw_packet_waveform_dts, _ = sto.read(str(raw_group) + "/waveform/dt", raw_file) if pass_flag: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/waveform/t0", processed_file ) else: - windowed_packet_waveform_t0s, _ = sto.read_object( + windowed_packet_waveform_t0s, _ = sto.read( str(raw_group) + 
"/windowed_waveform/t0", processed_file ) - presummed_packet_waveform_t0s, _ = sto.read_object( + presummed_packet_waveform_t0s, _ = sto.read( str(raw_group) + "/presummed_waveform/t0", processed_file ) @@ -993,16 +972,16 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): ) if pass_flag: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/waveform/dt", processed_file ) else: - presummed_packet_waveform_dts, _ = sto.read_object( + presummed_packet_waveform_dts, _ = sto.read( str(raw_group) + "/presummed_waveform/dt", processed_file ) # Check that the presum_rate is correctly identified - presum_rate_from_file, _ = sto.read_object( + presum_rate_from_file, _ = sto.read( str(raw_group) + "/presum_rate", processed_file ) assert presum_rate_from_file.nda[0] == presum_rate @@ -1014,19 +993,15 @@ def test_lh5_buffer_processor_multiple_keys(lgnd_test_data): # check that the t_lo_sat and t_sat_hi are correct if not pass_flag: - wf_table, _ = sto.read_object(str(raw_group), raw_file) + wf_table, _ = sto.read(str(raw_group), raw_file) pc, _, wf_out = bpc(wf_table, json.loads(raw_dsp_config)) pc.execute() raw_sat_lo = wf_out["t_sat_lo"] raw_sat_hi = wf_out["t_sat_hi"] - proc_sat_lo, _ = sto.read_object( - str(raw_group) + "/t_sat_lo", processed_file - ) + proc_sat_lo, _ = sto.read(str(raw_group) + "/t_sat_lo", processed_file) - proc_sat_hi, _ = sto.read_object( - str(raw_group) + "/t_sat_hi", processed_file - ) + proc_sat_hi, _ = sto.read(str(raw_group) + "/t_sat_hi", processed_file) assert np.array_equal(raw_sat_lo.nda, proc_sat_lo.nda) assert np.array_equal(raw_sat_hi.nda, proc_sat_hi.nda) @@ -1067,11 +1042,11 @@ def test_buffer_processor_all_pass(lgnd_test_data): ) # assert filecmp.cmp(raw_file, processed_file, shallow=True) - sto = lgdo.LH5Store() - raw_tables = lgdo.ls(raw_file) + sto = lh5.LH5Store() + raw_tables = lh5.ls(raw_file) for tb in raw_tables: - raw, _ = sto.read_object(tb, raw_file) - proc, _ = sto.read_object(tb, processed_file) + raw, _ = sto.read(tb, raw_file) + proc, _ = sto.read(tb, processed_file) if isinstance(raw, lgdo.Scalar): raw_value = raw.value raw_attrs = raw.attrs @@ -1082,12 +1057,12 @@ def test_buffer_processor_all_pass(lgnd_test_data): else: for obj in raw.keys(): if not isinstance(raw[obj], lgdo.Table): - raw_df = raw.get_dataframe([obj]) - proc_df = proc.get_dataframe([obj]) + raw_df = raw.view_as("pd", cols=[obj]) + proc_df = proc.view_as("pd", cols=[obj]) else: for sub_obj in raw[obj].keys(): - raw_df = raw[obj].get_dataframe([str(sub_obj)]) - proc_df = proc[obj].get_dataframe([str(sub_obj)]) + raw_df = raw[obj].view_as("pd", cols=[str(sub_obj)]) + proc_df = proc[obj].view_as("pd", cols=[str(sub_obj)]) assert raw_df.equals(proc_df) diff --git a/tests/fc/test_fc_event_decoder.py b/tests/fc/test_fc_event_decoder.py index 1f3e80a..ca1e12c 100644 --- a/tests/fc/test_fc_event_decoder.py +++ b/tests/fc/test_fc_event_decoder.py @@ -24,7 +24,7 @@ def event_rbkd(fcio_obj, fcio_config): # decode packet into the lgdo's and check if the buffer is full assert decoder.decode_packet(fcio=fcio_obj, evt_rbkd=rbkd, packet_id=69) is True - # # check compression settings (here before any LH5Store.write_object() call + # # check compression settings (here before any LH5Store.write() call # assert "compression" in rbkd[0].lgdo["packet_id"].attrs # assert "compression" in rbkd[0].lgdo["waveform"].values.attrs diff --git a/tests/test_build_raw.py b/tests/test_build_raw.py index 396c38a..6a8c16f 100644 --- 
+++ b/tests/test_build_raw.py
@@ -4,8 +4,8 @@
 import h5py
 import pytest
+from lgdo import lh5
 from lgdo.compression import ULEB128ZigZagDiff
-from lgdo.lh5_store import LH5Store, ls
 
 from daq2lh5 import build_raw
 from daq2lh5.fc.fc_event_decoder import fc_decoded_values
@@ -86,8 +86,8 @@ def test_build_raw_fc_out_spec(lgnd_test_data, tmptestdir):
         overwrite=True,
     )
 
-    store = LH5Store()
-    lh5_obj, n_rows = store.read_object("/spms", out_file)
+    store = lh5.LH5Store()
+    lh5_obj, n_rows = store.read("/spms", out_file)
 
     assert n_rows == 10
     assert (lh5_obj["channel"].nda == [2, 3, 4, 2, 3, 4, 2, 3, 4, 2]).all()
@@ -124,9 +124,9 @@ def test_build_raw_fc_channelwise_out_spec(lgnd_test_data, tmptestdir):
         overwrite=True,
     )
 
-    assert ls(out_file) == ["ch0", "ch1", "ch2", "ch3", "ch4", "ch5"]
-    assert ls(out_file, "ch0/") == ["ch0/raw"]
-    assert ls(out_file, "ch0/raw/waveform") == ["ch0/raw/waveform"]
+    assert lh5.ls(out_file) == ["ch0", "ch1", "ch2", "ch3", "ch4", "ch5"]
+    assert lh5.ls(out_file, "ch0/") == ["ch0/raw"]
+    assert lh5.ls(out_file, "ch0/raw/waveform") == ["ch0/raw/waveform"]
 
 
 def test_build_raw_orca(lgnd_test_data, tmptestdir):
@@ -163,8 +163,8 @@ def test_build_raw_orca_out_spec(lgnd_test_data, tmptestdir):
         overwrite=True,
     )
 
-    store = LH5Store()
-    lh5_obj, n_rows = store.read_object("/geds", out_file)
+    store = lh5.LH5Store()
+    lh5_obj, n_rows = store.read("/geds", out_file)
 
     assert n_rows == 10
     assert (lh5_obj["channel"].nda == [2, 3, 4, 2, 3, 4, 2, 3, 4, 2]).all()
@@ -258,8 +258,8 @@ def test_build_raw_wf_compression_in_decoded_values(lgnd_test_data, tmptestdir):
         assert f["ORFlashCamADCWaveform/waveform/t0"].shuffle is True
         assert f["ORFlashCamADCWaveform/waveform/t0"].compression is None
 
-    store = LH5Store()
-    obj, _ = store.read_object(
+    store = lh5.LH5Store()
+    obj, _ = store.read(
         "ORFlashCamADCWaveform/waveform/values", out_file, decompress=False
     )
     assert obj.attrs["codec"] == "uleb128_zigzag_diff"
@@ -306,8 +306,8 @@ def test_build_raw_compass_out_spec(lgnd_test_data, tmptestdir):
         ),
     )
 
-    store = LH5Store()
-    lh5_obj, n_rows = store.read_object("/spms", out_file)
+    store = lh5.LH5Store()
+    lh5_obj, n_rows = store.read("/spms", out_file)
 
     assert n_rows == 10
     assert (lh5_obj["channel"].nda == [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]).all()
@@ -325,8 +325,8 @@ def test_build_raw_compass_out_spec_no_config(lgnd_test_data, tmptestdir):
         overwrite=True,
     )
 
-    store = LH5Store()
-    lh5_obj, n_rows = store.read_object("/spms", out_file)
+    store = lh5.LH5Store()
+    lh5_obj, n_rows = store.read("/spms", out_file)
 
     assert n_rows == 10
     assert (lh5_obj["channel"].nda == [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]).all()
diff --git a/tests/test_daq_to_raw.py b/tests/test_daq_to_raw.py
index 0277cd0..2013db7 100644
--- a/tests/test_daq_to_raw.py
+++ b/tests/test_daq_to_raw.py
@@ -5,7 +5,7 @@
 from collections import Counter
 from io import BytesIO
 
-from lgdo import LH5Store
+from lgdo.lh5 import LH5Store
 
 from daq2lh5 import build_raw
 from daq2lh5.orca import orca_streamer
@@ -26,7 +26,7 @@ def encode_header(self):
         """Convert orca header back to a byte string."""
         lh5 = LH5Store()
 
-        test_file, _ = lh5.read_object(
+        test_file, _ = lh5.read(
             "OrcaHeader",
             self.file,
         )
@@ -65,7 +65,7 @@ def encode_orflashcamconfig(self, ii):
         """Convert orca flashcam config data back to byte strings."""
         lh5 = LH5Store()
 
-        tbl, _ = lh5.read_object(
+        tbl, _ = lh5.read(
             "ORFlashCamListenerConfig",
             self.file,
         )
@@ -104,7 +104,7 @@ def encode_orflashcamadcwaveform(self, ii):
         """Convert orca flashcam ADC waveform data back to byte strings."""
         lh5 = LH5Store()
 
-        tbl, _ = lh5.read_object(
+        tbl, _ = lh5.read(
             "ORFlashCamADCWaveform",
             self.file,
         )
@@ -171,7 +171,7 @@ def encode_orrun(self, ii):
         """Convert orca run data back to byte strings."""
         lh5 = LH5Store()
 
-        tbl, _ = lh5.read_object(
+        tbl, _ = lh5.read(
             "ORRunDecoderForRun",
             self.file,
         )

From c6e3c50405d77131305de80e632e066cc1abab43 Mon Sep 17 00:00:00 2001
From: Luigi Pertoldi
Date: Tue, 2 Jan 2024 13:27:22 +0100
Subject: [PATCH 2/3] Fix assertion in buffer_processor tests

---
 .../buffer_processor/test_buffer_processor.py | 22 +++++--------------
 .../test_lh5_buffer_processor.py              | 22 +++++--------------
 2 files changed, 10 insertions(+), 34 deletions(-)

diff --git a/tests/buffer_processor/test_buffer_processor.py b/tests/buffer_processor/test_buffer_processor.py
index 5d01361..11dc084 100644
--- a/tests/buffer_processor/test_buffer_processor.py
+++ b/tests/buffer_processor/test_buffer_processor.py
@@ -1039,24 +1039,12 @@ def test_buffer_processor_all_pass(lgnd_test_data, tmptestdir):
     for tb in raw_tables:
         raw, _ = sto.read(tb, raw_file)
         proc, _ = sto.read(tb, processed_file)
-        if isinstance(raw, lgdo.Scalar):
-            raw_value = raw.value
-            raw_attrs = raw.attrs
-            proc_value = proc.value
-            proc_attrs = proc.attrs
-            assert raw_value == proc_value
-            assert raw_attrs == proc_attrs
+
+        if isinstance(raw, lgdo.Struct):
+            for obj in raw:
+                assert raw[obj] == proc[obj]
         else:
-            for obj in raw.keys():
-                if not isinstance(raw[obj], lgdo.Table):
-                    raw_df = raw.view_as("pd", cols=[obj])
-                    proc_df = proc.view_as("pd", cols=[obj])
-                else:
-                    for sub_obj in raw[obj].keys():
-                        raw_df = raw[obj].view_as("pd", cols=[str(sub_obj)])
-                        proc_df = proc[obj].view_as("pd", cols=[str(sub_obj)])
-
-                assert raw_df.equals(proc_df)
+            assert raw == proc
 
     # check that packet indexes match in verification test
diff --git a/tests/buffer_processor/test_lh5_buffer_processor.py b/tests/buffer_processor/test_lh5_buffer_processor.py
index 280fd75..9b281cc 100644
--- a/tests/buffer_processor/test_lh5_buffer_processor.py
+++ b/tests/buffer_processor/test_lh5_buffer_processor.py
@@ -1047,24 +1047,12 @@ def test_buffer_processor_all_pass(lgnd_test_data):
     for tb in raw_tables:
         raw, _ = sto.read(tb, raw_file)
         proc, _ = sto.read(tb, processed_file)
-        if isinstance(raw, lgdo.Scalar):
-            raw_value = raw.value
-            raw_attrs = raw.attrs
-            proc_value = proc.value
-            proc_attrs = proc.attrs
-            assert raw_value == proc_value
-            assert raw_attrs == proc_attrs
+
+        if isinstance(raw, lgdo.Struct):
+            for obj in raw:
+                assert raw[obj] == proc[obj]
         else:
-            for obj in raw.keys():
-                if not isinstance(raw[obj], lgdo.Table):
-                    raw_df = raw.view_as("pd", cols=[obj])
-                    proc_df = proc.view_as("pd", cols=[obj])
-                else:
-                    for sub_obj in raw[obj].keys():
-                        raw_df = raw[obj].view_as("pd", cols=[str(sub_obj)])
-                        proc_df = proc[obj].view_as("pd", cols=[str(sub_obj)])
-
-                assert raw_df.equals(proc_df)
+            assert raw == proc
 
 
 def test_lh5_buffer_processor_hdf5_settings(lgnd_test_data):

From 7521d7fcf6cdc973ec32940e1d743aaa5397e020 Mon Sep 17 00:00:00 2001
From: Luigi Pertoldi
Date: Tue, 2 Jan 2024 14:49:57 +0100
Subject: [PATCH 3/3] Install dspeed directly from main branch as a temporary CI fix

---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index 9d808f0..962e3f4 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,7 +31,7 @@ classifiers =
 [options]
 packages = find:
 install_requires =
-    dspeed>=1.3.0a3
+    dspeed@git+https://github.com/legend-exp/dspeed@main
     h5py>=3.2.0
     hdf5plugin
     legend-pydataobj>=1.5.0a1
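A short usage sketch (not part of the patches above) of the renamed legend-pydataobj
API that this series migrates to, for anyone updating scripts that read daq2lh5 output.
The file name "demo.lh5", the "ch0/raw" group and the "timestamp" column below are
made-up placeholders; only the function names come from the changes in the patches.

    from lgdo import lh5

    store = lh5.LH5Store()

    # LH5Store.read_object() is now LH5Store.read(); it still returns (obj, n_rows)
    tbl, n_rows = store.read("ch0/raw", "demo.lh5")

    # lgdo.ls() is now lgdo.lh5.ls()
    print(lh5.ls("demo.lh5", "ch0/"))

    # Table.get_dataframe([...]) is now Table.view_as("pd", cols=[...])
    df = tbl.view_as("pd", cols=["timestamp"])

    # LH5Store.write_object() is now LH5Store.write()
    store.write(tbl, "raw", lh5_file="demo-copy.lh5", group="ch0")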