Remove legacy hacks in ASV benchmarks
crusaderky committed Nov 19, 2024
1 parent 37f700c · commit 4150809
Showing 3 changed files with 11 additions and 92 deletions.
benchmarks/delete_versions.py (32 changes: 1 addition & 31 deletions)
@@ -5,41 +5,11 @@
 import h5py
 import numpy
 
-from versioned_hdf5 import VersionedHDF5File
+from versioned_hdf5 import VersionedHDF5File, delete_versions
 
 filename = "delete_versions_bench.h5"
 
 
-try:
-    from versioned_hdf5 import delete_versions
-except ImportError:
-    from versioned_hdf5.replay import recreate_dataset, swap, tmp_group
-
-    def delete_versions(f, versions_to_delete, names=("values",)):
-        """
-        Modified replay.delete_version to delete multiple versions.
-        """
-        if isinstance(f, VersionedHDF5File):
-            f = f.f
-
-        def callback(dataset, version_name):
-            if version_name in versions_to_delete:
-                return
-            return dataset
-
-        newf = tmp_group(f)
-
-        for name in names:
-            recreate_dataset(f, name, newf, callback=callback)
-
-        swap(f, newf)
-
-        for version in versions_to_delete:
-            del f["_version_data/versions"][version]
-
-        del newf[newf.name]
-
-
 class TimeDeleting:
     params = [10, 30, 50]
     timeout = 1000
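With the fallback shim removed, this benchmark depends only on the public delete_versions entry point imported above. A minimal sketch of that usage, assuming the delete_versions(f, versions_to_delete) signature; the version names, dataset shape, and chunking below are illustrative, not the benchmark's actual parameters:

import h5py
import numpy as np

from versioned_hdf5 import VersionedHDF5File, delete_versions

# Build a small versioned file with several versions of one dataset.
with h5py.File("delete_versions_bench.h5", "w") as f:
    vf = VersionedHDF5File(f)
    with vf.stage_version("r0") as g:
        # "values" matches the default dataset name used by the removed shim.
        g.create_dataset("values", data=np.arange(100), chunks=(10,))
    for i in range(1, 5):
        with vf.stage_version(f"r{i}") as g:
            g["values"][i] = -1  # a small change per version

    # Delete every version except the newest in a single call.
    delete_versions(f, [f"r{i}" for i in range(4)])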
benchmarks/inmemoryarraydataset.py (35 changes: 5 additions & 30 deletions)
@@ -1,20 +1,11 @@
 import os
 
 import h5py
+import numpy as np
 
 from versioned_hdf5 import VersionedHDF5File
 from versioned_hdf5.wrappers import InMemoryArrayDataset
 
-try:
-    from versioned_hdf5.wrappers import DatasetWrapper
-except ImportError:
-
-    class DatasetWrapper:
-        pass
-
-
-import numpy as np
-
 
 class TimeInMemoryArrayDataset:
     timeout = 1000
@@ -31,11 +22,7 @@ def time_getattr(self):
                     data=np.arange(10000).reshape((100, 10, 10)),
                     chunks=(3, 3, 3),
                 )
-                assert (
-                    isinstance(dataset, InMemoryArrayDataset)
-                    or isinstance(dataset, DatasetWrapper)
-                    and isinstance(dataset.dataset, InMemoryArrayDataset)
-                )
+                assert isinstance(dataset.dataset, InMemoryArrayDataset)
                 dataset[:, 0, 0:6]
 
     def time_setattr(self):
@@ -47,11 +34,7 @@ def time_setattr(self):
                     data=np.arange(10000).reshape((100, 10, 10)),
                     chunks=(3, 3, 3),
                 )
-                assert (
-                    isinstance(dataset, InMemoryArrayDataset)
-                    or isinstance(dataset, DatasetWrapper)
-                    and isinstance(dataset.dataset, InMemoryArrayDataset)
-                )
+                assert isinstance(dataset.dataset, InMemoryArrayDataset)
                 dataset[:, 0, 0:6] = -1
 
     def time_resize_bigger(self):
@@ -63,11 +46,7 @@ def time_resize_bigger(self):
                     data=np.arange(10000).reshape((100, 10, 10)),
                     chunks=(3, 3, 3),
                 )
-                assert (
-                    isinstance(dataset, InMemoryArrayDataset)
-                    or isinstance(dataset, DatasetWrapper)
-                    and isinstance(dataset.dataset, InMemoryArrayDataset)
-                )
+                assert isinstance(dataset.dataset, InMemoryArrayDataset)
                 dataset.resize((100, 100, 100))
 
     def time_resize_smaller(self):
@@ -79,9 +58,5 @@ def time_resize_smaller(self):
                     data=np.arange(10000).reshape((100, 10, 10)),
                     chunks=(3, 3, 3),
                 )
-                assert (
-                    isinstance(dataset, InMemoryArrayDataset)
-                    or isinstance(dataset, DatasetWrapper)
-                    and isinstance(dataset.dataset, InMemoryArrayDataset)
-                )
+                assert isinstance(dataset.dataset, InMemoryArrayDataset)
                 dataset.resize((10, 10, 10))
benchmarks/inmemorydataset.py (36 changes: 5 additions & 31 deletions)
@@ -1,20 +1,10 @@
 import os
 
 import h5py
 
+import numpy as np
 from versioned_hdf5 import VersionedHDF5File
 from versioned_hdf5.wrappers import InMemoryDataset
 
-try:
-    from versioned_hdf5.wrappers import DatasetWrapper
-except ImportError:
-
-    class DatasetWrapper:
-        pass
-
-
-import numpy as np
-
-
 class TimeInMemoryDataset:
     timeout = 1000
@@ -44,45 +34,29 @@ def setup(self):
 
     def time_getitem(self):
         dataset = self.versioned_file["version1"]["data"]
-        assert (
-            isinstance(dataset, InMemoryDataset)
-            or isinstance(dataset, DatasetWrapper)
-            and isinstance(dataset.dataset, InMemoryDataset)
-        )
+        assert isinstance(dataset.dataset, InMemoryDataset)
         dataset[:, 0, 0:6]
 
     def time_setitem(self):
         # https://github.com/airspeed-velocity/asv/issues/966
         self.setup()
         with self.versioned_file.stage_version("version2") as g:
             dataset = g["data"]
-            assert (
-                isinstance(dataset, InMemoryDataset)
-                or isinstance(dataset, DatasetWrapper)
-                and isinstance(dataset.dataset, InMemoryDataset)
-            )
+            assert isinstance(dataset.dataset, InMemoryDataset)
             dataset[:, 0, 0:6] = -1
 
     def time_resize_bigger(self):
         # https://github.com/airspeed-velocity/asv/issues/966
         self.setup()
         with self.versioned_file.stage_version("version2") as g:
             dataset = g["data"]
-            assert (
-                isinstance(dataset, InMemoryDataset)
-                or isinstance(dataset, DatasetWrapper)
-                and isinstance(dataset.dataset, InMemoryDataset)
-            )
+            assert isinstance(dataset.dataset, InMemoryDataset)
             dataset.resize((100, 100, 100))
 
     def time_resize_smaller(self):
         # https://github.com/airspeed-velocity/asv/issues/966
         self.setup()
         with self.versioned_file.stage_version("version2") as g:
             dataset = g["data"]
-            assert (
-                isinstance(dataset, InMemoryDataset)
-                or isinstance(dataset, DatasetWrapper)
-                and isinstance(dataset.dataset, InMemoryDataset)
-            )
+            assert isinstance(dataset.dataset, InMemoryDataset)
             dataset.resize((10, 10, 10))
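A note on the pattern visible above: each mutating benchmark re-runs self.setup() at the top of the timed method (with a link to airspeed-velocity/asv issue 966) so that the timed code starts from fresh state. A schematic ASV benchmark using the same workaround, with hypothetical names and data:

import numpy as np


class TimeMutatingExample:
    """Schematic ASV benchmark; names and data are illustrative only."""

    def setup(self):
        # Fresh state before timing.
        self.data = np.arange(1000)

    def time_overwrite(self):
        # Re-initialise explicitly, as the benchmarks above do, because asv
        # may not re-run setup() between repeats (see asv issue 966).
        self.setup()
        self.data[:] = -1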
