From ca3a595be8ac5526f3abd095a50520676881b09d Mon Sep 17 00:00:00 2001
From: Vincent Moens
Date: Wed, 25 Sep 2024 06:43:37 +0100
Subject: [PATCH] [BugFix] Extend RB with lazy stack (revamp)

ghstack-source-id: df397d09166d8fb61eceacb5fe8659e0295ca414
Pull Request resolved: https://github.com/pytorch/rl/pull/2454
---
 torchrl/data/replay_buffers/storages.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torchrl/data/replay_buffers/storages.py b/torchrl/data/replay_buffers/storages.py
index f047d5b2d22..b914c52b338 100644
--- a/torchrl/data/replay_buffers/storages.py
+++ b/torchrl/data/replay_buffers/storages.py
@@ -1118,7 +1118,7 @@ def max_size_along_dim0(data_shape):
             out = data.clone().to(self.device)
             out = out.expand(max_size_along_dim0(data.shape))
             out = out.memmap_like(prefix=self.scratch_dir, existsok=self.existsok)
-            if torchrl_logger.getEffectiveLevel() == logging.DEBUG:
+            if torchrl_logger.isEnabledFor(logging.DEBUG):
                 for key, tensor in sorted(
                     out.items(include_nested=True, leaves_only=True), key=str
                 ):