Commit 6f59b88
Improves recorder manager to support customized demo indices. (#3552)
# Description

The current manager-based workflow uses the `recorder_manager` to store demo observations and actions to an HDF5 file. However:

- It only outputs successful demos and accumulates the demo index.
- It does not support customized demo indices (a typical use case: replay an existing HDF5 dataset and store the successful demos while keeping their original demo indices).

The modification in this PR keeps the original behavior but extends it so a demo can be stored under a specific demo index.

## Type of change

- New feature (non-breaking change which adds functionality)

## Screenshots

| Before | After |
| ------ | ----- |
| <img width="100" height="150" alt="Screenshot from 2025-09-25 10-20-31" src="https://github.com/user-attachments/assets/b4af24df-2781-4ba2-8693-fd246875012b" /> | <img width="100" height="150" alt="Screenshot from 2025-09-25 10-20-47" src="https://github.com/user-attachments/assets/e86d3210-e205-4d6b-b83e-cf69a585743b" /> |

## Checklist

- [x] I have read and understood the [contribution guidelines](https://isaac-sim.github.io/IsaacLab/main/source/refs/contributing.html)
- [x] I have run the [`pre-commit` checks](https://pre-commit.com/) with `./isaaclab.sh --format`
- [ ] I have made corresponding changes to the documentation
- [x] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] I have updated the changelog and the corresponding version in the extension's `config/extension.toml` file
- [ ] I have added my name to the `CONTRIBUTORS.md` or my name already exists there

---------

Co-authored-by: ooctipus <[email protected]>
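For illustration, a minimal sketch of the use case described above (replaying an existing HDF5 dataset and re-exporting successful demos under their original indices). Only the `demo_ids` argument to `export_episodes` comes from this PR; the environment handle, the replay loop, and the dataset-handler calls are assumptions about the surrounding Isaac Lab API.

```python
# Minimal sketch, assuming `env` is a manager-based environment with recording enabled.
# Only `demo_ids=` is introduced by this commit; the other calls are assumed API.
from isaaclab.utils.datasets import HDF5DatasetFileHandler

dataset = HDF5DatasetFileHandler()
dataset.open("source_dataset.hdf5")  # existing dataset to replay (assumed path)

for episode_name in dataset.get_episode_names():       # e.g. "demo_0", "demo_7", ...
    original_index = int(episode_name.split("_")[-1])  # keep the original demo index
    episode = dataset.load_episode(episode_name, env.device)
    # ... replay the episode's actions through `env` and record it (omitted) ...
    # Export the recorded episode as "demo_{original_index}" instead of an
    # accumulated sequential index.
    env.recorder_manager.export_episodes(env_ids=[0], demo_ids=[original_index])
```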
1 parent 6acdd82 commit 6f59b88

4 files changed: +54 −9 lines

source/isaaclab/config/extension.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [package]
 
 # Note: Semantic Versioning is used: https://semver.org/
-version = "0.47.3"
+version = "0.47.4"
 
 # Description
 title = "Isaac Lab framework for Robot Learning"

source/isaaclab/docs/CHANGELOG.rst

Lines changed: 17 additions & 0 deletions
@@ -1,6 +1,23 @@
 Changelog
 ---------
 
+0.47.4 (2025-10-30)
+~~~~~~~~~~~~~~~~~~~
+
+Changed
+^^^^^^^
+
+* Enhanced :meth:`~isaaclab.managers.RecorderManager.export_episodes` method to support customizable sequence of demo IDs:
+
+  - Added argument ``demo_ids`` to :meth:`~isaaclab.managers.RecorderManager.export_episodes` to accept a sequence of integers
+    for custom episode identifiers.
+
+* Enhanced :meth:`~isaaclab.utils.datasets.HDF5DatasetFileHandler.write_episode` method to support customizable episode identifiers:
+
+  - Added argument ``demo_id`` to :meth:`~isaaclab.utils.datasets.HDF5DatasetFileHandler.write_episode` to accept a custom integer
+    for episode identifier.
+
 0.47.3 (2025-10-22)
 ~~~~~~~~~~~~~~~~~~~
 

source/isaaclab/isaaclab/managers/recorder_manager.py

Lines changed: 20 additions & 3 deletions
@@ -442,12 +442,16 @@ def get_ep_meta(self) -> dict:
         ep_meta = self._env.cfg.get_ep_meta()
         return ep_meta
 
-    def export_episodes(self, env_ids: Sequence[int] | None = None) -> None:
+    def export_episodes(self, env_ids: Sequence[int] | None = None, demo_ids: Sequence[int] | None = None) -> None:
         """Concludes and exports the episodes for the given environment ids.
 
         Args:
             env_ids: The environment ids. Defaults to None, in which case
                 all environments are considered.
+            demo_ids: Custom identifiers for the exported episodes.
+                If provided, episodes will be named "demo_{demo_id}" in the dataset.
+                Should have the same length as env_ids if both are provided.
+                If None, uses the default sequential naming scheme. Defaults to None.
         """
         # Do nothing if no active recorder terms are provided
         if len(self.active_terms) == 0:
@@ -458,6 +462,17 @@ def export_episodes(self, env_ids: Sequence[int] | None = None) -> None:
         if isinstance(env_ids, torch.Tensor):
             env_ids = env_ids.tolist()
 
+        # Handle demo_ids processing
+        if demo_ids is not None:
+            if isinstance(demo_ids, torch.Tensor):
+                demo_ids = demo_ids.tolist()
+            if len(demo_ids) != len(env_ids):
+                raise ValueError(f"Length of demo_ids ({len(demo_ids)}) must match length of env_ids ({len(env_ids)})")
+            # Check for duplicate demo_ids
+            if len(set(demo_ids)) != len(demo_ids):
+                duplicates = [x for i, x in enumerate(demo_ids) if demo_ids.index(x) != i]
+                raise ValueError(f"demo_ids must be unique. Found duplicates: {list(set(duplicates))}")
+
         # Export episode data through dataset exporter
         need_to_flush = False
 
@@ -468,7 +483,7 @@ def export_episodes(self, env_ids: Sequence[int] | None = None) -> None:
             if self._failed_episode_dataset_file_handler is not None:
                 self._failed_episode_dataset_file_handler.add_env_args(ep_meta)
 
-        for env_id in env_ids:
+        for i, env_id in enumerate(env_ids):
             if env_id in self._episodes and not self._episodes[env_id].is_empty():
                 self._episodes[env_id].pre_export()
 
@@ -484,7 +499,9 @@ def export_episodes(self, env_ids: Sequence[int] | None = None) -> None:
                 else:
                     target_dataset_file_handler = self._failed_episode_dataset_file_handler
                 if target_dataset_file_handler is not None:
-                    target_dataset_file_handler.write_episode(self._episodes[env_id])
+                    # Use corresponding demo_id if provided, otherwise None
+                    current_demo_id = demo_ids[i] if demo_ids is not None else None
+                    target_dataset_file_handler.write_episode(self._episodes[env_id], current_demo_id)
                     need_to_flush = True
                 # Update episode count
                 if episode_succeeded:
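A short example of how the extended `export_episodes` might be called, based on the signature and checks added above; the `recorder_manager` handle itself is assumed to come from a running manager-based environment (e.g. `env.recorder_manager`).

```python
# Episodes recorded for envs 0 and 1 are written as "demo_3" and "demo_7".
recorder_manager.export_episodes(env_ids=[0, 1], demo_ids=[3, 7])

# Omitting demo_ids keeps the original behavior (sequential "demo_{count}" names).
recorder_manager.export_episodes(env_ids=[0, 1])

# The new validation raises ValueError on mismatched lengths or duplicate ids:
# recorder_manager.export_episodes(env_ids=[0, 1], demo_ids=[3])     # length mismatch
# recorder_manager.export_episodes(env_ids=[0, 1], demo_ids=[3, 3])  # duplicate demo_ids
```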

source/isaaclab/isaaclab/utils/datasets/hdf5_dataset_file_handler.py

Lines changed: 16 additions & 5 deletions
@@ -136,18 +136,27 @@ def load_dataset_helper(group):
 
         return episode
 
-    def write_episode(self, episode: EpisodeData):
+    def write_episode(self, episode: EpisodeData, demo_id: int | None = None):
         """Add an episode to the dataset.
 
         Args:
             episode: The episode data to add.
+            demo_id: Custom index for the episode. If None, uses default index.
         """
         self._raise_if_not_initialized()
         if episode.is_empty():
             return
 
-        # create episode group based on demo count
-        h5_episode_group = self._hdf5_data_group.create_group(f"demo_{self._demo_count}")
+        # Use custom demo id if provided, otherwise use default naming
+        if demo_id is not None:
+            episode_group_name = f"demo_{demo_id}"
+        else:
+            episode_group_name = f"demo_{self._demo_count}"
+
+        # create episode group with the specified name
+        if episode_group_name in self._hdf5_data_group:
+            raise ValueError(f"Episode group '{episode_group_name}' already exists in the dataset")
+        h5_episode_group = self._hdf5_data_group.create_group(episode_group_name)
 
         # store number of steps taken
         if "actions" in episode.data:
@@ -176,8 +185,10 @@ def create_dataset_helper(group, key, value):
         # increment total step counts
         self._hdf5_data_group.attrs["total"] += h5_episode_group.attrs["num_samples"]
 
-        # increment total demo counts
-        self._demo_count += 1
+        # Only increment demo count if using default indexing
+        if demo_id is None:
+            # increment total demo counts
+            self._demo_count += 1
 
     def flush(self):
         """Flush the episode data to disk."""
