From b62f975c51627cd97b1646aa5e8e879bd0086728 Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Sat, 28 Dec 2024 15:50:14 -0500 Subject: [PATCH 1/9] Allow multiple run folders as input to analyze_crash_reasons --- .../tools/results/analyze_crash_reasons.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/infinigen/tools/results/analyze_crash_reasons.py b/infinigen/tools/results/analyze_crash_reasons.py index 9438836e..90a03913 100644 --- a/infinigen/tools/results/analyze_crash_reasons.py +++ b/infinigen/tools/results/analyze_crash_reasons.py @@ -38,8 +38,15 @@ def get_configs(log_path, stage): return match.groups()[0] -def main(args): - crash_reasons = (args.input_folder / "crash_summaries.txt").read_text().split("\n") +def parse_run_folder(run_folder: Path, args: argparse.Namespace): + + crash_reasons = (run_folder / "crash_summaries.txt") + + if not crash_reasons.exists(): + print(f"Could not find crash reasons for {run_folder}") + return + + crash_reasons = crash_reasons.read_text().split("\n") regex = re.compile( ".*\s.*\s(.*\/([a-zA-Z0-9]*)\/logs\/(.*))\sreason=[\"'](.*)[\"']\snode='(.*)'" @@ -70,6 +77,9 @@ def main(args): df = pd.DataFrame.from_records(records) + return df + +def visualize_results(df: pd.DataFrame, args: argparse.Namespace): df["reason_canonical"] = df["reason"].apply(canonicalize_reason) print("COMMON CRASH REASONS") @@ -108,10 +118,17 @@ def main(args): print(f" {row}") print("") +def main(args): + + run_dfs = [parse_run_folder(run_folder, args) for run_folder in args.input_folder] + run_dfs = [x for x in run_dfs if x is not None] + + df = pd.concat(run_dfs) + visualize_results(df, args) if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("--input_folder", type=Path, required=True) + parser.add_argument("--input_folder", type=Path, required=True, nargs="+") args = parser.parse_args() main(args) From a0edebc4948f7c67cf005390664eaa1b0fbdcdf4 Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 11:39:44 -0500 Subject: [PATCH 2/9] Fix references to indoor_asset_semantics in docs --- docs/HelloRoom.md | 2 +- docs/StaticAssets.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/HelloRoom.md b/docs/HelloRoom.md index 6e017659..1d18a383 100644 --- a/docs/HelloRoom.md +++ b/docs/HelloRoom.md @@ -116,7 +116,7 @@ Each of these commandline args demonstrates a different way in which you can res - `restrict_solving.consgraph_filters=[\"counter\",\"sink\"]` says to throw out any `constraints` or `score_terms` keys from `home_furniture_constraints()` that do not contain `counter` or `sink` as substrings, producing a simpler constraint graph. - `compose_indoors.solve_steps_large=30 compose_indoors.solve_steps_small=30` says to spend fewer optimization steps on large/small objects. You can also do the same for medium. These values override the defaults provided in `fast_solve.gin` and `infinigen_examples/configs_indoor/base.gin` -These settings are intended for debugging or for generating tailored datasets. If you want more granular control over what assets are used for what purposes, please customize `infinigen_examples/indoor_asset_semantics.py` which defines this mapping. +These settings are intended for debugging or for generating tailored datasets. If you want more granular control over what assets are used for what purposes, please customize `infinigen_examples/constraints/semantics.py` which defines this mapping. 
If you are using the commands from [Creating large datasets](#creating-large-datasets) you will instead add these configs as `--overrides` to the end of your command, rather than `-p` diff --git a/docs/StaticAssets.md b/docs/StaticAssets.md index 3f699a1e..73a14dde 100644 --- a/docs/StaticAssets.md +++ b/docs/StaticAssets.md @@ -79,7 +79,7 @@ If you want to add more categories, just add more lines with `{CategoryName}` as ## Define Semantics -Infinigen allows the user to specify high-level semantics for the objects in the scene. These semantics are then used to define high-level constraints. For example, we want to say that our static shelf factory is a type of storage unit, which will be placed against the wall, and there will be a bunch of objects on top of it. In general, if you want your static object factory to be treated like an existing asset factory, you can just imitate the semantics of the existing asset factory. Let's demonstrate this idea by defining semantics for our static shelf. We go to `infinigen_examples/indoor_asset_semantics.py` and search for `LargeShelfFactory`. We see that it is used as `Semantics.Storage` and `Semantics.AssetPlaceholderForChildren`. We want our static shelf to be used as a storage unit as well, so we add a line for our new static factory: +Infinigen allows the user to specify high-level semantics for the objects in the scene. These semantics are then used to define high-level constraints. For example, we want to say that our static shelf factory is a type of storage unit, which will be placed against the wall, and there will be a bunch of objects on top of it. In general, if you want your static object factory to be treated like an existing asset factory, you can just imitate the semantics of the existing asset factory. Let's demonstrate this idea by defining semantics for our static shelf. We go to `infinigen_examples/constraints/semantics.py` and search for `LargeShelfFactory`. We see that it is used as `Semantics.Storage` and `Semantics.AssetPlaceholderForChildren`. We want our static shelf to be used as a storage unit as well, so we add a line for our new static factory: ![alt text](images/static_assets/image3.jpg) Similarly, we add `StaticShelfFactory` to `Semantics.AssetPlaceholderForChildren`. This will replace the placeholder bounding box for the shelf before placing the small objects. @@ -166,7 +166,7 @@ StaticMyCategoryFactory = static_category_factory("infinigen/assets/static_asset 5. Add a line in `infinigen/assets/static_assets/__init__.py` to import the factory from other files. -6. Define the semantics for the objects in `infinigen_examples/indoor_asset_semantics.py`. E.g. +6. Define the semantics for the objects in `infinigen_examples/constraints/semantics.py`. E.g. ```python used_as[Semantics.Furniture] = {... 
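Patch 1 above switches `--input_folder` to `nargs="+"` so one or more run folders can be passed, splits the old `main` into `parse_run_folder` (one DataFrame per run, or `None` when a run has no `crash_summaries.txt`) and `visualize_results`, and concatenates the surviving frames with `pd.concat`. A minimal, self-contained sketch of that aggregation pattern follows; the simplified `parse_run_folder` body and the `outputs/run_a` / `outputs/run_b` paths are illustrative stand-ins, not the real parsing code (which regex-parses each crash line into structured records):

```python
from pathlib import Path
from typing import Optional

import pandas as pd


def parse_run_folder(run_folder: Path) -> Optional[pd.DataFrame]:
    summary = run_folder / "crash_summaries.txt"
    if not summary.exists():
        # runs without a crash summary are skipped instead of aborting the whole tool
        print(f"Could not find crash reasons for {run_folder}")
        return None
    lines = [line for line in summary.read_text().split("\n") if line]
    records = [{"run": run_folder.name, "raw_line": line} for line in lines]
    return pd.DataFrame.from_records(records)


# e.g. invoked as: analyze_crash_reasons.py --input_folder outputs/run_a outputs/run_b
run_folders = [Path("outputs/run_a"), Path("outputs/run_b")]
run_dfs = [parse_run_folder(f) for f in run_folders]
run_dfs = [df for df in run_dfs if df is not None]
df = pd.concat(run_dfs) if run_dfs else pd.DataFrame()
```

Note that with `nargs="+"` the argparse destination keeps the singular name `input_folder` but now holds a list of `Path`s, which is why the new `main` iterates over it.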
From d09026c65af90345551aeadc68f0bf18e9ce91e3 Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 12:26:40 -0500 Subject: [PATCH 3/9] Fix / - substitution in launch.sh branchnames --- tests/integration/launch.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/launch.sh b/tests/integration/launch.sh index 036101a0..a44441aa 100644 --- a/tests/integration/launch.sh +++ b/tests/integration/launch.sh @@ -19,7 +19,7 @@ INFINIGEN_VERSION=$(python -c "import infinigen; print(infinigen.__version__)") COMMIT_HASH=$(git rev-parse HEAD | cut -c 1-6) DATE=$(date '+%Y-%m-%d') JOBTAG="${DATE}_ifg-int" -BRANCH=$(git rev-parse --abbrev-ref HEAD | sed 's/_/-/g; s/\//_/g') +BRANCH=$(git rev-parse --abbrev-ref HEAD | sed 's/_/-/g; s|/|-|g; s/\//_/g') VERSION_STRING="${DATE}_${BRANCH}_${COMMIT_HASH}_${USER}" mkdir -p $OUTPUT_PATH From 9741181a70a6c0cc78b6b630e9e4faf5cf25c2a6 Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 13:12:37 -0500 Subject: [PATCH 4/9] Add get_cmd.child_debug flag to allow targeted debug logging for child jobs of manage_jobs --- infinigen/datagen/job_funcs.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/infinigen/datagen/job_funcs.py b/infinigen/datagen/job_funcs.py index 0603171a..2130ccc7 100644 --- a/infinigen/datagen/job_funcs.py +++ b/infinigen/datagen/job_funcs.py @@ -48,6 +48,7 @@ def get_cmd( driver_script="infinigen_examples.generate_nature", # replace with a regular path to a .py, or another installed module input_folder=None, process_niceness=None, + child_debug=None, ): if isinstance(task, list): task = " ".join(task) @@ -72,9 +73,18 @@ def get_cmd( cmd += "--input_folder " + str(input_folder) + " " if output_folder is not None: cmd += "--output_folder " + str(output_folder) + " " + cmd += f"--seed {seed} --task {task} --task_uniqname {taskname} " + + if child_debug is not None: + if child_debug == "all": + cmd += "--debug " + else: + cmd += f"--debug {child_debug} " + if len(configs) != 0: cmd += f'-g {" ".join(configs)} ' + cmd += "-p" return cmd.split() From 98c93105e2d3760e150d81820c518762eba0999a Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 13:41:01 -0500 Subject: [PATCH 5/9] Reintroduce upload_method=mock --- infinigen/datagen/util/upload_util.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/infinigen/datagen/util/upload_util.py b/infinigen/datagen/util/upload_util.py index 9d72aa55..f97876a9 100644 --- a/infinigen/datagen/util/upload_util.py +++ b/infinigen/datagen/util/upload_util.py @@ -194,6 +194,8 @@ def get_upload_func(method="smbclient"): return smb_client.upload elif method.startswith("copyfile"): return lambda x, y: copy_upload_file(x, y, root_dir=method.split(":")[-1]) + elif method == "mock": + return lambda x, y: print(f"Mock upload {x} to {y}") else: raise ValueError(f"Unrecognized {method=}") From 918c69dc60bdf17805c2c7e8e71ba166a69e0e2c Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 14:08:54 -0500 Subject: [PATCH 6/9] Fix inactive viewpoint filter causing excessive nature populate times / scene complexity --- infinigen/core/placement/placement.py | 105 ++++++++++++++-------- infinigen/core/placement/split_in_view.py | 15 ++-- 2 files changed, 80 insertions(+), 40 deletions(-) diff --git a/infinigen/core/placement/placement.py b/infinigen/core/placement/placement.py index 5e579177..f53584cc 100644 --- a/infinigen/core/placement/placement.py +++ b/infinigen/core/placement/placement.py @@ -128,7 +128,7 
@@ def scatter_placeholders(locations, factory: AssetFactory): return col -def get_placeholder_points(obj): +def get_placeholder_points(obj: bpy.types.Object) -> np.ndarray: if obj.type == "MESH": verts = np.zeros((len(obj.data.vertices), 3)) obj.data.vertices.foreach_get("co", verts.reshape(-1)) @@ -148,51 +148,82 @@ def parse_asset_name(name): return list(match.groups()) +def filter_populate_targets( + placeholders: list[bpy.types.Object], + cameras: list[bpy.types.Object], + dist_cull: float, + vis_cull: float, + verbose: bool, +) -> list[tuple[bpy.types.Object, float, float]]: + if verbose: + placeholders = tqdm(placeholders) + + results = [] + + for i, p in enumerate(placeholders): + classname, *_ = parse_asset_name(p.name) + + if classname is None: + raise ValueError(f"Could not parse {p.name=}, got {classname=}") + + mask, min_dists, min_vis_dists = split_in_view.compute_inview_distances( + get_placeholder_points(p), + cameras, + dist_max=dist_cull, + vis_margin=vis_cull, + verbose=False, + ) + + dist = min_dists.min() + vis_dist = min_vis_dists.min() + + if not mask.any(): + logger.debug( + f"{p.name=} culled, not in view of any camera. {dist=} {vis_dist=}" + ) + continue + + results.append((p, dist, vis_dist)) + + return results + + def populate_collection( factory: AssetFactory, placeholder_col: bpy.types.Collection, + cameras, asset_col_target=None, - cameras=None, dist_cull=None, vis_cull=None, verbose=True, cache_system=None, **asset_kwargs, ): - logger.info(f"Populating placeholders for {factory}") - if asset_col_target is None: asset_col_target = butil.get_collection(f"unique_assets:{repr(factory)}") - all_objs = [] - updated_pholders = [] placeholders = [o for o in placeholder_col.objects if o.parent is None] - if verbose: - placeholders = tqdm(placeholders) - - for i, p in enumerate(placeholders): - classname, fac_seed, _, inst_seed = parse_asset_name(p.name) - if classname is None: - continue + if cameras is not None: + logger.info(f"Checking visibility for {placeholder_col.name=}") + targets = filter_populate_targets( + placeholders, cameras, dist_cull, vis_cull, verbose + ) + else: + targets = [(p, detail.scatter_res_distance(), 0) for p in placeholders] - if cameras is not None: - mask, min_dists, min_vis_dists = split_in_view.compute_inview_distances( - get_placeholder_points(p), cameras, verbose=verbose - ) + print( + f"Populating {len(targets)} placeholders for {factory=} out of {len(placeholders)} total" + ) - dist = min_dists.min() - vis_dist = min_vis_dists.min() + all_objs = [] + updated_pholders = [] - if not mask.any(): - logger.debug( - f"{p.name=} culled, not in view of any camera. 
{dist=} {vis_dist=}" - ) - continue + if verbose: + targets = tqdm(targets) - else: - dist = detail.scatter_res_distance() - vis_dist = 0 + for i, (p, dist, vis_dist) in enumerate(targets): + classname, inst_seed, *_ = parse_asset_name(p.name) if cache_system: if ( @@ -209,10 +240,12 @@ def populate_collection( cache_system.link_fire(full_sim_folder, sim_folder, obj, factory) else: break - else: - obj = factory.spawn_asset( - i, placeholder=p, distance=dist, vis_distance=vis_dist, **asset_kwargs - ) + + continue + + obj = factory.spawn_asset( + i, placeholder=p, distance=dist, vis_distance=vis_dist, **asset_kwargs + ) if p is not obj: p.hide_render = True @@ -268,11 +301,13 @@ def populate_all( ) continue + fac_inst = factory_class(int(fac_seed), **kwargs) + new_assets, pholders = populate_collection( - factory_class(int(fac_seed), **kwargs), - col, - asset_target_col, - camera=cameras, + fac_inst, + placeholder_col=col, + cameras=cameras, + asset_target_col=asset_target_col, dist_cull=dist_cull, vis_cull=vis_cull, cache_system=cache_system, diff --git a/infinigen/core/placement/split_in_view.py b/infinigen/core/placement/split_in_view.py index 56790b84..96ddaedd 100644 --- a/infinigen/core/placement/split_in_view.py +++ b/infinigen/core/placement/split_in_view.py @@ -149,12 +149,17 @@ def compute_inview_distances( bpy.context.scene.frame_set(frame) for cam in cameras: dists, vis_dists = compute_vis_dists(points, cam) - mask |= (dists < dist_max) & (vis_dists < vis_margin) - if mask.any(): - min_vis_dists[mask] = np.minimum(vis_dists[mask], min_vis_dists[mask]) - min_dists[mask] = np.minimum(dists[mask], min_dists[mask]) + frame_cam_mask = (dists < dist_max) & (vis_dists < vis_margin) - logger.debug(f"Computed dists for {frame=} {cam.name} {mask.mean()=:.2f}") + if frame_cam_mask.any(): + min_vis_dists[frame_cam_mask] = np.minimum( + vis_dists[frame_cam_mask], min_vis_dists[frame_cam_mask] + ) + min_dists[frame_cam_mask] = np.minimum( + dists[frame_cam_mask], min_dists[frame_cam_mask] + ) + + mask |= frame_cam_mask return mask, min_dists, min_vis_dists From a51592854cbb4c751df8a14f9ca6cf2e5392111c Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 14:56:21 -0500 Subject: [PATCH 7/9] Return score or None for animate cameras multicam wrapper --- infinigen/core/placement/camera.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/infinigen/core/placement/camera.py b/infinigen/core/placement/camera.py index a6f78780..076ac951 100644 --- a/infinigen/core/placement/camera.py +++ b/infinigen/core/placement/camera.py @@ -753,6 +753,8 @@ def animate_cameras( def anim_valid_camrig_pose_func(cam_rig: bpy.types.Object): assert len(cam_rig.children) > 0 + scores = [] + for cam in cam_rig.children: score = keep_cam_pose_proposal( cam, @@ -765,10 +767,15 @@ def anim_valid_camrig_pose_func(cam_rig: bpy.types.Object): **kwargs, ) + frame = bpy.context.scene.frame_current + logger.debug(f"Checking {cam.name=} {frame=} got {score=}") + if score is None: - return False + return None + + scores.append(score) - return True + return np.min(scores) for cam_rig in cam_rigs: if policy_registry is None: From 40ebe9296f958cf136c53f87b1edf80390e9fb8f Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 15:27:32 -0500 Subject: [PATCH 8/9] Touch slurm logfiles to avoid crash --- infinigen/datagen/manage_jobs.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/infinigen/datagen/manage_jobs.py b/infinigen/datagen/manage_jobs.py index 
9bef8200..cd8bbbba 100644 --- a/infinigen/datagen/manage_jobs.py +++ b/infinigen/datagen/manage_jobs.py @@ -346,9 +346,7 @@ def update_symlink(scene_folder, scenes): std_out = scene_folder / "logs" / f"{scene.job_id}_0_log.out" if not std_out.exists(): - raise FileNotFoundError( - f"{std_out=} does not exist during attempt to symlink from {to=}" - ) + std_out.touch() if os.path.islink(to): os.unlink(to) From 94d19c163f855e44189439220d9dd26640a65945 Mon Sep 17 00:00:00 2001 From: Alexander Raistrick Date: Mon, 6 Jan 2025 16:32:05 -0500 Subject: [PATCH 9/9] Fix blendergt step not set to 1h by slurm_1h.gin --- infinigen/datagen/configs/compute_platform/slurm_1h.gin | 1 + 1 file changed, 1 insertion(+) diff --git a/infinigen/datagen/configs/compute_platform/slurm_1h.gin b/infinigen/datagen/configs/compute_platform/slurm_1h.gin index dc3ed03e..e502184b 100644 --- a/infinigen/datagen/configs/compute_platform/slurm_1h.gin +++ b/infinigen/datagen/configs/compute_platform/slurm_1h.gin @@ -10,6 +10,7 @@ queue_coarse.hours = 1 queue_fine_terrain.hours = 1 queue_populate.hours = 1 queue_render.hours = 1 +ground_truth/queue_render.hours = 1 queue_mesh_save.hours = 1 queue_opengl.hours = 1
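The substance of patch 6 is twofold: `populate_collection` now does all camera-visibility culling up front in the new `filter_populate_targets`, explicitly forwarding `dist_max=dist_cull` and `vis_margin=vis_cull` (the old inline call passed neither, so `compute_inview_distances` ran with its own defaults and the cull thresholds were effectively ignored), and `compute_inview_distances` now updates its running minima under a per-frame, per-camera mask instead of the cumulative one. Below is a minimal numpy sketch of that second change; `accumulate_inview` and the sample arrays are hypothetical, not the library function, but the masking logic mirrors the patched loop:

```python
import numpy as np


def accumulate_inview(per_camera_dists, per_camera_vis_dists, dist_max, vis_margin):
    """Combine per-camera distances into an in-view mask and per-point minima."""
    n_points = per_camera_dists[0].shape[0]
    mask = np.zeros(n_points, dtype=bool)
    min_dists = np.full(n_points, np.inf)
    min_vis_dists = np.full(n_points, np.inf)

    for dists, vis_dists in zip(per_camera_dists, per_camera_vis_dists):
        # pass/fail for *this* camera only (frame_cam_mask in the patch)
        cam_mask = (dists < dist_max) & (vis_dists < vis_margin)
        if cam_mask.any():
            min_dists[cam_mask] = np.minimum(dists[cam_mask], min_dists[cam_mask])
            min_vis_dists[cam_mask] = np.minimum(
                vis_dists[cam_mask], min_vis_dists[cam_mask]
            )
        mask |= cam_mask  # cumulative "seen by at least one camera" mask

    return mask, min_dists, min_vis_dists


# Two cameras, two points: point 0 passes only camera 1, point 1 only camera 2.
cam_dists = [np.array([5.0, 40.0]), np.array([3.0, 8.0])]
cam_vis = [np.array([0.5, 9.0]), np.array([7.0, 0.2])]
print(accumulate_inview(cam_dists, cam_vis, dist_max=20.0, vis_margin=2.0))
# -> (array([ True,  True]), array([5., 8.]), array([0.5, 0.2]))
```

In this example, point 0 is seen only by the first camera; with the old cumulative-mask indexing its minimum distance would have been overwritten by the second camera's 3.0 even though that camera fails the visibility margin, which is the kind of spurious update the per-camera mask removes.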