Skip to content

Commit e05fa3c

Browse files
Actually activate the mock dataparser correctly
1 parent 82219cb commit e05fa3c

File tree

1 file changed

+21
-10
lines changed

1 file changed

+21
-10
lines changed

nerfstudio/utils/eval_utils.py

Lines changed: 21 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -44,18 +44,23 @@ def patch_config_for_mock_data(config: TrainerConfig) -> TrainerConfig:
4444
Returns:
4545
Modified config that uses mock data if original data is not available
4646
"""
47-
# Check if the data path exists
48-
data_path = config.pipeline.datamanager.dataparser.data
47+
# Determine the actual data path that will be used by the dataparser
48+
dataparser_data = config.pipeline.datamanager.dataparser.data
49+
datamanager_data = getattr(config.pipeline.datamanager, 'data', None)
4950

50-
if not data_path.exists():
51-
CONSOLE.print(f"[yellow]Original data path {data_path} not found. Using mock data for inference.[/yellow]")
51+
# The dataparser will use its own data field if it's meaningful, otherwise it inherits from datamanager
52+
if dataparser_data and str(dataparser_data) != "." and dataparser_data.name != "":
53+
actual_data_path = dataparser_data
54+
else:
55+
actual_data_path = datamanager_data
56+
57+
# Check if the actual data path exists
58+
if not actual_data_path or not actual_data_path.exists():
59+
CONSOLE.print(f"[yellow]Original data path {actual_data_path} not found. Using mock data for inference.[/yellow]")
5260

5361
# Replace the dataparser with MockDataParserConfig
5462
config.pipeline.datamanager.dataparser = MockDataParserConfig()
55-
56-
# If the datamanager has a data field, update it too
57-
if hasattr(config.pipeline.datamanager, 'data'):
58-
config.pipeline.datamanager.data = data_path # Keep original for reference
63+
CONSOLE.print("[green]Successfully switched to mock dataparser for inference.[/green]")
5964

6065
return config
6166

@@ -129,8 +134,14 @@ def eval_setup(
129134
config = patch_config_for_mock_data(config)
130135

131136
# load checkpoints from wherever they were saved
132-
# TODO: expose the ability to choose an arbitrary checkpoint
133-
config.load_dir = config.get_checkpoint_dir()
137+
# For shared checkpoints, the checkpoints should be relative to the config file location
138+
config_dir = config_path.parent
139+
expected_checkpoint_dir = config_dir / "nerfstudio_models"
140+
if expected_checkpoint_dir.exists():
141+
config.load_dir = expected_checkpoint_dir
142+
else:
143+
# Fallback to the original behavior
144+
config.load_dir = config.get_checkpoint_dir()
134145

135146
# setup pipeline (which includes the DataManager)
136147
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

0 commit comments

Comments
 (0)