@@ -31,6 +31,57 @@ def _create_id_to_name(name_to_id: Dict[str, int]) -> Dict[int, str]:
         """Create an id_to_name mapping from a name_to_id mapping."""
         return {id: name for name, id in name_to_id.items()}
 
+    @staticmethod
+    def _calculate_default_range(grid_extent: List[float]) -> float:
+        """Calculate the default range based on the model extent (room diagonal)."""
+        # Extract min and max coordinates
+        x_min, x_max = grid_extent[0], grid_extent[1]
+        y_min, y_max = grid_extent[2], grid_extent[3]
+        z_min, z_max = grid_extent[4], grid_extent[5]
+
+        # Calculate the room diagonal (Euclidean distance)
+        return np.sqrt((x_max - x_min)**2 + (y_max - y_min)**2 + (z_max - z_min)**2)
+
+    @staticmethod
+    def _calculate_default_grid_settings(surface_points: List[SurfacePoint], orientations: List[Orientation]) -> Dict[str, Any]:
+        """Calculate default grid settings based on data points.
+
+        Args:
+            surface_points: List of surface points
+            orientations: List of orientations
+
+        Returns:
+            Dict containing grid settings with default values
+        """
+        # Collect all x, y, z coordinates
+        all_x = [sp['x'] for sp in surface_points] + [ori['x'] for ori in orientations]
+        all_y = [sp['y'] for sp in surface_points] + [ori['y'] for ori in orientations]
+        all_z = [sp['z'] for sp in surface_points] + [ori['z'] for ori in orientations]
+
+        # Calculate extents
+        x_min, x_max = min(all_x), max(all_x)
+        y_min, y_max = min(all_y), max(all_y)
+        z_min, z_max = min(all_z), max(all_z)
+
+        # Calculate ranges
+        x_range = x_max - x_min
+        y_range = y_max - y_min
+        z_range = z_max - z_min
+
+        # Add 10% padding to each dimension
+        x_padding = x_range * 0.1
+        y_padding = y_range * 0.1
+        z_padding = z_range * 0.1
+
+        return {
+            "regular_grid_resolution": [10, 10, 10],
+            "regular_grid_extent": [
+                x_min - x_padding, x_max + x_padding,
+                y_min - y_padding, y_max + y_padding,
+                z_min - z_padding, z_max + z_padding
+            ]
+        }
+
     @staticmethod
     def load_model_from_json(file_path: str):
         """
@@ -60,71 +111,73 @@ def load_model_from_json(file_path: str):
         surface_names = []
         for series in data['series']:
             surface_names.extend(series['surfaces'])
-
-        # Use the id_name_mapping if available, otherwise create one from series data
-        if 'id_name_mapping' in data and data['id_name_mapping']:
-            name_to_id = data['id_name_mapping']['name_to_id']
-            id_to_name = JsonIO._create_id_to_name(name_to_id)
+
+        # Create ID to name mapping if not provided
+        if 'id_name_mapping' in data:
+            id_to_name = JsonIO._create_id_to_name(data['id_name_mapping']['name_to_id'])
         else:
-            # Create a mapping from surface points to their names
-            surface_point_names = {}
-            # First, create a mapping from surface names to their IDs
-            surface_id_map = {}
-            for series in data['series']:
-                for i, name in enumerate(series['surfaces']):
-                    # Find the first surface point with this name
-                    for sp in data['surface_points']:
-                        if sp['id'] not in surface_id_map:
-                            surface_id_map[sp['id']] = name
-                            break
+            # Create mapping from series data
+            id_to_name = {i: name for i, name in enumerate(surface_names)}
 
-            # Now create the mapping from IDs to names
-            for sp in data['surface_points']:
-                surface_point_names[sp['id']] = surface_id_map.get(sp['id'], f"surface_{sp['id']}")
-            id_to_name = surface_point_names
-            name_to_id = {name: id for id, name in id_to_name.items()}
+        # Create surface points table
+        surface_points_table = JsonIO._load_surface_points(data['surface_points'], id_to_name)
 
-        # Load surface points and orientations
-        surface_points = JsonIO._load_surface_points(data['surface_points'], id_to_name)
-        orientations = JsonIO._load_orientations(data['orientations'], id_to_name)
+        # Create orientations table
+        orientations_table = JsonIO._load_orientations(data['orientations'], id_to_name)
 
         # Create structural frame
-        structural_frame = StructuralFrame.from_data_tables(surface_points, orientations)
+        structural_frame = StructuralFrame.from_data_tables(surface_points_table, orientations_table)
+
+        # Get grid settings with defaults if not provided
+        grid_settings = data.get('grid_settings', JsonIO._calculate_default_grid_settings(data['surface_points'], data['orientations']))
 
         # Create grid
         grid = Grid(
-            extent=data['grid_settings']['regular_grid_extent'],
-            resolution=data['grid_settings']['regular_grid_resolution']
+            resolution=grid_settings['regular_grid_resolution'],
+            extent=grid_settings['regular_grid_extent']
         )
 
-        # Create interpolation options with kernel options
+        # Calculate default range based on model extent
+        default_range = JsonIO._calculate_default_range(grid_settings['regular_grid_extent'])
+
+        # Create interpolation options with defaults if not provided
         interpolation_options = InterpolationOptions(
-            range=data['interpolation_options'].get('kernel_options', {}).get('range', 1.7),
-            c_o=data['interpolation_options'].get('kernel_options', {}).get('c_o', 10),
-            mesh_extraction=data['interpolation_options'].get('mesh_extraction', True),
-            number_octree_levels=data['interpolation_options'].get('number_octree_levels', 1)
+            range=data.get('interpolation_options', {}).get('kernel_options', {}).get('range', default_range),
+            c_o=data.get('interpolation_options', {}).get('kernel_options', {}).get('c_o', 10),
+            mesh_extraction=data.get('interpolation_options', {}).get('mesh_extraction', True),
+            number_octree_levels=data.get('interpolation_options', {}).get('number_octree_levels', 1)
         )
 
-        # Create GeoModel
+        # Create GeoModel with default metadata if not provided
+        current_date = datetime.now().strftime("%Y-%m-%d")
+        model_name = data.get('metadata', {}).get('name', "GemPy Model")
+
         model = GeoModel(
-            name=data['metadata']['name'],
+            name=model_name,
             structural_frame=structural_frame,
             grid=grid,
             interpolation_options=interpolation_options
         )
 
         # Set the metadata with proper dates
         model_meta = GeoModelMeta(
-            name=data['metadata']['name'],
-            creation_date=data['metadata'].get('creation_date', datetime.now().isoformat()),
-            last_modification_date=data['metadata'].get('last_modification_date', datetime.now().isoformat()),
-            owner=data['metadata'].get('owner', None)
+            name=model_name,
+            creation_date=data.get('metadata', {}).get('creation_date', current_date),
+            last_modification_date=data.get('metadata', {}).get('last_modification_date', current_date),
+            owner=data.get('metadata', {}).get('owner', "GemPy Modeller")
         )
         model.meta = model_meta
 
         # Map series to surfaces with structural relations
         mapping_object = {series['name']: series['surfaces'] for series in data['series']}
-        map_stack_to_surfaces(model, mapping_object, series_data=data['series'])
+        # Ensure each series has structural_relation set to ERODE by default
+        series_data = []
+        for series in data['series']:
+            series_copy = series.copy()
+            if 'structural_relation' not in series_copy:
+                series_copy['structural_relation'] = 'ERODE'
+            series_data.append(series_copy)
+        map_stack_to_surfaces(model, mapping_object, series_data=series_data)
 
         # Set fault relations after structural groups are set up
         if 'fault_relations' in data and data['fault_relations'] is not None:
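
One behaviour worth noting in the hunk above: `dict.get(key, fallback)` evaluates its fallback eagerly, so `_calculate_default_grid_settings` runs even when `grid_settings` is present in the file. A minimal standalone demonstration (the `compute_default` function below is a hypothetical stand-in, not part of this module):

```python
def compute_default():
    print("computing default grid settings")
    return {"regular_grid_resolution": [10, 10, 10]}

data = {"grid_settings": {"regular_grid_resolution": [50, 50, 50]}}

# Prints "computing default grid settings" even though the key exists, because
# the fallback expression is evaluated before .get() is called.
settings = data.get("grid_settings", compute_default())
print(settings["regular_grid_resolution"])   # [50, 50, 50]

# A lazy alternative only computes the default when the key is actually missing.
settings = data["grid_settings"] if "grid_settings" in data else compute_default()
```
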
@@ -356,9 +409,8 @@ def _validate_json_schema(data: Dict[str, Any]) -> bool:
         Returns:
             bool: True if valid, False otherwise
         """
-        # Check required top-level keys
-        required_keys = {'metadata', 'surface_points', 'orientations', 'series',
-                         'grid_settings', 'interpolation_options'}
+        # Check required top-level keys (metadata, grid_settings, and interpolation_options are optional)
+        required_keys = {'surface_points', 'orientations', 'series'}
         if not all(key in data for key in required_keys):
             return False
 
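
With `metadata`, `grid_settings`, and `interpolation_options` now optional, the smallest input that passes this top-level check needs only the three keys below. This is a hedged sketch written as a Python dict rather than a JSON file: the per-record fields mirror only the checks visible in this diff (x/y/z plus integer `id` and `polarity`), and the parts of the validator not shown here may require additional fields.

```python
# Illustrative values only ("Strat_Series" and "surface_1" are hypothetical names).
minimal_data = {
    "surface_points": [
        {"x": 0.0, "y": 0.0, "z": 0.0, "id": 0},
        {"x": 100.0, "y": 50.0, "z": 20.0, "id": 0},
    ],
    "orientations": [
        {"x": 50.0, "y": 25.0, "z": 10.0, "id": 0, "polarity": 1},
    ],
    "series": [
        # structural_relation may be omitted; the loader defaults it to 'ERODE'
        {"name": "Strat_Series", "surfaces": ["surface_1"]},
    ],
}

required_keys = {'surface_points', 'orientations', 'series'}
assert all(key in minimal_data for key in required_keys)
```
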
@@ -387,17 +439,74 @@ def _validate_json_schema(data: Dict[str, Any]) -> bool:
                 return False
             if not isinstance(ori['id'], int):
                 return False
-            if not isinstance(ori['polarity'], int) or ori['polarity'] not in {-1, 1}:
+            if not isinstance(ori['polarity'], int):
                 return False
 
-        # Validate id_name_mapping if present
-        if 'id_name_mapping' in data:
-            mapping = data['id_name_mapping']
-            if not isinstance(mapping, dict):
+        # Validate series
+        if not isinstance(data['series'], list):
+            return False
+
+        for series in data['series']:
+            # Only name and surfaces are required
+            required_series_keys = {'name', 'surfaces'}
+            if not all(key in series for key in required_series_keys):
+                return False
+            if not isinstance(series['name'], str):
+                return False
+            if not isinstance(series['surfaces'], list):
+                return False
+            # Validate optional fields if present
+            if 'structural_relation' in series and not isinstance(series['structural_relation'], str):
+                return False
+            if 'colors' in series:
+                if not isinstance(series['colors'], list):
+                    return False
+                if not all(isinstance(color, str) for color in series['colors']):
+                    return False
+
+        # Validate grid settings if present
+        if 'grid_settings' in data:
+            if not isinstance(data['grid_settings'], dict):
+                return False
+
+            required_grid_keys = {'regular_grid_resolution', 'regular_grid_extent'}
+            if not all(key in data['grid_settings'] for key in required_grid_keys):
+                return False
+
+            if not isinstance(data['grid_settings']['regular_grid_resolution'], list):
+                return False
+            if not isinstance(data['grid_settings']['regular_grid_extent'], list):
+                return False
+
+        # Validate interpolation options if present
+        if 'interpolation_options' in data:
+            if not isinstance(data['interpolation_options'], dict):
+                return False
+            if 'kernel_options' in data['interpolation_options']:
+                kernel_options = data['interpolation_options']['kernel_options']
+                if not isinstance(kernel_options, dict):
+                    return False
+                if 'range' in kernel_options and not isinstance(kernel_options['range'], (int, float)):
+                    return False
+                if 'c_o' in kernel_options and not isinstance(kernel_options['c_o'], (int, float)):
+                    return False
+            if 'mesh_extraction' in data['interpolation_options'] and not isinstance(data['interpolation_options']['mesh_extraction'], bool):
+                return False
+            if 'number_octree_levels' in data['interpolation_options'] and not isinstance(data['interpolation_options']['number_octree_levels'], int):
+                return False
+
+        # Validate metadata if present
+        if 'metadata' in data:
+            metadata = data['metadata']
+            if not isinstance(metadata, dict):
+                return False
+            if 'name' in metadata and not isinstance(metadata['name'], str):
+                return False
+            if 'creation_date' in metadata and not isinstance(metadata['creation_date'], str):
                 return False
-            if 'name_to_id' not in mapping:
+            if 'last_modification_date' in metadata and not isinstance(metadata['last_modification_date'], str):
                 return False
-            if not isinstance(mapping['name_to_id'], dict):
+            if 'owner' in metadata and not isinstance(metadata['owner'], str):
                 return False
 
         return True
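
Every optional section above follows the same contract: absent is fine, present must be well-formed. A condensed standalone sketch of that pattern (the `check_optional` and `valid_grid_settings` helpers are hypothetical, written only to illustrate the shape of these checks, not functions of this module):

```python
from typing import Any, Callable, Dict

def check_optional(data: Dict[str, Any], key: str, is_valid: Callable[[Any], bool]) -> bool:
    """Return True when `key` is absent, or present and passing `is_valid`."""
    return key not in data or is_valid(data[key])

def valid_grid_settings(gs: Any) -> bool:
    return (
        isinstance(gs, dict)
        and {'regular_grid_resolution', 'regular_grid_extent'} <= gs.keys()
        and isinstance(gs['regular_grid_resolution'], list)
        and isinstance(gs['regular_grid_extent'], list)
    )

print(check_optional({}, 'grid_settings', valid_grid_settings))                     # True: absent is fine
print(check_optional({'grid_settings': []}, 'grid_settings', valid_grid_settings))  # False: wrong shape
print(check_optional(
    {'grid_settings': {'regular_grid_resolution': [10, 10, 10],
                       'regular_grid_extent': [0, 10, 0, 10, 0, 10]}},
    'grid_settings', valid_grid_settings,
))                                                                                  # True
```
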