import json
from typing import Dict, Any, Optional, List
import numpy as np
+from datetime import datetime

from .schema import SurfacePoint, Orientation, GemPyModelJson
from gempy_engine.core.data.stack_relation_type import StackRelationType


class JsonIO:
    """Class for handling JSON I/O operations for GemPy models."""

+    @staticmethod
+    def _numpy_to_list(obj):
+        """Convert numpy arrays to lists for JSON serialization."""
+        if isinstance(obj, np.ndarray):
+            return obj.tolist()
+        elif isinstance(obj, np.integer):
+            return int(obj)
+        elif isinstance(obj, np.floating):
+            return float(obj)
+        return obj
+
    @staticmethod
    def load_model_from_json(file_path: str):
        """
@@ -44,12 +56,16 @@ def load_model_from_json(file_path: str):
        for series in data['series']:
            surface_names.extend(series['surfaces'])

-        # Create id to name mapping
-        id_to_name = {i: name for i, name in enumerate(surface_names)}
-
+        # Create a mapping from surface points to their names
+        surface_point_names = {}
+        for sp in data['surface_points']:
+            surface_point_names[sp['id']] = next((name for series in data['series']
+                                                  for name in series['surfaces']
+                                                  if name in surface_names), "surface_0")
+
        # Load surface points and orientations
-        surface_points = JsonIO._load_surface_points(data['surface_points'], id_to_name)
-        orientations = JsonIO._load_orientations(data['orientations'], id_to_name)
+        surface_points = JsonIO._load_surface_points(data['surface_points'], surface_point_names)
+        orientations = JsonIO._load_orientations(data['orientations'], surface_point_names)

        # Create structural frame
        structural_frame = StructuralFrame.from_data_tables(surface_points, orientations)
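
For orientation, the mapping above expects 'series' and 'surface_points' entries shaped roughly as in this sketch (names and values are illustrative, not taken from a real model):

    # Hypothetical input fragment consumed by the new name mapping
    data = {
        "series": [
            {"name": "Stack_1", "surfaces": ["surface_1", "surface_2"]}
        ],
        "surface_points": [
            {"x": 0.0, "y": 0.0, "z": 0.0, "id": 0, "nugget": 0.0}
        ],
    }
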
@@ -60,20 +76,12 @@ def load_model_from_json(file_path: str):
            resolution=data['grid_settings']['regular_grid_resolution']
        )

-        # Create interpolation options
+        # Create interpolation options with kernel options
        interpolation_options = InterpolationOptions(
-            range=1.7,  # Default value
-            c_o=10,  # Default value
-            mesh_extraction=True,  # Default value
-            number_octree_levels=1  # Default value
-        )
-
-        # Create GeoModelMeta with all metadata fields
-        model_meta = GeoModelMeta(
-            name=data['metadata']['name'],
-            creation_date=data['metadata'].get('creation_date', None),
-            last_modification_date=data['metadata'].get('last_modification_date', None),
-            owner=data['metadata'].get('owner', None)
+            range=data['interpolation_options'].get('kernel_options', {}).get('range', 1.7),
+            c_o=data['interpolation_options'].get('kernel_options', {}).get('c_o', 10),
+            mesh_extraction=data['interpolation_options'].get('mesh_extraction', True),
+            number_octree_levels=data['interpolation_options'].get('number_octree_levels', 1)
        )

        # Create GeoModel
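
A minimal sketch of the 'interpolation_options' block those .get() chains read; every key may be omitted and falls back to the defaults shown above (values here simply repeat those defaults, written as a Python dict):

    # Hypothetical fragment mirroring the keys read above
    data["interpolation_options"] = {
        "kernel_options": {"range": 1.7, "c_o": 10},
        "mesh_extraction": True,
        "number_octree_levels": 1,
    }
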
@@ -84,13 +92,25 @@ def load_model_from_json(file_path: str):
            interpolation_options=interpolation_options
        )

-        # Set the metadata
+        # Set the metadata with proper dates
+        model_meta = GeoModelMeta(
+            name=data['metadata']['name'],
+            creation_date=data['metadata'].get('creation_date', datetime.now().isoformat()),
+            last_modification_date=data['metadata'].get('last_modification_date', datetime.now().isoformat()),
+            owner=data['metadata'].get('owner', None)
+        )
        model.meta = model_meta

        # Map series to surfaces with structural relations
        mapping_object = {series['name']: series['surfaces'] for series in data['series']}
        map_stack_to_surfaces(model, mapping_object, series_data=data['series'])

+        # Set fault relations after structural groups are set up
+        if 'fault_relations' in data and data['fault_relations'] is not None:
+            fault_relations = np.array(data['fault_relations'])
+            if fault_relations.shape == (len(model.structural_frame.structural_groups), len(model.structural_frame.structural_groups)):
+                model.structural_frame.fault_relations = fault_relations
+
        return model

    @staticmethod
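
The shape check above means 'fault_relations' is expected to be a square matrix with one row and column per structural group; a sketch for two groups, assuming the usual GemPy convention that entry [i, j] flags whether fault group i offsets group j:

    # Hypothetical two-group example; anything with the wrong shape is silently skipped
    data["fault_relations"] = [
        [0, 1],
        [0, 0],
    ]
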
@@ -112,7 +132,7 @@ def _load_surface_points(surface_points_data: List[SurfacePoint], id_to_name: Di
        from gempy.core.data.surface_points import SurfacePointsTable

        # Validate data structure
-        required_fields = {'x', 'y', 'z', 'id', 'nugget'}
+        required_fields = {'x', 'y', 'z', 'nugget', 'id'}  # Add 'id' back to required fields
        for i, sp in enumerate(surface_points_data):
            missing_fields = required_fields - set(sp.keys())
            if missing_fields:
@@ -128,21 +148,16 @@ def _load_surface_points(surface_points_data: List[SurfacePoint], id_to_name: Di
        x = np.array([sp['x'] for sp in surface_points_data])
        y = np.array([sp['y'] for sp in surface_points_data])
        z = np.array([sp['z'] for sp in surface_points_data])
-        ids = np.array([sp['id'] for sp in surface_points_data])
        nugget = np.array([sp['nugget'] for sp in surface_points_data])
-
-        # Create name_id_map from unique IDs
-        unique_ids = np.unique(ids)
-        name_id_map = {id_to_name[id]: id for id in unique_ids}
+        names = [id_to_name.get(sp['id'], "surface_0") for sp in surface_points_data]

        # Create SurfacePointsTable
        return SurfacePointsTable.from_arrays(
            x=x,
            y=y,
            z=z,
-            names=[id_to_name[id] for id in ids],
-            nugget=nugget,
-            name_id_map=name_id_map
+            names=names,
+            nugget=nugget
        )

    @staticmethod
@@ -164,7 +179,7 @@ def _load_orientations(orientations_data: List[Orientation], id_to_name: Dict[in
        from gempy.core.data.orientations import OrientationsTable

        # Validate data structure
-        required_fields = {'x', 'y', 'z', 'G_x', 'G_y', 'G_z', 'id', 'nugget', 'polarity'}
+        required_fields = {'x', 'y', 'z', 'G_x', 'G_y', 'G_z', 'nugget', 'polarity', 'id'}  # Add 'id' back to required fields
        for i, ori in enumerate(orientations_data):
            missing_fields = required_fields - set(ori.keys())
            if missing_fields:
@@ -173,10 +188,10 @@ def _load_orientations(orientations_data: List[Orientation], id_to_name: Dict[in
            # Validate data types
            if not all(isinstance(ori[field], (int, float)) for field in ['x', 'y', 'z', 'G_x', 'G_y', 'G_z', 'nugget']):
                raise ValueError(f"Invalid data type in orientation {i}. All coordinates, gradients, and nugget must be numeric.")
+            if not isinstance(ori.get('polarity', 1), int) or ori.get('polarity', 1) not in {-1, 1}:
+                raise ValueError(f"Invalid polarity in orientation {i}. Must be 1 (normal) or -1 (reverse).")
            if not isinstance(ori['id'], int):
                raise ValueError(f"Invalid data type in orientation {i}. ID must be an integer.")
-            if not isinstance(ori['polarity'], int) or ori['polarity'] not in {-1, 1}:
-                raise ValueError(f"Invalid polarity in orientation {i}. Must be 1 (normal) or -1 (reverse).")

        # Extract coordinates and other data
        x = np.array([ori['x'] for ori in orientations_data])
@@ -185,20 +200,16 @@ def _load_orientations(orientations_data: List[Orientation], id_to_name: Dict[in
        G_x = np.array([ori['G_x'] for ori in orientations_data])
        G_y = np.array([ori['G_y'] for ori in orientations_data])
        G_z = np.array([ori['G_z'] for ori in orientations_data])
-        ids = np.array([ori['id'] for ori in orientations_data])
        nugget = np.array([ori['nugget'] for ori in orientations_data])
+        names = [id_to_name.get(ori['id'], "surface_0") for ori in orientations_data]

        # Apply polarity to gradients
        for i, ori in enumerate(orientations_data):
-            if ori['polarity'] == -1:
+            if ori.get('polarity', 1) == -1:
                G_x[i] *= -1
                G_y[i] *= -1
                G_z[i] *= -1

-        # Create name_id_map from unique IDs
-        unique_ids = np.unique(ids)
-        name_id_map = {id_to_name[id]: id for id in unique_ids}
-
        # Create OrientationsTable
        return OrientationsTable.from_arrays(
            x=x,
@@ -207,9 +218,8 @@ def _load_orientations(orientations_data: List[Orientation], id_to_name: Dict[in
            G_x=G_x,
            G_y=G_y,
            G_z=G_z,
-            names=[id_to_name[id] for id in ids],
-            nugget=nugget,
-            name_id_map=name_id_map
+            names=names,
+            nugget=nugget
        )

    @staticmethod
@@ -233,11 +243,19 @@ def save_model_to_json(model, file_path: str) -> None:
            "orientations": [],
            "series": [],
            "grid_settings": {
-                "regular_grid_resolution": model.grid._dense_grid.resolution.tolist(),
-                "regular_grid_extent": model.grid._dense_grid.extent.tolist(),
+                "regular_grid_resolution": JsonIO._numpy_to_list(model.grid._dense_grid.resolution),
+                "regular_grid_extent": JsonIO._numpy_to_list(model.grid._dense_grid.extent),
                "octree_levels": None  # TODO: Add octree levels if needed
            },
-            "interpolation_options": {}
+            "interpolation_options": {
+                "kernel_options": {
+                    "range": float(model.interpolation_options.kernel_options.range),
+                    "c_o": float(model.interpolation_options.kernel_options.c_o)
+                },
+                "mesh_extraction": bool(model.interpolation_options.mesh_extraction),
+                "number_octree_levels": int(model.interpolation_options.number_octree_levels)
+            },
+            "fault_relations": JsonIO._numpy_to_list(model.structural_frame.fault_relations) if hasattr(model.structural_frame, 'fault_relations') else None
        }

        # Get series and surface information
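
Taken together, a round trip with the two public entry points in this diff could look roughly like this (file paths are hypothetical):

    # Hedged usage sketch: read a model description, then write it back out
    model = JsonIO.load_model_from_json("model.json")
    JsonIO.save_model_to_json(model, "model_roundtrip.json")

    # _numpy_to_list can also be used directly when dumping stray numpy values
    import json
    import numpy as np
    print(json.dumps({"extent": JsonIO._numpy_to_list(np.array([0, 10, 0, 10, 0, 5]))}))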