+import json
+
+import numpy as np
+
from ...core.data import GeoModel
from ...core.data.encoders.converters import loading_model_injection
+from ...optional_dependencies import require_zlib


def save_model(model: GeoModel, path: str):
+    zlib = require_zlib()

    # TODO: Serialize to json
    model_json = model.model_dump_json(by_alias=True, indent=4)

    # TODO: Serialize to binary
-    sp_binary = model.structural_frame.surface_points_copy.data.tobytes()
+    data: np.ndarray = model.structural_frame.surface_points_copy.data
+    sp_binary = data.tobytes()
    ori_binary = model.structural_frame.orientations_copy.data.tobytes()
+
+    # Compress the binary data
+    compressed_binary = zlib.compress(sp_binary + ori_binary)
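+    # Both arrays go into a single zlib stream; "sp_length" below records where to split them on load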
+
+    # Add compression info to metadata
+    model_dict = json.loads(model_json)
+    model_dict["_binary_metadata"] = {
+        "sp_shape": data.shape,
+        "sp_dtype": str(data.dtype),
+        "ori_shape": model.structural_frame.orientations_copy.data.shape,
+        "ori_dtype": str(model.structural_frame.orientations_copy.data.dtype),
+        "compression": "zlib",
+        "sp_length": len(sp_binary)  # Need this to split the arrays after decompression
+    }
+    # Re-serialize the header so the metadata is written to the file alongside the model JSON
+    model_json = json.dumps(model_dict, indent=4)

    # TODO: Putting both together
-    binary_file = _to_binary(model_json, sp_binary + ori_binary)
+    binary_file = _to_binary(model_json, compressed_binary)
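+    # File layout: 4-byte header length, JSON header, zlib-compressed binary body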
    with open(path, 'wb') as f:
        f.write(binary_file)

@@ -25,22 +46,35 @@ def load_model(path: str) -> GeoModel:

    # Split header and body
    header_json = binary_file[4:4 + header_length].decode('utf-8')
-    body = binary_file[4 + header_length:]
+    header_dict = json.loads(header_json)

-    # Split body into surface points and orientations
-    # They are equal size so we can split in half
-    sp_binary = body[:len(body) // 2]
-    ori_binary = body[len(body) // 2:]
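+    # The metadata written by save_model carries the shapes, dtypes and split point of the two arrays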
+    metadata = header_dict.pop("_binary_metadata")
+
+    # Decompress the binary data
+    ori_data, sp_data = _decompress_binary_data(binary_file, header_length, metadata)

    with loading_model_injection(
-            surface_points_binary=sp_binary,
-            orientations_binary=ori_binary
+            surface_points_binary=sp_data,
+            orientations_binary=ori_data
    ):
        model = GeoModel.model_validate_json(header_json)

    return model


+def _decompress_binary_data(binary_file, header_length, metadata):
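+    # Decompress the stored body and rebuild the surface-points and orientations arrays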
+    zlib = require_zlib()
+    body = binary_file[4 + header_length:]
+    decompressed_binary = zlib.decompress(body)
+    # Split the decompressed data using the stored length
+    sp_binary = decompressed_binary[:metadata["sp_length"]]
+    ori_binary = decompressed_binary[metadata["sp_length"]:]
+    # Reconstruct arrays
+    sp_data = np.frombuffer(sp_binary, dtype=np.dtype(metadata["sp_dtype"])).reshape(metadata["sp_shape"])
+    ori_data = np.frombuffer(ori_binary, dtype=np.dtype(metadata["ori_dtype"])).reshape(metadata["ori_shape"])
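+    # Note: np.frombuffer returns read-only views over the decompressed buffer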
+    return ori_data, sp_data
+
+
def _to_binary(header_json, body_) -> bytes:
    header_json_bytes = header_json.encode('utf-8')
    header_json_length = len(header_json_bytes)