Skip to content

Commit 0ffdeb8

Browse files
committed
Rail asset exporter
1 parent 2e52b43 commit 0ffdeb8

File tree

2 files changed

+334
-1
lines changed

2 files changed

+334
-1
lines changed

__init__.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
"author" : "bii",
1717
"description" : "",
1818
"blender" : (2, 80, 0),
19-
"version" : (0, 0, 2),
19+
"version" : (0, 0, 3),
2020
"location" : "",
2121
"warning" : "",
2222
"category" : "BII Tools"
@@ -29,6 +29,7 @@
2929
from . import add_ifc_property
3030
from . import clean_reduce_ifc
3131
from . import upgrade_to_IFC4
32+
from . import export_rail_asset
3233

3334
def register():
3435
close_mesh_holes.register()
@@ -38,6 +39,7 @@ def register():
3839
add_ifc_property.register()
3940
clean_reduce_ifc.register()
4041
# upgrade_to_IFC4.register()
42+
export_rail_asset.register()
4143

4244
def unregister():
4345
close_mesh_holes.unregister()
@@ -47,6 +49,7 @@ def unregister():
4749
add_ifc_property.unregister()
4850
clean_reduce_ifc.unregister()
4951
# upgrade_to_IFC4.unregister()
52+
export_rail_asset.unregister()
5053

5154
if __name__ == "__main__":
5255
register()

export_rail_asset.py

Lines changed: 330 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,330 @@
1+
import uuid
2+
import bpy
3+
import json
4+
import zipfile
5+
import io
6+
import os
7+
import math
8+
import bmesh
9+
from mathutils import Vector
10+
from math import degrees
11+
from bpy_extras.io_utils import ExportHelper
12+
from collections import defaultdict, deque
13+
14+
def menu_func_export(self, context):
    # Draw callback appended to File > Export: adds the .dasset entry
    # that invokes ExportDProBRailAssetOperator.
    self.layout.operator(ExportDProBRailAssetOperator.bl_idname, text="dProB Rail Asset (.dasset)")
16+
17+
def register():
    # Register the operator class first, then hook it into the export menu.
    bpy.utils.register_class(ExportDProBRailAssetOperator)
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
20+
21+
def unregister():
    # Tear down in reverse order of register(): menu entry first, class last.
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
    bpy.utils.unregister_class(ExportDProBRailAssetOperator)
24+
25+
def resample_and_polniearize(obj, handle_distance, merge_threshold):
    """
    Extract evenly resampled polylines ("rails") from an object.

    Duplicates 'obj', strips any curve profile, converts the duplicate to a
    mesh (baking all modifiers), extracts its edge graph as polylines,
    resamples each at roughly 'handle_distance' spacing, and prepends/appends
    one linearly extrapolated margin point on each end. The duplicate is
    deleted before returning.

    :param obj: source bpy object (curve or mesh; curves have bevel/taper removed)
    :param handle_distance: target spacing between resampled points
    :param merge_threshold: vertex-weld distance used during polyline extraction
    :return: list of polylines, each a list of Vector points including the
             two extrapolated margin points

    NOTE(review): relies on bpy.ops selection/active-object state and on
    obj.users_collection[0] existing — assumes the object is linked to at
    least one collection; confirm for orphaned objects.
    """
    # 1. Duplicate the object and its data
    clone = obj.copy()
    clone.data = obj.data.copy()
    obj.users_collection[0].objects.link(clone)

    # 2. If it's a curve, remove any bevel/taper/profile settings so the
    #    mesh conversion yields a bare wire instead of a tube/ribbon.
    if clone.type == 'CURVE':
        cd = clone.data
        # zero out bevel depth and resolution
        cd.bevel_depth = 0.0
        cd.bevel_resolution = 0
        # disable any extrude, taper, or bevel objects
        cd.extrude = 0.0
        cd.taper_object = None
        cd.bevel_object = None
        cd.offset = 0.0

    # 3. Select & activate the clone (bpy.ops operates on the active selection)
    bpy.ops.object.select_all(action='DESELECT')
    clone.select_set(True)
    bpy.context.view_layer.objects.active = clone

    # 4. Convert to mesh (bakes all modifiers, and now there's no curve profile)
    bpy.ops.object.convert(target='MESH')

    # Bake the object transform so extracted coordinates are in world space.
    bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)

    raw_polylines = extract_polylines_from_mesh(clone.data, distance=merge_threshold)

    sampled_polylines = [
        resample_polyline_at_fixed_interval(pl, target_step=handle_distance)
        for pl in raw_polylines
    ]

    # Add one extrapolated margin point at each end (mirror of the second
    # point about the first, and of the second-to-last about the last).
    rails = []
    for poly in sampled_polylines:
        margin_in = 2 * poly[0] - poly[1]
        margin_out = 2 * poly[-1] - poly[-2]
        rails.append([margin_in] + poly + [margin_out])

    # Remove the temporary duplicate.
    bpy.data.objects.remove(clone, do_unlink=True)
    return rails
73+
74+
def extract_polylines_from_mesh(mesh, sharp_angle_threshold=90, distance=1e-1):
    """
    Decompose the edge graph of a Mesh datablock into polylines.

    Near-coincident vertices are first welded within 'distance'. Chains are
    split at vertices of valence != 2 and at corners turning more than
    'sharp_angle_threshold' degrees.

    :param mesh: Mesh datablock whose edges form one or more 1D graphs
    :param sharp_angle_threshold: maximum turn angle (degrees) before splitting
    :param distance: weld distance passed to bmesh remove_doubles
    :return: list of polylines, each a list of Vector coordinate copies
    """
    bm = bmesh.new()
    bm.from_mesh(mesh)
    bm.verts.ensure_lookup_table()

    # Weld near-coincident vertices so touching chain ends connect.
    bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=distance)
    # Indices may have shifted after the weld; refresh the lookup table once
    # here instead of on every walk step.
    bm.verts.ensure_lookup_table()

    # Build adjacency map: vertex index -> set of connected vertex indices
    adjacency = {}
    for edge in bm.edges:
        v1, v2 = edge.verts[0].index, edge.verts[1].index
        adjacency.setdefault(v1, set()).add(v2)
        adjacency.setdefault(v2, set()).add(v1)

    # Build edge set for tracking which edges we've used
    unused_edges = {frozenset((e.verts[0].index, e.verts[1].index)) for e in bm.edges}
    polylines = []

    def walk(start_idx, prev_idx=None):
        """Follow a chain of valence-2 vertices starting at start_idx."""
        path = [start_idx]
        current = start_idx

        while True:
            neighbors = [v for v in adjacency[current] if v != prev_idx]
            if len(neighbors) != 1:
                break  # endpoint or junction: the chain ends here

            next_idx = neighbors[0]
            edge_key = frozenset((current, next_idx))
            if edge_key not in unused_edges:
                break  # closed loop: this edge was already consumed

            # Check the turn angle at the current vertex; split at sharp turns.
            if prev_idx is not None:
                v_prev = bm.verts[prev_idx].co
                v_curr = bm.verts[current].co
                v_next = bm.verts[next_idx].co

                dir1 = (v_curr - v_prev).normalized()
                dir2 = (v_next - v_curr).normalized()

                # BUGFIX: the fallback must be a number, not the builtin
                # `any` — Vector.angle returns the fallback when a direction
                # is zero-length, and degrees(any) would raise TypeError.
                angle = dir1.angle(dir2, 0.0)
                if degrees(angle) > sharp_angle_threshold:
                    break  # split at sharp turn

            unused_edges.remove(edge_key)
            path.append(next_idx)
            prev_idx, current = current, next_idx

        return path

    # 1. Handle endpoints first (valence 1) so open chains come out whole.
    endpoints = [vid for vid, nbrs in adjacency.items() if len(nbrs) == 1]
    for ep in endpoints:
        for neighbor in adjacency[ep]:
            edge_key = frozenset((ep, neighbor))
            if edge_key in unused_edges:
                unused_edges.remove(edge_key)
                path = walk(neighbor, prev_idx=ep)
                polylines.append([ep] + path)

    # 2. Handle remaining edges (closed loops where every vertex has valence 2)
    while unused_edges:
        edge = next(iter(unused_edges))
        unused_edges.remove(edge)
        pair = tuple(edge)
        if len(pair) == 1:
            # Degenerate self-loop edge (both ends welded to one vertex):
            # nothing to trace, and unpacking would raise ValueError.
            continue
        a, b = pair
        forward = walk(b, prev_idx=a)
        backward = walk(a, prev_idx=b)
        backward.reverse()
        # backward ends with 'a'; drop it so 'a' appears exactly once.
        full = backward[:-1] + [a] + forward
        polylines.append(full)

    # Convert vertex indices to coordinate copies (bm is freed below).
    result = []
    bm.verts.ensure_lookup_table()
    for poly in polylines:
        coords = [bm.verts[i].co.copy() for i in poly]
        result.append(coords)

    bm.free()
    return result
162+
163+
def resample_polyline_at_fixed_interval(polyline, target_step=5.0):
    """
    Resample an ordered polyline at a constant spacing.

    The first and last points are always included, the spacing between
    consecutive samples is constant, and that constant is
    total_length / round(total_length / target_step) — the value nearest
    'target_step' that partitions the total length exactly.

    :param polyline: list of Vector-like points (must support '-',
        '.length', '.lerp' and '.copy')
    :param target_step: preferred spacing in scene units
    :return: list of resampled points (fresh copies)
    """
    if len(polyline) < 2:
        # Degenerate input: nothing to resample.
        return [p.copy() for p in polyline]

    # 1. Per-segment lengths and the total arc length.
    seg_lens = [(polyline[i + 1] - polyline[i]).length
                for i in range(len(polyline) - 1)]
    total_length = sum(seg_lens)

    # 2. Number of intervals (at least 1), then recompute the step so it
    #    partitions total_length exactly.
    count = max(1, int(round(total_length / target_step)))
    step = total_length / count

    resampled = [polyline[0].copy()]

    # 3. Walk through target distances 1*step, 2*step, ..., (count-1)*step.
    seg_idx = 0
    seg_acc = 0.0  # arc length consumed before segment seg_idx

    for n in range(1, count):
        target_d = n * step
        # Advance seg_idx until the current segment contains target_d.
        while seg_idx < len(seg_lens) and (seg_acc + seg_lens[seg_idx]) < target_d:
            seg_acc += seg_lens[seg_idx]
            seg_idx += 1
        if seg_idx >= len(seg_lens):
            # Float accumulation exhausted the segments. The exact last
            # point is appended once below; stop instead of appending it
            # repeatedly (the original 'continue' could emit duplicates).
            break

        # target_d lies within segment seg_idx between polyline[seg_idx]
        # and polyline[seg_idx + 1]; interpolate linearly.
        local_t = (target_d - seg_acc) / seg_lens[seg_idx]
        resampled.append(polyline[seg_idx].lerp(polyline[seg_idx + 1], local_t))

    # 4. Finally append the exact last point.
    resampled.append(polyline[-1].copy())

    return resampled
220+
221+
class ExportDProBRailAssetOperator(bpy.types.Operator, ExportHelper):
    """Export curve objects as a dProB Rail Asset (.dasset) zip archive.

    The archive written to disk contains:
      - model.json: asset name, geolocation, and resampled rail splines
      - dProB_asset_metadata.json: fixed format/version stamp
      - asset_guid.txt: a freshly generated UUID
    """

    bl_idname = "export_scene.dprobrailasset"
    bl_label = "Export dProB Rail Asset (.dasset)"
    bl_description = 'Export splines in dProB Rail Asset format'
    # ExportHelper appends this extension in the file-select dialog.
    filename_ext = ".dasset"

    # When True, export the current selection; otherwise every curve object.
    export_selected_only: bpy.props.BoolProperty(
        name="Selected Only",
        description="Export only selected objects (vs all curve objects)",
        default=True,
    )

    # Geographic reference point written into model.json (and optionally
    # subtracted from every exported coordinate, see apply_geolocation).
    east: bpy.props.FloatProperty(
        name="East GeoLocation",
        default=0,
        step=1,
    )

    north: bpy.props.FloatProperty(
        name="North GeoLocation",
        default=0,
        step=1,
    )

    elevation: bpy.props.FloatProperty(
        name="Elevation GeoLocation",
        default=0,
        step=1,
    )

    apply_geolocation: bpy.props.BoolProperty(
        name="Subtract geolocation from coordinates",
        default=True,
    )

    # Target spacing between resampled spline handles (scene units).
    handle_distance: bpy.props.FloatProperty(
        name="Resolution",
        min=1,
        default=5,
        step=1,
    )

    # Vertex-weld distance used when extracting polylines from the mesh.
    merge_threshold: bpy.props.FloatProperty(
        name="Merge Threshold",
        default=0.1,
        step=0.05,
    )

    @classmethod
    def description(cls, context, properties):
        # Tooltip text shown in the UI.
        return "Export splines in dProB Rail Asset format"

    def execute(self, context):
        # NOTE(review): this guard cancels whenever the active area is a file
        # browser — confirm it does not interfere with the normal
        # ExportHelper invoke/execute flow.
        if context.space_data is None or context.space_data.type == 'FILE_BROWSER':
            return {'CANCELLED'}
        if not self.filepath:
            self.report({'ERROR'}, "No filepath provided.")
            return {'CANCELLED'}

        # Collect objects to export. NOTE(review): the selected-only path
        # does not filter by type, so any selected object is attempted.
        if self.export_selected_only:
            curve_objs = [obj for obj in context.selected_objects]
        else:
            curve_objs = [obj for obj in bpy.data.objects if obj.type == 'CURVE']

        # Get filename without extension; it becomes the asset's "Name".
        file_base_name = os.path.splitext(os.path.basename(self.filepath))[0]

        export_data = {
            "Name": file_base_name,
            "GeoLocation": {
                "East": self.east,
                "Elevation": self.elevation,
                "North": self.north
            },
            "Rails": []
        }

        amount_done = 0
        for obj in curve_objs:
            for sampled_pts in resample_and_polniearize(obj, self.handle_distance, self.merge_threshold):
                # Axes are remapped on export: Blender's Z feeds "Y" and
                # Blender's Y feeds "Z" (presumably a Y-up target
                # convention — TODO confirm against the consumer).
                spline_data = {
                    "Name": obj.name,
                    "SplineHandles":
                        [{"X": pt.x - self.east, "Y": pt.z - self.elevation, "Z": pt.y - self.north} for pt in sampled_pts] if self.apply_geolocation else [{"X": pt.x, "Y": pt.z, "Z": pt.y} for pt in sampled_pts],
                    "SplineType": "Centripetal"
                }
                export_data["Rails"].append(spline_data)
                amount_done += 1

        json_str = json.dumps(export_data, indent=4)

        # Build the zip entirely in memory, then write it out in one go.
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
            zip_file.writestr("model.json", json_str)
            zip_file.writestr("dProB_asset_metadata.json", '{"Format":"Rails","ProductVersion":"Simulation 2024.2.9"}')
            zip_file.writestr("asset_guid.txt", str(uuid.uuid4()))

        try:
            with open(self.filepath, "wb") as f:
                f.write(zip_buffer.getvalue())
        except Exception as e:
            self.report({'ERROR'}, f"Failed to write file: {e}")
            return {'CANCELLED'}

        self.report({'INFO'}, f"Exported {amount_done} curve(s).")
        return {'FINISHED'}

    @classmethod
    def poll(cls, context):
        # Keep the operator unavailable inside file browser areas.
        return context.space_data is not None and context.space_data.type != 'FILE_BROWSER'

0 commit comments

Comments
 (0)