Skip to content

Commit cde81e4

Browse files
authored
Merge pull request #2296 from mikedh/feat/3mfextra
Support 3MF component `path`
2 parents cca5eef + af820f8 commit cde81e4

File tree

3 files changed

+85
-58
lines changed

3 files changed

+85
-58
lines changed

tests/test_3mf.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,10 @@ def test_roundtrip(self):
6060
file_type="3mf",
6161
)
6262

63-
assert set(s.geometry.keys()) == set(r.geometry.keys())
63+
assert set(s.geometry.keys()) == set(r.geometry.keys()), (
64+
s.geometry.keys(),
65+
r.geometry.keys(),
66+
)
6467
assert g.np.allclose(s.bounds, r.bounds)
6568
assert g.np.isclose(s.area, r.area, rtol=1e-3)
6669

trimesh/exchange/threemf.py

Lines changed: 67 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,35 @@
1-
import collections
21
import io
32
import uuid
43
import zipfile
4+
from collections import defaultdict
55

66
import numpy as np
77

88
from .. import graph, util
99
from ..constants import log
10+
from ..util import unique_name
11+
12+
13+
def _read_mesh(mesh):
14+
vertices = mesh.find("{*}vertices")
15+
v_array = np.array(
16+
[
17+
[i.attrib["x"], i.attrib["y"], i.attrib["z"]]
18+
for i in vertices.iter("{*}vertex")
19+
],
20+
dtype=np.float64,
21+
)
22+
23+
faces = mesh.find("{*}triangles")
24+
f_array = np.array(
25+
[
26+
[i.attrib["v1"], i.attrib["v2"], i.attrib["v3"]]
27+
for i in faces.iter("{*}triangle")
28+
],
29+
dtype=np.int64,
30+
)
31+
32+
return v_array, f_array
1033

1134

1235
def load_3MF(file_obj, postprocess=True, **kwargs):
@@ -23,6 +46,7 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
2346
kwargs : dict
2447
Constructor arguments for `trimesh.Scene`
2548
"""
49+
2650
# dict, {name in archive: BytesIo}
2751
archive = util.decompress(file_obj, file_type="zip")
2852
# get model with case-insensitive keys
@@ -40,66 +64,74 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
4064
# { mesh id : mesh name}
4165
id_name = {}
4266
# { mesh id: (n,3) float vertices}
43-
v_seq = {}
67+
v_seq = defaultdict(list)
4468
# { mesh id: (n,3) int faces}
45-
f_seq = {}
69+
f_seq = defaultdict(list)
4670
# components are objects that contain other objects
4771
# {id : [other ids]}
48-
components = collections.defaultdict(list)
72+
components = defaultdict(list)
4973
# load information about the scene graph
5074
# each instance is a single geometry
5175
build_items = []
5276

77+
# keep track of names we can use
78+
consumed_counts = {}
5379
consumed_names = set()
80+
5481
# iterate the XML object and build elements with an LXML iterator
5582
# loaded elements are cleared to avoid ballooning memory
5683
model.seek(0)
57-
for _, obj in etree.iterparse(model, tag=("{*}object", "{*}build")):
84+
for _, obj in etree.iterparse(model, tag=("{*}object", "{*}build"), events=("end",)):
5885
# parse objects
5986
if "object" in obj.tag:
6087
# id is mandatory
6188
index = obj.attrib["id"]
6289

6390
# start with stored name
64-
name = obj.attrib.get("name", str(index))
6591
# apparently some exporters name multiple meshes
6692
# the same thing so check to see if it's been used
67-
if name in consumed_names:
68-
name = name + str(index)
93+
name = unique_name(
94+
obj.attrib.get("name", str(index)), consumed_names, consumed_counts
95+
)
6996
consumed_names.add(name)
7097
# store name reference on the index
7198
id_name[index] = name
7299

73100
# if the object has actual geometry data parse here
74101
for mesh in obj.iter("{*}mesh"):
75-
vertices = mesh.find("{*}vertices")
76-
v_seq[index] = np.array(
77-
[
78-
[i.attrib["x"], i.attrib["y"], i.attrib["z"]]
79-
for i in vertices.iter("{*}vertex")
80-
],
81-
dtype=np.float64,
82-
)
83-
vertices.clear()
84-
vertices.getparent().remove(vertices)
85-
86-
faces = mesh.find("{*}triangles")
87-
f_seq[index] = np.array(
88-
[
89-
[i.attrib["v1"], i.attrib["v2"], i.attrib["v3"]]
90-
for i in faces.iter("{*}triangle")
91-
],
92-
dtype=np.int64,
93-
)
94-
faces.clear()
95-
faces.getparent().remove(faces)
102+
v, f = _read_mesh(mesh)
103+
v_seq[index].append(v)
104+
f_seq[index].append(f)
96105

97106
# components are references to other geometries
98107
for c in obj.iter("{*}component"):
99108
mesh_index = c.attrib["objectid"]
100109
transform = _attrib_to_transform(c.attrib)
101110
components[index].append((mesh_index, transform))
102111

112+
# if this references another file as the `path` attrib
113+
path = next(
114+
(v.strip("/") for k, v in c.attrib.items() if k.endswith("path")),
115+
None,
116+
)
117+
if path is not None and path in archive:
118+
archive[path].seek(0)
119+
name = unique_name(
120+
obj.attrib.get("name", str(mesh_index)),
121+
consumed_names,
122+
consumed_counts,
123+
)
124+
consumed_names.add(name)
125+
# store name reference on the index
126+
id_name[mesh_index] = name
127+
128+
for _, m in etree.iterparse(
129+
archive[path], tag=("{*}mesh"), events=("end",)
130+
):
131+
v, f = _read_mesh(m)
132+
v_seq[mesh_index].append(v)
133+
f_seq[mesh_index].append(f)
134+
103135
# parse build
104136
if "build" in obj.tag:
105137
# scene graph information stored here, aka "build" the scene
@@ -109,19 +141,15 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
109141
# the index of the geometry this item instantiates
110142
build_items.append((item.attrib["objectid"], transform))
111143

112-
# free resources
113-
obj.clear()
114-
obj.getparent().remove(obj)
115-
del obj
116-
117144
# have one mesh per 3MF object
118145
# one mesh per geometry ID, store as kwargs for the object
119146
meshes = {}
120147
for gid in v_seq.keys():
148+
v, f = util.append_faces(v_seq[gid], f_seq[gid])
121149
name = id_name[gid]
122150
meshes[name] = {
123-
"vertices": v_seq[gid],
124-
"faces": f_seq[gid],
151+
"vertices": v,
152+
"faces": f,
125153
"metadata": metadata.copy(),
126154
}
127155
meshes[name].update(kwargs)
@@ -143,7 +171,7 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
143171
# flatten the scene structure and simplify to
144172
# a single unique node per instance
145173
graph_args = []
146-
parents = collections.defaultdict(set)
174+
parents = defaultdict(set)
147175
for path in graph.multigraph_paths(G=g, source="world"):
148176
# collect all the transform on the path
149177
transforms = graph.multigraph_collect(G=g, traversal=path, attrib="matrix")
@@ -157,8 +185,9 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
157185
last = path[-1][0]
158186
# if someone included an undefined component, skip it
159187
if last not in id_name:
160-
log.debug(f"id {last} included but not defined!")
188+
log.warning(f"id {last} included but not defined!")
161189
continue
190+
162191
# frame names unique
163192
name = id_name[last] + util.unique_id()
164193
# index in meshes

trimesh/path/polygons.py

Lines changed: 14 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from shapely.geometry import Polygon
44

55
from .. import bounds, geometry, graph, grouping
6+
from ..boolean import reduce_cascade
67
from ..constants import log
78
from ..constants import tol_path as tol
89
from ..transformations import transform_points
@@ -162,14 +163,14 @@ def edges_to_polygons(edges: NDArray[int64], vertices: NDArray[float64]):
162163
# find which polygons contain which other polygons
163164
roots, tree = enclosure_tree(polygons)
164165

165-
# generate list of polygons with proper interiors
166-
complete = []
167-
for root in roots:
168-
interior = list(tree[root].keys())
169-
shell = polygons[root].exterior.coords
170-
holes = [polygons[i].exterior.coords for i in interior]
171-
complete.append(Polygon(shell=shell, holes=holes))
172-
return complete
166+
# generate polygons with proper interiors
167+
return [
168+
Polygon(
169+
shell=polygons[root].exterior,
170+
holes=[polygons[i].exterior for i in tree[root].keys()],
171+
)
172+
for root in roots
173+
]
173174

174175

175176
def polygons_obb(polygons: Iterable[Polygon]):
@@ -864,17 +865,11 @@ def projected(
864865
return polygons[0]
865866
elif len(polygons) == 0:
866867
return None
867-
# inflate each polygon before unioning to remove zero-size
868-
# gaps then deflate the result after unioning by the same amount
869-
# note the following provides a 25% speedup but needs
870-
# more testing to see if it deflates to a decent looking
871-
# result:
872-
# polygon = ops.unary_union(
873-
# [p.buffer(padding,
874-
# join_style=2,
875-
# mitre_limit=1.5)
876-
# for p in polygons]).buffer(-padding)
877-
return ops.unary_union([p.buffer(padding) for p in polygons]).buffer(-padding)
868+
869+
# in my tests this was substantially faster than `shapely.ops.unary_union`
870+
return (
871+
reduce_cascade(lambda a, b: a.union(b), polygons).buffer(padding).buffer(-padding)
872+
)
878873

879874

880875
def second_moments(polygon: Polygon, return_centered=False):

0 commit comments

Comments
 (0)