Skip to content

Commit aed7eee

Browse files
authored
Release: Refactor Load Types (#2241)
A very common source of annoyance and confusion is that `trimesh.load` can return lots of different types depending on what type of file was passed (i.e. #2239). This refactor changes the return types for the loading functions to: - `trimesh.load_scene -> Scene` - This loads into a `Scene`, the most general container which can hold any loadable type. Most people should probably use this to load geometry. - `trimesh.load_mesh -> Trimesh` - Forces all mesh objects in a scene into a single `Trimesh` object. This potentially has to drop information and irreversibly concatenate multiple meshes. - The implementation of the concatenation logic is now in `Scene.to_mesh` rather than load. - `trimesh.load_path -> Path` - This loads into either a `Path2D` or `Path3D` which both inherit from `Path` - `trimesh.load -> Geometry` - This was the original load entry point and is deprecated, but there are no current plans to remove it. It has been modified into a thin wrapper for `load_scene` that attempts to match the behavior of the previous loader for backwards compatibility. In my testing against the current `main` branch it was returning the same types [99.8% of the time](https://gist.github.com/mikedh/8de541e066ce842932b1f6cd97c214ca) although there may be other subtle differences. - `trimesh.load(..., force='mesh')` will emit a deprecation warning in favor of `load_mesh` - `trimesh.load(..., force='scene')` will emit a deprecation warning in favor of `load_scene` Additional changes: - Removes `Geometry.metadata['file_path']` in favor of `Geometry.source.file_path`. Everything that inherits from `Geometry` should now have a `.source` attribute which is a typed dataclass. This was something of a struggle as `file_path` was populated into metadata on load, but we also try to make sure `metadata` is preserved through round-trips if at all possible. And so the `load` inserted *different* keys into the metadata. 
Making it first-class information rather than a loose key seems like an improvement, but users will have to replace `mesh.metadata["file_name"]` with `mesh.source.file_name`. - Moves all network fetching into `WebResolver` so it can be more easily gated by `allow_remote`. - Removes code for the following deprecations: - January 2025 deprecation for `trimesh.resources.get` in favor of the typed alternatives (`get_json`, `get_bytes`, etc). - January 2025 deprecation for `Scene.deduplicated` in favor of a very short list comprehension on `Scene.duplicate_nodes` - March 2024 deprecation for `trimesh.graph.smoothed` in favor of `trimesh.graph.smooth_shaded`. - Adds the following new deprecations: - January 2026 `Path3D.to_planar` -> `Path3D.to_2D` to be consistent with `Path2D.to_3D`. - Fixes #2335 - Fixes #2330 - Fixes #2239 - Releases #2313 - Releases #2327 - Releases #2336 - Releases #2339
2 parents 2fcb2b2 + b15df31 commit aed7eee

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

60 files changed

+1687
-1254
lines changed

.github/workflows/release.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ jobs:
5454

5555
pypi:
5656
name: Release To PyPi
57-
needs: [tests, containers]
57+
needs: [tests, containers, corpus]
5858
runs-on: ubuntu-latest
5959
steps:
6060
- uses: actions/checkout@v4
@@ -118,13 +118,13 @@ jobs:
118118
- name: Install Trimesh
119119
run: pip install .[easy,test]
120120
- name: Run Corpus Check
121-
run: python tests/corpus.py
121+
run: python tests/corpus.py -run
122122

123123
release:
124124
permissions:
125125
contents: write # for actions/create-release
126126
name: Create GitHub Release
127-
needs: [tests, containers]
127+
needs: [tests, containers, corpus]
128128
runs-on: ubuntu-latest
129129
steps:
130130
- name: Checkout code

.github/workflows/test.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -68,5 +68,5 @@ jobs:
6868
- name: Install Trimesh
6969
run: pip install .[easy,test]
7070
- name: Run Corpus Check
71-
run: python tests/corpus.py
71+
run: python tests/corpus.py -run
7272

docs/requirements.txt

+9-9
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
1-
pypandoc==1.13
1+
pypandoc==1.14
22
recommonmark==0.7.1
3-
jupyter==1.0.0
3+
jupyter==1.1.1
44

55
# get sphinx version range from furo install
6-
furo==2024.5.6
7-
myst-parser==3.0.1
8-
pyopenssl==24.1.0
9-
autodocsumm==0.2.12
10-
jinja2==3.1.4
11-
matplotlib==3.8.4
12-
nbconvert==7.16.4
6+
furo==2024.8.6
7+
myst-parser==4.0.0
8+
pyopenssl==24.3.0
9+
autodocsumm==0.2.14
10+
jinja2==3.1.5
11+
matplotlib==3.10.0
12+
nbconvert==7.16.5
1313

examples/nearest.ipynb

+1-1
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@
7575
"# create a scene containing the mesh and two sets of points\n",
7676
"scene = trimesh.Scene([mesh, cloud_original, cloud_close])\n",
7777
"\n",
78-
"# show the scene wusing\n",
78+
"# show the scene we are using\n",
7979
"scene.show()"
8080
]
8181
}

pyproject.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ requires = ["setuptools >= 61.0", "wheel"]
55
[project]
66
name = "trimesh"
77
requires-python = ">=3.8"
8-
version = "4.5.3"
8+
version = "4.6.0"
99
authors = [{name = "Michael Dawson-Haggerty", email = "[email protected]"}]
1010
license = {file = "LICENSE.md"}
1111
description = "Import, export, process, analyze and view triangular meshes."
@@ -120,6 +120,7 @@ test_more = [
120120
"matplotlib",
121121
"pymeshlab",
122122
"triangle",
123+
"ipython",
123124
]
124125

125126
# interfaces.gmsh will be dropped Jan 2025

tests/corpus.py

+168-38
Original file line numberDiff line numberDiff line change
@@ -6,29 +6,85 @@
66
will download more than a gigabyte to your home directory!
77
"""
88

9+
import json
10+
import sys
11+
import time
12+
from dataclasses import asdict, dataclass
13+
914
import numpy as np
1015
from pyinstrument import Profiler
16+
from pyinstrument.renderers.jsonrenderer import JSONRenderer
1117

1218
import trimesh
19+
from trimesh.typed import List, Optional, Tuple
1320
from trimesh.util import log, wrap_as_stream
1421

15-
# get a set with available extension
16-
available = trimesh.available_formats()
1722

18-
# remove loaders that are thin wrappers
19-
available.difference_update(
20-
[
21-
k
22-
for k, v in trimesh.exchange.load.mesh_loaders.items()
23-
if v in (trimesh.exchange.misc.load_meshio,)
24-
]
25-
)
26-
# remove loaders we don't care about
27-
available.difference_update({"json", "dae", "zae"})
28-
available.update({"dxf", "svg"})
23+
@dataclass
24+
class LoadReport:
25+
# i.e. 'hi.glb'
26+
file_name: str
27+
28+
# i.e 'glb'
29+
file_type: str
30+
31+
# i.e. 'Scene'
32+
type_load: Optional[str] = None
33+
34+
# what type was every geometry
35+
type_geometry: Optional[Tuple[str]] = None
36+
37+
# what is the printed repr of the object, i.e. `<Trimesh ...>`
38+
repr_load: Optional[str] = None
39+
40+
# if there was an exception save it here
41+
exception: Optional[str] = None
42+
43+
44+
@dataclass
45+
class Report:
46+
# what did we load
47+
load: list[LoadReport]
48+
49+
# what version of trimesh was this produced on
50+
version: str
51+
52+
# what was the profiler output for this run
53+
# a pyinstrument.renderers.JSONRenderer output
54+
profile: str
55+
56+
def compare(self, other: "Report"):
57+
"""
58+
Compare this load report to another.
59+
"""
60+
# what files were loaded by both versions
61+
self_type = {o.file_name: o.type_load for o in self.load}
62+
other_type = {n.file_name: n.type_load for n in other.load}
63+
64+
both = set(self_type.keys()).intersection(other_type.keys())
65+
matches = np.array([self_type[k] == other_type[k] for k in both])
66+
percent = matches.sum() / len(matches)
2967

68+
print(f"Comparing `{self.version}` against `{other.version}`")
69+
print(f"Return types matched {percent * 100.0:0.3f}% of the time")
70+
print(f"Loaded {len(self.load)} vs Loaded {len(other.load)}")
3071

31-
def on_repo(repo, commit):
72+
73+
def from_dict(data: dict) -> Report:
74+
"""
75+
Parse a `Report` which has been exported using `dataclasses.asdict`
76+
into a Report object.
77+
"""
78+
return Report(
79+
load=[LoadReport(**r) for r in data.get("load", [])],
80+
version=data.get("version"),
81+
profile=data.get("profile"),
82+
)
83+
84+
85+
def on_repo(
86+
repo: str, commit: str, available: set, root: Optional[str] = None
87+
) -> List[LoadReport]:
3288
"""
3389
Try loading all supported files in a Github repo.
3490
@@ -38,6 +94,10 @@ def on_repo(repo, commit):
3894
Github "slug" i.e. "assimp/assimp"
3995
commit : str
4096
Full hash of the commit to check.
97+
available
98+
Which `file_type` to check
99+
root
100+
If passed only consider files under this root directory.
41101
"""
42102

43103
# get a resolver for the specific commit
@@ -47,32 +107,43 @@ def on_repo(repo, commit):
47107
# list file names in the repo we can load
48108
paths = [i for i in repo.keys() if i.lower().split(".")[-1] in available]
49109

50-
report = {}
110+
if root is not None:
111+
# clip off any file not under the root path
112+
paths = [p for p in paths if p.startswith(root)]
113+
114+
report = []
51115
for _i, path in enumerate(paths):
52116
namespace, name = path.rsplit("/", 1)
53117
# get a subresolver that has a root at
54118
# the file we are trying to load
55119
resolver = repo.namespaced(namespace)
56120

57121
check = path.lower()
58-
broke = (
59-
"malformed empty outofmemory "
60-
+ "bad incorrect missing "
61-
+ "failures pond.0.ply"
62-
).split()
122+
broke = "malformed outofmemory bad incorrect missing invalid failures".split()
63123
should_raise = any(b in check for b in broke)
64124
raised = False
65125

66-
# clip off the big old name from the archive
67-
saveas = path[path.find(commit) + len(commit) :]
126+
# start collecting data about the current load attempt
127+
current = LoadReport(file_name=name, file_type=trimesh.util.split_extension(name))
128+
129+
print(f"Attempting: {name}")
68130

69131
try:
70132
m = trimesh.load(
71133
file_obj=wrap_as_stream(resolver.get(name)),
72134
file_type=name,
73135
resolver=resolver,
74136
)
75-
report[saveas] = str(m)
137+
138+
# save the load types
139+
current.type_load = m.__class__.__name__
140+
if isinstance(m, trimesh.Scene):
141+
# save geometry types
142+
current.type_geometry = tuple(
143+
[g.__class__.__name__ for g in m.geometry.values()]
144+
)
145+
# save the <Trimesh ...> repr
146+
current.repr_load = str(m)
76147

77148
# if our source was a GLTF we should be able to roundtrip without
78149
# dropping
@@ -104,19 +175,19 @@ def on_repo(repo, commit):
104175
# this is what unsupported formats
105176
# like GLTF 1.0 should raise
106177
log.debug(E)
107-
report[saveas] = str(E)
178+
current.exception = str(E)
108179
except BaseException as E:
109180
raised = True
110181
# we got an error on a file that should have passed
111182
if not should_raise:
112183
log.debug(path, E)
113184
raise E
114-
report[saveas] = str(E)
185+
current.exception = str(E)
115186

116187
# if it worked when it didn't have to add a label
117188
if should_raise and not raised:
118-
# raise ValueError(name)
119-
report[saveas] += " SHOULD HAVE RAISED"
189+
current.exception = "PROBABLY SHOULD HAVE RAISED BUT DIDN'T!"
190+
report.append(current)
120191

121192
return report
122193

@@ -165,33 +236,92 @@ def equal(a, b):
165236
return a == b
166237

167238

168-
if __name__ == "__main__":
169-
trimesh.util.attach_to_log()
239+
def run(save: bool = False):
240+
"""
241+
Try to load and export every mesh we can get our hands on.
242+
243+
Parameters
244+
-----------
245+
save
246+
If passed, save a JSON dump of the load report.
247+
"""
248+
# get a set with available extension
249+
available = trimesh.available_formats()
250+
251+
# remove meshio loaders because we're not testing meshio
252+
available.difference_update(
253+
[
254+
k
255+
for k, v in trimesh.exchange.load.mesh_loaders.items()
256+
if v in (trimesh.exchange.misc.load_meshio,)
257+
]
258+
)
259+
260+
# TODO : waiting on a release containing pycollada/pycollada/147
261+
available.difference_update({"dae"})
170262

171263
with Profiler() as P:
264+
# check against the small trimesh corpus
265+
loads = on_repo(
266+
repo="mikedh/trimesh",
267+
commit="2fcb2b2ea8085d253e692ecd4f71b8f450890d51",
268+
available=available,
269+
root="models",
270+
)
271+
172272
# check the assimp corpus, about 50mb
173-
report = on_repo(
174-
repo="assimp/assimp", commit="c2967cf79acdc4cd48ecb0729e2733bf45b38a6f"
273+
loads.extend(
274+
on_repo(
275+
repo="assimp/assimp",
276+
commit="1e44036c363f64d57e9f799beb9f06d4d3389a87",
277+
available=available,
278+
root="test",
279+
)
175280
)
176281
# check the gltf-sample-models, about 1gb
177-
report.update(
282+
loads.extend(
178283
on_repo(
179284
repo="KhronosGroup/glTF-Sample-Models",
180285
commit="8e9a5a6ad1a2790e2333e3eb48a1ee39f9e0e31b",
286+
available=available,
181287
)
182288
)
183-
184-
# add back collada for this repo
185-
available.update(["dae", "zae"])
186-
report.update(
289+
# try on the universal robot models
290+
loads.extend(
187291
on_repo(
188292
repo="ros-industrial/universal_robot",
189293
commit="8f01aa1934079e5a2c859ccaa9dd6623d4cfa2fe",
294+
available=available,
190295
)
191296
)
192297

193298
# show all profiler lines
194299
log.info(P.output_text(show_all=True))
195300

196-
# print a formatted report of what we loaded
197-
log.debug("\n".join(f"# {k}\n{v}\n" for k, v in report.items()))
301+
# save the profile for comparison loader
302+
profile = P.output(JSONRenderer())
303+
304+
# compose the overall report
305+
report = Report(load=loads, version=trimesh.__version__, profile=profile)
306+
307+
if save:
308+
with open(f"trimesh.{trimesh.__version__}.{int(time.time())}.json", "w") as F:
309+
json.dump(asdict(report), F)
310+
311+
return report
312+
313+
314+
if __name__ == "__main__":
315+
trimesh.util.attach_to_log()
316+
317+
if "-run" in " ".join(sys.argv):
318+
run()
319+
320+
if "-compare" in " ".join(sys.argv):
321+
with open("trimesh.4.5.3.1737061410.json") as f:
322+
old = from_dict(json.load(f))
323+
324+
with open("trimesh.4.6.0.1737060030.json") as f:
325+
new = from_dict(json.load(f))
326+
327+
new.compare(old)

tests/generic.py

-1
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,6 @@ def check(item):
366366
batched.append(loaded)
367367

368368
for mesh in batched:
369-
mesh.metadata["file_name"] = file_name
370369
# only return our limit
371370
if returned[0] >= count:
372371
return

tests/regression.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ def typical_application():
1212
meshes = g.get_meshes(raise_error=True)
1313

1414
for mesh in meshes:
15-
g.log.info("Testing %s", mesh.metadata["file_name"])
15+
g.log.info("Testing %s", mesh.source.file_name)
1616
assert len(mesh.faces) > 0
1717
assert len(mesh.vertices) > 0
1818

0 commit comments

Comments
 (0)