Skip to content

Commit

Permalink
add urdf schema
Browse files Browse the repository at this point in the history
  • Loading branch information
mikedh committed Nov 23, 2022
1 parent a179c70 commit f689147
Show file tree
Hide file tree
Showing 12 changed files with 392 additions and 43 deletions.
1 change: 1 addition & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ def abspath(rel):
# The theme to use for HTML and HTML Help pages
html_theme = 'sphinx_rtd_theme'
# html_theme = 'insegel'
# html_theme = 'furo'

# options for rtd-theme
html_theme_options = {
Expand Down
3 changes: 2 additions & 1 deletion docs/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,5 @@ pyopenssl==22.1.0
autodocsumm==0.2.9
jinja2==3.1.2
matplotlib==3.6.2
nbconvert==7.2.4
nbconvert==7.2.5

2 changes: 1 addition & 1 deletion examples/section.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@
"source": [
"# we can plot the intersection (red) and our original geometry(black and green)\n",
"ax = plt.gca()\n",
"for h in hits:\n",
"for h in hits.geoms:\n",
" ax.plot(*h.xy, color='r')\n",
"slice_2D.show()"
]
Expand Down
9 changes: 1 addition & 8 deletions trimesh/constants.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,6 @@
import numpy as np

from .util import log, PY3

if PY3:
# will be the highest granularity clock available
from time import perf_counter as now
else:
# perf_counter not available on python 2
from time import time as now
from .util import log, now


class ToleranceMesh(object):
Expand Down
21 changes: 10 additions & 11 deletions trimesh/exchange/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,9 @@
from ..parent import Geometry
from ..points import PointCloud
from ..scene.scene import Scene, append_scenes
from ..constants import log_time, log
from ..util import log, now

from . import misc

from .xyz import _xyz_loaders
from .ply import _ply_loaders
from .stl import _stl_loaders
Expand Down Expand Up @@ -173,7 +172,6 @@ def load(file_obj,
return loaded


@log_time
def load_mesh(file_obj,
file_type=None,
resolver=None,
Expand Down Expand Up @@ -209,10 +207,12 @@ def load_mesh(file_obj,
try:
# make sure we keep passed kwargs to loader
# but also make sure loader keys override passed keys
results = mesh_loaders[file_type](file_obj,
file_type=file_type,
resolver=resolver,
**kwargs)
loader = mesh_loaders[file_type]
tic = now()
results = loader(file_obj,
file_type=file_type,
resolver=resolver,
**kwargs)
if not isinstance(results, list):
results = [results]

Expand All @@ -223,10 +223,9 @@ def load_mesh(file_obj,
loaded[-1].metadata.update(metadata)
if len(loaded) == 1:
loaded = loaded[0]
# show the repr for loaded
log.debug('loaded {} using {}'.format(
str(loaded),
mesh_loaders[file_type].__name__))
# show the repr for loaded, loader used, and time
log.debug('loaded {} using `{}` in {:0.4f}s'.format(
str(loaded), loader.__name__, now() - tic))
finally:
# if we failed to load close file
if opened:
Expand Down
28 changes: 19 additions & 9 deletions trimesh/exchange/urdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import numpy as np

from ..constants import log
from ..constants import log, tol
from ..decomposition import convex_decomposition
from ..version import __version__ as trimesh_version

Expand All @@ -13,25 +13,28 @@ def export_urdf(mesh,
color=[0.75, 0.75, 0.75],
**kwargs):
"""
Convert a Trimesh object into a URDF package for physics simulation.
This breaks the mesh into convex pieces and writes them to the same
directory as the .urdf file.
Convert a Trimesh object into a URDF package for physics
simulation. This breaks the mesh into convex pieces and
writes them to the same directory as the .urdf file.
Parameters
---------
mesh : Trimesh object
mesh : trimesh.Trimesh
Input geometry
directory : str
The directory path for the URDF package
The directory path for the URDF package
Returns
---------
mesh : Trimesh object
Multi-body mesh containing convex decomposition
mesh : Trimesh
Multi-body mesh containing convex decomposition
"""

import lxml.etree as et
# TODO: fix circular import
from .export import export_mesh
from ..resources import get

# Extract the save directory and the file name
fullpath = os.path.abspath(directory)
name = os.path.basename(fullpath)
Expand Down Expand Up @@ -158,6 +161,13 @@ def export_urdf(mesh,
description.text = name

tree = et.ElementTree(root)
tree.write(os.path.join(fullpath, 'model.config'))

if tol.strict:
schema = et.XMLSchema(file=get(
'schema/urdf.xsd', as_stream=True))
if not schema.validate(tree):
# actual error isn't raised by validate
raise ValueError(schema.error_log)

tree.write(os.path.join(fullpath, 'model.config'))
return np.sum(convex_pieces)
15 changes: 11 additions & 4 deletions trimesh/path/exchange/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@ def load_path(file_obj, file_type=None, **kwargs):
path : Path, Path2D, Path3D file_object
Data as a native trimesh Path file_object
"""
# avoid a circular import
from ...exchange.load import load_kwargs
# record how long we took
tic = util.now()

if isinstance(file_obj, Path):
# we have been passed a Path file_object so
Expand All @@ -46,7 +50,8 @@ def load_path(file_obj, file_type=None, **kwargs):
# get the file type from the extension
file_type = os.path.splitext(file_obj)[-1][1:].lower()
# call the loader
kwargs.update(path_loaders[file_type](f, file_type=file_type))
kwargs.update(path_loaders[file_type](
f, file_type=file_type))
elif util.is_instance_named(file_obj, ['Polygon', 'MultiPolygon']):
# convert from shapely polygons to Path2D
kwargs.update(misc.polygon_to_path(file_obj))
Expand All @@ -55,16 +60,18 @@ def load_path(file_obj, file_type=None, **kwargs):
kwargs.update(misc.linestrings_to_path(file_obj))
elif isinstance(file_obj, dict):
# load as kwargs
from ...exchange.load import load_kwargs
return load_kwargs(file_obj)
elif util.is_sequence(file_obj):
# load as lines in space
kwargs.update(misc.lines_to_path(file_obj))
else:
raise ValueError('Not a supported object type!')

from ...exchange.load import load_kwargs
return load_kwargs(kwargs)
result = load_kwargs(kwargs)
util.log.debug('loaded {} in {:0.4f}s'.format(
str(result), util.now() - tic))

return result


def path_formats():
Expand Down
21 changes: 15 additions & 6 deletions trimesh/resources/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
import json

from ..util import decode_text
from ..util import decode_text, wrap_as_stream

# find the current absolute path to this directory
_pwd = os.path.expanduser(os.path.abspath(
Expand All @@ -11,7 +11,7 @@
_cache = {}


def get(name, decode=True, decode_json=False):
def get(name, decode=True, decode_json=False, as_stream=False):
"""
Get a resource from the `trimesh/resources` folder.
Expand All @@ -23,17 +23,24 @@ def get(name, decode=True, decode_json=False):
Whether or not to decode result as UTF-8
decode_json : bool
Run `json.loads` on resource if True.
as_stream : bool
Return as a file-like object
Returns
-------------
resource : str, bytes, or decoded JSON
File data
"""
# key by name and decode
cache_key = (name, bool(decode), bool(decode_json))
if cache_key in _cache:
# return cached resource
return _cache[cache_key]
cache_key = (name,
bool(decode),
bool(decode_json),
bool(as_stream))
cached = _cache.get(cache_key)
if hasattr(cached, 'seek'):
cached.seek(0)
if cached is not None:
return cached

# get the resource using relative names
with open(os.path.join(_pwd, name), 'rb') as f:
Expand All @@ -46,6 +53,8 @@ def get(name, decode=True, decode_json=False):

if decode_json:
resource = json.loads(resource)
elif as_stream:
resource = wrap_as_stream(resource)

# store for later access
_cache[cache_key] = resource
Expand Down
4 changes: 3 additions & 1 deletion trimesh/resources/schema/README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
# trimesh/resources/schemas

Contains [JSON schema](https://json-schema.org/) for exports. The goal is if there is a JSON export format (like the header of a GLTF file) or a `to_dict` method to have a well-defined schema we can validate in unit tests.
Contains schemas for formats when available. They are currently mostly [JSON schema](https://json-schema.org/), although if formats have an XSD, DTD, or other schema format we are happy to include it here.

The `primitive` schema directory is a [JSON schema](https://json-schema.org/) for `trimesh` exports. The goal is if we implement a `to_dict` method to have a well-defined schema we can validate in unit tests.
Loading

0 comments on commit f689147

Please sign in to comment.