From 8fbdc06e352e2bb4e234a88f1094f2680c69f27c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:22:17 -0700 Subject: [PATCH 01/61] Initial version of asset conversion utils --- omnigibson/utils/asset_conversion_utils.py | 1539 ++++++++++++++++++++ 1 file changed, 1539 insertions(+) create mode 100644 omnigibson/utils/asset_conversion_utils.py diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py new file mode 100644 index 000000000..561c63fd5 --- /dev/null +++ b/omnigibson/utils/asset_conversion_utils.py @@ -0,0 +1,1539 @@ +import json +import os +import shutil +import xml.etree.ElementTree as ET +from collections import OrderedDict +from copy import deepcopy +from datetime import datetime +from os.path import exists +from pathlib import Path + +import numpy as np +from scipy.spatial.transform import Rotation as R +import omnigibson as og +import omnigibson.lazy as lazy +import trimesh +from omnigibson.objects import DatasetObject +from omnigibson.scenes import Scene +from omnigibson.utils.usd_utils import create_primitive_mesh + +LIGHT_MAPPING = { + 0: "Rect", + 2: "Sphere", + 4: "Disk", +} + +OBJECT_STATE_TEXTURES = { + "burnt", + "cooked", + "frozen", + "soaked", + "toggledon", +} + +MTL_MAP_TYPE_MAPPINGS = { + "map_kd": "albedo", + "map_bump": "normal", + "map_pr": "roughness", + "map_pm": "metalness", + "map_tf": "opacity", + "map_ke": "emission", + "map_ks": "ao", + "map_": "metalness", +} + +SPLIT_COLLISION_MESHES = False + +META_LINK_RENAME_MAPPING = { + "fillable": "container", + "fluidsink": "particlesink", + "fluidsource": "particlesource", +} + +ALLOWED_META_TYPES = { + "particlesource": "dimensionless", + "togglebutton": "primitive", + "attachment": "dimensionless", + "heatsource": "dimensionless", + "particleapplier": "primitive", + "particleremover": "primitive", + "particlesink": "primitive", + "slicer": "primitive", + "container": "primitive", + "collision": "convexmesh", + "lights": "light", +} + + +class NumpyEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, np.ndarray): + return o.tolist() + return json.JSONEncoder.default(self, o) + + +def string_to_array(string): + """ + Converts a array string in mujoco xml to np.array. + Examples: + "0 1 2" => [0, 1, 2] + Args: + string (str): String to convert to an array + Returns: + np.array: Numerical array equivalent of @string + """ + return np.array([float(x) for x in string.split(" ")]) + + +def array_to_string(array): + """ + Converts a numeric array into the string format in mujoco. 
+ Examples: + [0, 1, 2] => "0 1 2" + Args: + array (n-array): Array to convert to a string + Returns: + str: String equivalent of @array + """ + return " ".join(["{}".format(x) for x in array]) + + +def split_obj_file(obj_fpath): + """ + Splits obj file at @obj_fpath into individual obj files + """ + # Open file in trimesh + obj = trimesh.load(obj_fpath, file_type="obj", force="mesh") + + # Split to grab all individual bodies + obj_bodies = obj.split(only_watertight=False) + + # Procedurally create new files in the same folder as obj_fpath + out_fpath = os.path.dirname(obj_fpath) + out_fname_root = os.path.splitext(os.path.basename(obj_fpath))[0] + + for i, obj_body in enumerate(obj_bodies): + # Write to a new file + obj_body.export(f"{out_fpath}/{out_fname_root}_{i}.obj", "obj") + + # We return the number of splits we had + return len(obj_bodies) + + +def split_objs_in_urdf(urdf_fpath, name_suffix="split", mesh_fpath_offset="."): + """ + Splits the obj reference in its urdf + """ + tree = ET.parse(urdf_fpath) + root = tree.getroot() + urdf_dir = os.path.dirname(urdf_fpath) + out_fname_root = os.path.splitext(os.path.basename(urdf_fpath))[0] + + def recursively_find_collision_meshes(ele): + # Finds all collision meshes starting at @ele + cols = [] + for child in ele: + if child.tag == "collision": + # If the nested geom type is a mesh, add this to our running list along with its parent node + if child.find("./geometry/mesh") is not None: + cols.append((child, ele)) + elif child.tag == "visual": + # There will be no collision mesh internally here so we simply pass + continue + else: + # Recurisvely look through all children of the child + cols += recursively_find_collision_meshes(ele=child) + + return cols + + # Iterate over the tree and find all collision entries + col_elements = recursively_find_collision_meshes(ele=root) + + # For each collision element and its parent, we remove the original one and create a set of new ones with their + # filename references changed + for col, parent in col_elements: + # Don't change the original + col_copy = deepcopy(col) + # Delete the original + parent.remove(col) + # Create new objs first so we know how many we need to create in the URDF + obj_fpath = col_copy.find("./geometry/mesh").attrib["filename"] + n_new_objs = split_obj_file(obj_fpath=f"{urdf_dir}/{mesh_fpath_offset}/{obj_fpath}") + # Create the new objs in the URDF + for i in range(n_new_objs): + # Copy collision again + col_copy_copy = deepcopy(col_copy) + # Modify the filename + fname = col_copy_copy.find("./geometry/mesh").attrib["filename"] + fname = fname.split(".obj")[0] + f"_{i}.obj" + col_copy_copy.find("./geometry/mesh").attrib["filename"] = fname + # Add to parent + parent.append(col_copy_copy) + + # Finally, write this to a new file + urdf_out_path = f"{urdf_dir}/{out_fname_root}_{name_suffix}.urdf" + tree.write(urdf_out_path) + + # Return the urdf it wrote to + return urdf_out_path + + +def set_mtl_albedo(mtl_prim, texture): + mtl = "diffuse_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def set_mtl_normal(mtl_prim, texture): + mtl = "normalmap_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def 
set_mtl_ao(mtl_prim, texture): + mtl = "ao_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def set_mtl_roughness(mtl_prim, texture): + mtl = "reflectionroughness_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + lazy.omni.usd.create_material_input( + mtl_prim, + "reflection_roughness_texture_influence", + 1.0, + lazy.pxr.Sdf.ValueTypeNames.Float, + ) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def set_mtl_metalness(mtl_prim, texture): + mtl = "metallic_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + lazy.omni.usd.create_material_input(mtl_prim, "metallic_texture_influence", 1.0, lazy.pxr.Sdf.ValueTypeNames.Float) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def set_mtl_opacity(mtl_prim, texture): + return + mtl = "opacity_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + lazy.omni.usd.create_material_input(mtl_prim, "enable_opacity", True, lazy.pxr.Sdf.ValueTypeNames.Bool) + lazy.omni.usd.create_material_input(mtl_prim, "enable_opacity_texture", True, lazy.pxr.Sdf.ValueTypeNames.Bool) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +def set_mtl_emission(mtl_prim, texture): + mtl = "emissive_color_texture" + lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) + lazy.omni.usd.create_material_input(mtl_prim, "enable_emission", True, lazy.pxr.Sdf.ValueTypeNames.Bool) + # Verify it was set + shade = lazy.omni.usd.get_shader_from_material(mtl_prim) + print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + + +RENDERING_CHANNEL_MAPPINGS = { + "diffuse": set_mtl_albedo, + "albedo": set_mtl_albedo, + "normal": set_mtl_normal, + "ao": set_mtl_ao, + "roughness": set_mtl_roughness, + "metalness": set_mtl_metalness, + "opacity": set_mtl_opacity, + "emission": set_mtl_emission, +} + + +def rename_prim(prim, name): + path_from = prim.GetPrimPath().pathString + path_to = f"{'/'.join(path_from.split('/')[:-1])}/{name}" + lazy.omni.kit.commands.execute("MovePrim", path_from=path_from, path_to=path_to) + return lazy.omni.isaac.core.utils.prims.get_prim_at_path(path_to) + + +def get_visual_objs_from_urdf(urdf_path): + # Will return a dictionary mapping link name (e.g.: base_link) to dictionary of owned visual meshes mapping mesh + # name to visual obj file for that mesh + visual_objs = OrderedDict() + # Parse URDF + tree = ET.parse(urdf_path) + root = tree.getroot() + for ele in root: + if ele.tag == "link": + name = ele.get("name").replace("-", "_") + visual_objs[name] = OrderedDict() + for sub_ele in ele: + if sub_ele.tag == "visual": + visual_mesh_name = sub_ele.get("name", "visuals").replace("-", "_") + obj_file = None if sub_ele.find(".//mesh") is None else sub_ele.find(".//mesh").get("filename") + if obj_file is None: + print(f"Warning: No obj file found associated with {name}/{visual_mesh_name}!") + visual_objs[name][visual_mesh_name] = obj_file + + return visual_objs + + +def copy_object_state_textures(obj_category, obj_model, 
dataset_root): + obj_root_dir = f"{dataset_root}/objects/{obj_category}/{obj_model}" + old_mat_fpath = f"{obj_root_dir}/material" + new_mat_fpath = f"{obj_root_dir}/usd/materials" + for mat_file in os.listdir(old_mat_fpath): + should_copy = False + for object_state in OBJECT_STATE_TEXTURES: + if object_state in mat_file.lower(): + should_copy = True + break + if should_copy: + shutil.copy(f"{old_mat_fpath}/{mat_file}", new_mat_fpath) + + +def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path, usd_path, dataset_root): + usd_dir = os.path.dirname(usd_path) + # # mat_dir = f"{model_root_path}/material/{obj_category}" if \ + # # obj_category in {"ceilings", "walls", "floors"} else f"{model_root_path}/material" + # mat_dir = f"{model_root_path}/material" + # # Compile all material files we have + # mat_files = set(os.listdir(mat_dir)) + + # Remove the material prims as we will create them explictly later. + # TODO: Be a bit more smart about this. a material procedurally generated will lose its material without it having + # be regenerated + stage = lazy.omni.usd.get_context().get_stage() + for prim in obj_prim.GetChildren(): + looks_prim = None + if prim.GetName() == "Looks": + looks_prim = prim + elif prim.GetPrimTypeInfo().GetTypeName() == "Xform": + looks_prim_path = f"{str(prim.GetPrimPath())}/Looks" + looks_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(looks_prim_path) + if not looks_prim: + continue + for subprim in looks_prim.GetChildren(): + if subprim.GetPrimTypeInfo().GetTypeName() != "Material": + continue + print( + f"Removed material prim {subprim.GetPath()}:", + stage.RemovePrim(subprim.GetPath()), + ) + + # # Create new default material for this object. + # mtl_created_list = [] + # lazy.omni.kit.commands.execute( + # "CreateAndBindMdlMaterialFromLibrary", + # mdl_name="OmniPBR.mdl", + # mtl_name="OmniPBR", + # mtl_created_list=mtl_created_list, + # ) + # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) + # default_mat = rename_prim(prim=default_mat, name=f"default_material") + # print("Created default material:", default_mat.GetPath()) + # + # # We may delete this default material if it's never used + # default_mat_is_used = False + + # Grab all visual objs for this object + urdf_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/{obj_model}_with_metalinks.urdf" + visual_objs = get_visual_objs_from_urdf(urdf_path) + + # Extract absolute paths to mtl files for each link + link_mtl_files = OrderedDict() # maps link name to dictionary mapping mesh name to mtl file + mtl_infos = OrderedDict() # maps mtl name to dictionary mapping material channel name to png file + mat_files = OrderedDict() # maps mtl name to corresponding list of material filenames + mtl_old_dirs = OrderedDict() # maps mtl name to corresponding directory where the mtl file exists + mat_old_paths = OrderedDict() # maps mtl name to corresponding list of relative mat paths from mtl directory + for link_name, link_meshes in visual_objs.items(): + link_mtl_files[link_name] = OrderedDict() + for mesh_name, obj_file in link_meshes.items(): + # Get absolute path and open the obj file if it exists: + if obj_file is not None: + obj_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/{obj_file}" + with open(obj_path, "r") as f: + mtls = [] + for line in f.readlines(): + if "mtllib" in line and line[0] != "#": + mtls.append(line.split("mtllib ")[-1].split("\n")[0]) + + if mtls: + assert len(mtls) == 1, f"Only one mtl is supported per obj 
file in omniverse -- found {len(mtls)}!" + mtl = mtls[0] + # TODO: Make name unique + mtl_name = ".".join(os.path.basename(mtl).split(".")[:-1]).replace("-", "_").replace(".", "_") + mtl_old_dir = f"{'/'.join(obj_path.split('/')[:-1])}" + link_mtl_files[link_name][mesh_name] = mtl_name + mtl_infos[mtl_name] = OrderedDict() + mtl_old_dirs[mtl_name] = mtl_old_dir + mat_files[mtl_name] = [] + mat_old_paths[mtl_name] = [] + # Open the mtl file + mtl_path = f"{mtl_old_dir}/{mtl}" + with open(mtl_path, "r") as f: + # Read any lines beginning with map that aren't commented out + for line in f.readlines(): + if line[:4] == "map_": + map_type, map_file = line.split(" ") + map_file = map_file.split("\n")[0] + map_filename = os.path.basename(map_file) + mat_files[mtl_name].append(map_filename) + mat_old_paths[mtl_name].append(map_file) + mtl_infos[mtl_name][MTL_MAP_TYPE_MAPPINGS[map_type.lower()]] = map_filename + + # Next, for each material information, we create a new material and port the material files to the USD directory + mat_new_fpath = os.path.join(usd_dir, "materials") + Path(mat_new_fpath).mkdir(parents=True, exist_ok=True) + shaders = OrderedDict() # maps mtl name to shader prim + for mtl_name, mtl_info in mtl_infos.items(): + for mat_old_path in mat_old_paths[mtl_name]: + shutil.copy(os.path.join(mtl_old_dirs[mtl_name], mat_old_path), mat_new_fpath) + + # Create the new material + mtl_created_list = [] + lazy.omni.kit.commands.execute( + "CreateAndBindMdlMaterialFromLibrary", + mdl_name="OmniPBR.mdl", + mtl_name="OmniPBR", + mtl_created_list=mtl_created_list, + ) + mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) + + # Apply all rendering channels for this material + for mat_type, mat_file in mtl_info.items(): + render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + if render_channel_fcn is not None: + render_channel_fcn(mat, os.path.join("materials", mat_file)) + else: + # Warn user that we didn't find the correct rendering channel + print(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") + + # Rename material + mat = rename_prim(prim=mat, name=mtl_name) + shade = lazy.pxr.UsdShade.Material(mat) + shaders[mtl_name] = shade + print(f"Created material {mtl_name}:", mtl_created_list[0]) + + # Bind each (visual) mesh to its appropriate material in the object + # We'll loop over each link, create a list of 2-tuples each consisting of (mesh_prim_path, mtl_name) to be bound + root_prim_path = obj_prim.GetPrimPath().pathString + for link_name, mesh_mtl_names in link_mtl_files.items(): + # Special case -- omni always calls the visuals "visuals" by default if there's only a single visual mesh for the + # given + if len(mesh_mtl_names) == 1: + mesh_mtl_infos = [ + ( + f"{root_prim_path}/{link_name}/visuals", + list(mesh_mtl_names.values())[0], + ) + ] + else: + mesh_mtl_infos = [] + for mesh_name, mtl_name in mesh_mtl_names.items(): + # Omni only accepts a-z, A-Z as valid start characters for prim names + # So we check if there is an invalid character, and modify it as we know Omni does + if not ord("a") <= ord(mesh_name[0]) <= ord("z") and not ord("A") <= ord(mesh_name[0]) <= ord("Z"): + mesh_name = "a_" + mesh_name[1:] + mesh_mtl_infos.append((f"{root_prim_path}/{link_name}/visuals/{mesh_name}", mtl_name)) + for mesh_prim_path, mtl_name in mesh_mtl_infos: + visual_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mesh_prim_path) + assert visual_prim, f"Error: Did not find valid visual prim at 
{mesh_prim_path}!" + # Bind the created link material to the visual prim + print(f"Binding material {mtl_name}, shader {shaders[mtl_name]}, to prim {mesh_prim_path}...") + lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind( + shaders[mtl_name], lazy.pxr.UsdShade.Tokens.strongerThanDescendants + ) + + # Lastly, we copy object_state texture maps that are state-conditioned; e.g.: cooked, soaked, etc. + copy_object_state_textures(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) + + # ################################### + # + # # Iterate over all children of the object prim, if ///visual exists, then we + # # know is a valid link, and we check explicitly for these material files in our set + # # Note: we assume that the link name is included as a string within the mat_file! + # for prim in obj_prim.GetChildren(): + # if prim.GetPrimTypeInfo().GetTypeName() == "Xform": + # # This could be a link, check if it owns a visual subprim + # link_name = prim.GetName() + # visual_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{prim.GetPrimPath().pathString}/visuals") + # print(f"path: {prim.GetPrimPath().pathString}/visuals") + # print(f"visual prim: {visual_prim}") + # + # if visual_prim: + # # Aggregate all material files for this prim + # link_mat_files = [] + # for mat_file in deepcopy(mat_files): + # if link_name in mat_file: + # # Add this mat file and remove it from the set + # link_mat_files.append(mat_file) + # mat_files.remove(mat_file) + # # Potentially write material files for this prim if we have any valid materials + # print("link_mat_files:", link_mat_files) + # if not link_mat_files: + # # Bind default material to the visual prim + # shade = lazy.pxr.UsdShade.Material(default_mat) + # lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind(shade, lazy.pxr.UsdShade.Tokens.strongerThanDescendants) + # default_mat_is_used = True + # else: + # # Create new material for this link + # mtl_created_list = [] + # lazy.omni.kit.commands.execute( + # "CreateAndBindMdlMaterialFromLibrary", + # mdl_name="OmniPBR.mdl", + # mtl_name="OmniPBR", + # mtl_created_list=mtl_created_list, + # ) + # print(f"Created material for link {link_name}:", mtl_created_list[0]) + # mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) + # + # shade = lazy.pxr.UsdShade.Material(mat) + # # Bind the created link material to the visual prim + # lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind(shade, lazy.pxr.UsdShade.Tokens.strongerThanDescendants) + # + # # Iterate over all material channels and write them to the material + # for link_mat_file in link_mat_files: + # # Copy this file into the materials folder + # mat_fpath = os.path.join(usd_dir, "materials") + # shutil.copy(os.path.join(mat_dir, link_mat_file), mat_fpath) + # # Check if any valid rendering channel + # mat_type = link_mat_file.split("_")[-1].split(".")[0].lower() + # # Apply the material if it exists + # render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + # if render_channel_fcn is not None: + # render_channel_fcn(mat, os.path.join("materials", link_mat_file)) + # else: + # # Warn user that we didn't find the correct rendering channel + # print(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") + # + # # Rename material + # mat = rename_prim(prim=mat, name=f"material_{link_name}") + # + # # For any remaining materials, we write them to the default material + # # default_mat = 
lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_material_0") + # # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_default") + # print(f"default mat: {default_mat}, obj: {obj_category}, {prim.GetPrimPath().pathString}") + # for mat_file in mat_files: + # # Copy this file into the materials folder + # mat_fpath = os.path.join(usd_dir, "materials") + # shutil.copy(os.path.join(mat_dir, mat_file), mat_fpath) + # # Check if any valid rendering channel + # mat_type = mat_file.split("_")[-1].split(".")[0].lower() + # # Apply the material if it exists + # render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + # if render_channel_fcn is not None: + # render_channel_fcn(default_mat, os.path.join("materials", mat_file)) + # default_mat_is_used = True + # else: + # # Warn user that we didn't find the correct rendering channel + # print(f"Warning: could not find rendering channel function for material: {mat_type}") + # + # # Possibly delete the default material prim if it was never used + # if not default_mat_is_used: + # stage.RemovePrim(default_mat.GetPrimPath()) + + +def add_xform_properties(prim): + properties_to_remove = [ + "xformOp:rotateX", + "xformOp:rotateXZY", + "xformOp:rotateY", + "xformOp:rotateYXZ", + "xformOp:rotateYZX", + "xformOp:rotateZ", + "xformOp:rotateZYX", + "xformOp:rotateZXY", + "xformOp:rotateXYZ", + "xformOp:transform", + ] + prop_names = prim.GetPropertyNames() + xformable = lazy.pxr.UsdGeom.Xformable(prim) + xformable.ClearXformOpOrder() + # TODO: wont be able to delete props for non root links on articulated objects + for prop_name in prop_names: + if prop_name in properties_to_remove: + prim.RemoveProperty(prop_name) + if "xformOp:scale" not in prop_names: + xform_op_scale = xformable.AddXformOp( + lazy.pxr.UsdGeom.XformOp.TypeScale, + lazy.pxr.UsdGeom.XformOp.PrecisionDouble, + "", + ) + xform_op_scale.Set(lazy.pxr.Gf.Vec3d([1.0, 1.0, 1.0])) + else: + xform_op_scale = lazy.pxr.UsdGeom.XformOp(prim.GetAttribute("xformOp:scale")) + + if "xformOp:translate" not in prop_names: + xform_op_translate = xformable.AddXformOp( + lazy.pxr.UsdGeom.XformOp.TypeTranslate, + lazy.pxr.UsdGeom.XformOp.PrecisionDouble, + "", + ) + else: + xform_op_translate = lazy.pxr.UsdGeom.XformOp(prim.GetAttribute("xformOp:translate")) + + if "xformOp:orient" not in prop_names: + xform_op_rot = xformable.AddXformOp( + lazy.pxr.UsdGeom.XformOp.TypeOrient, + lazy.pxr.UsdGeom.XformOp.PrecisionDouble, + "", + ) + else: + xform_op_rot = lazy.pxr.UsdGeom.XformOp(prim.GetAttribute("xformOp:orient")) + xformable.SetXformOpOrder([xform_op_translate, xform_op_rot, xform_op_scale]) + + +def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): + """ + Process a meta link by creating visual meshes or lights below it + """ + # TODO: Reenable after fillable meshes are backported into 3ds Max. + # Temporarily disable importing of fillable meshes. + if meta_link_type in ["container"]: + return + + assert meta_link_type in ALLOWED_META_TYPES + if ALLOWED_META_TYPES[meta_link_type] not in ["primitive", "light"] and meta_link_type != "particlesource": + return + + is_light = ALLOWED_META_TYPES[meta_link_type] == "light" + + for link_id, mesh_info_list in meta_link_infos.items(): + if len(mesh_info_list) == 0: + continue + + # TODO: Remove this after this is fixed. 
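        # Clarifying note (added editorially, not part of the upstream commit): some
        # metadata serializes this per-link list as a dict keyed by stringified indices,
        # e.g. {"0": {...}, "1": {...}} instead of [{...}, {...}]. The block below detects
        # that shape and rebuilds the ordered list before any meshes are processed.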
+ if type(mesh_info_list) == dict: + keys = [str(x) for x in range(len(mesh_info_list))] + assert set(mesh_info_list.keys()) == set(keys), "Unexpected keys" + mesh_info_list = [mesh_info_list[k] for k in keys] + + if meta_link_type in [ + "togglebutton", + "particleapplier", + "particleremover", + "particlesink", + "particlesource", + ]: + assert len(mesh_info_list) == 1, f"Invalid number of meshes for {meta_link_type}" + + meta_link_in_parent_link_pos, meta_link_in_parent_link_orn = ( + mesh_info_list[0]["position"], + mesh_info_list[0]["orientation"], + ) + + # For particle applier only, the orientation of the meta link matters (particle should shoot towards the negative z-axis) + # If the meta link is created based on the orientation of the first mesh that is a cone, we need to rotate it by 180 degrees + # because the cone is pointing in the wrong direction. This is already done in update_obj_urdf_with_metalinks; + # we just need to make sure meta_link_in_parent_link_orn is updated correctly. + if meta_link_type == "particleapplier" and mesh_info_list[0]["type"] == "cone": + meta_link_in_parent_link_orn = ( + R.from_quat(meta_link_in_parent_link_orn) * R.from_rotvec([np.pi, 0.0, 0.0]) + ).as_quat() + + for i, mesh_info in enumerate(mesh_info_list): + is_mesh = False + if is_light: + # Create a light + light_type = LIGHT_MAPPING[mesh_info["type"]] + prim_path = f"/{obj_model}/lights_{link_id}_0_link/light_{i}" + prim = getattr(lazy.pxr.UsdLux, f"{light_type}Light").Define(stage, prim_path).GetPrim() + lazy.pxr.UsdLux.ShapingAPI.Apply(prim).GetShapingConeAngleAttr().Set(180.0) + else: + if meta_link_type == "particlesource": + mesh_type = "Cylinder" + else: + # Create a primitive shape + mesh_type = mesh_info["type"].capitalize() if mesh_info["type"] != "box" else "Cube" + prim_path = f"/{obj_model}/{meta_link_type}_{link_id}_0_link/mesh_{i}" + assert hasattr(lazy.pxr.UsdGeom, mesh_type) + # togglebutton has to be a sphere + if meta_link_type in ["togglebutton"]: + is_mesh = True + # particle applier has to be a cone or cylinder because of the visualization of the particle flow + elif meta_link_type in ["particleapplier"]: + assert mesh_type in [ + "Cone", + "Cylinder", + ], f"Invalid mesh type for particleapplier: {mesh_type}" + prim = ( + create_primitive_mesh(prim_path, mesh_type, stage=stage).GetPrim() + if is_mesh + else getattr(lazy.pxr.UsdGeom, mesh_type).Define(stage, prim_path).GetPrim() + ) + + add_xform_properties(prim=prim) + # Make sure mesh_prim has XForm properties + xform_prim = lazy.omni.isaac.core.prims.xform_prim.XFormPrim(prim_path=prim_path) + + # Get the mesh/light pose in the parent link frame + mesh_in_parent_link_pos, mesh_in_parent_link_orn = np.array(mesh_info["position"]), np.array( + mesh_info["orientation"] + ) + + # Get the mesh/light pose in the meta link frame + mesh_in_parent_link_tf = np.eye(4) + mesh_in_parent_link_tf[:3, :3] = R.from_quat(mesh_in_parent_link_orn).as_matrix() + mesh_in_parent_link_tf[:3, 3] = mesh_in_parent_link_pos + meta_link_in_parent_link_tf = np.eye(4) + meta_link_in_parent_link_tf[:3, :3] = R.from_quat(meta_link_in_parent_link_orn).as_matrix() + meta_link_in_parent_link_tf[:3, 3] = meta_link_in_parent_link_pos + mesh_in_meta_link_tf = np.linalg.inv(meta_link_in_parent_link_tf) @ mesh_in_parent_link_tf + mesh_in_meta_link_pos, mesh_in_meta_link_orn = ( + mesh_in_meta_link_tf[:3, 3], + R.from_matrix(mesh_in_meta_link_tf[:3, :3]).as_quat(), + ) + + if is_light: + xform_prim.prim.GetAttribute("inputs:color").Set( + 
lazy.pxr.Gf.Vec3f(*np.array(mesh_info["color"]) / 255.0) + ) + xform_prim.prim.GetAttribute("inputs:intensity").Set(mesh_info["intensity"]) + if light_type == "Rect": + xform_prim.prim.GetAttribute("inputs:width").Set(mesh_info["length"]) + xform_prim.prim.GetAttribute("inputs:height").Set(mesh_info["width"]) + elif light_type == "Disk": + xform_prim.prim.GetAttribute("inputs:radius").Set(mesh_info["length"]) + elif light_type == "Sphere": + xform_prim.prim.GetAttribute("inputs:radius").Set(mesh_info["length"]) + else: + raise ValueError(f"Invalid light type: {light_type}") + else: + if mesh_type == "Cylinder": + if not is_mesh: + xform_prim.prim.GetAttribute("radius").Set(0.5) + xform_prim.prim.GetAttribute("height").Set(1.0) + if meta_link_type == "particlesource": + desired_radius = 0.0125 + desired_height = 0.05 + height_offset = -desired_height / 2.0 + else: + desired_radius = mesh_info["size"][0] + desired_height = mesh_info["size"][2] + height_offset = desired_height / 2.0 + xform_prim.prim.GetAttribute("xformOp:scale").Set( + lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) + ) + # Offset the position by half the height because in 3dsmax the origin of the cylinder is at the center of the base + mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( + np.array([0.0, 0.0, height_offset]) + ) + elif mesh_type == "Cone": + if not is_mesh: + xform_prim.prim.GetAttribute("radius").Set(0.5) + xform_prim.prim.GetAttribute("height").Set(1.0) + desired_radius = mesh_info["size"][0] + desired_height = mesh_info["size"][2] + height_offset = -desired_height / 2.0 + xform_prim.prim.GetAttribute("xformOp:scale").Set( + lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) + ) + # Flip the orientation of the z-axis because in 3dsmax the cone is pointing in the opposite direction + mesh_in_meta_link_orn = ( + R.from_quat(mesh_in_meta_link_orn) * R.from_rotvec([np.pi, 0.0, 0.0]) + ).as_quat() + # Offset the position by half the height because in 3dsmax the origin of the cone is at the center of the base + mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( + np.array([0.0, 0.0, height_offset]) + ) + elif mesh_type == "Cube": + if not is_mesh: + xform_prim.prim.GetAttribute("size").Set(1.0) + xform_prim.prim.GetAttribute("xformOp:scale").Set(lazy.pxr.Gf.Vec3f(*mesh_info["size"])) + height_offset = mesh_info["size"][2] / 2.0 + mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( + np.array([0.0, 0.0, height_offset]) + ) + elif mesh_type == "Sphere": + if not is_mesh: + xform_prim.prim.GetAttribute("radius").Set(0.5) + desired_radius = mesh_info["size"][0] + xform_prim.prim.GetAttribute("xformOp:scale").Set( + lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_radius * 2) + ) + else: + raise ValueError(f"Invalid mesh type: {mesh_type}") + + # Make invisible + lazy.pxr.UsdGeom.Imageable(xform_prim.prim).MakeInvisible() + + xform_prim.set_local_pose( + translation=mesh_in_meta_link_pos, + orientation=mesh_in_meta_link_orn[[3, 0, 1, 2]], + ) + + +def process_glass_link(prim): + # Update any glass parts to use the glass material instead + glass_prim_paths = [] + for gchild in prim.GetChildren(): + if gchild.GetTypeName() == "Mesh": + # check if has col api, if not, this is visual + if not gchild.HasAPI(lazy.pxr.UsdPhysics.CollisionAPI): + glass_prim_paths.append(gchild.GetPath().pathString) + elif gchild.GetTypeName() == "Scope": + # contains multiple additional prims, check those + for ggchild in 
gchild.GetChildren(): + if ggchild.GetTypeName() == "Mesh": + # check if has col api, if not, this is visual + if not ggchild.HasAPI(lazy.pxr.UsdPhysics.CollisionAPI): + glass_prim_paths.append(ggchild.GetPath().pathString) + + assert glass_prim_paths + + stage = lazy.omni.isaac.core.utils.stage.get_current_stage() + root_path = stage.GetDefaultPrim().GetPath().pathString + glass_mtl_prim_path = f"{root_path}/Looks/OmniGlass" + if not lazy.omni.isaac.core.utils.prims.get_prim_at_path(glass_mtl_prim_path): + mtl_created = [] + lazy.omni.kit.commands.execute( + "CreateAndBindMdlMaterialFromLibrary", + mdl_name="OmniGlass.mdl", + mtl_name="OmniGlass", + mtl_created_list=mtl_created, + ) + + for glass_prim_path in glass_prim_paths: + lazy.omni.kit.commands.execute( + "BindMaterialCommand", + prim_path=glass_prim_path, + material_path=glass_mtl_prim_path, + strength=None, + ) + + +# TODO: Handle metalinks +# TODO: Import heights per link folder into USD folder +def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_channels=False): + # Check if filepath exists + model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" + usd_path = f"{model_root_path}/usd/{obj_model}.usd" + print("Loading", usd_path, "for metadata import.") + + print("Start metadata import") + + # Load model + lazy.omni.isaac.core.utils.stage.open_stage(usd_path) + stage = lazy.omni.isaac.core.utils.stage.get_current_stage() + prim = stage.GetDefaultPrim() + + data = dict() + for data_group in {"metadata", "mvbb_meta", "material_groups", "heights_per_link"}: + data_path = f"{model_root_path}/misc/{data_group}.json" + if exists(data_path): + # Load data + with open(data_path, "r") as f: + data[data_group] = json.load(f) + + # If certain metadata doesn't exist, populate with some core info + if "base_link_offset" not in data["metadata"]: + data["metadata"]["base_link_offset"] = [0, 0, 0] + if "bbox_size" not in data["metadata"]: + raise ValueError("We cannot work without a bbox size.") + + # Pop bb and base link offset and meta links info + base_link_offset = data["metadata"].pop("base_link_offset") + default_bb = data["metadata"].pop("bbox_size") + + # Manually modify material groups info + if "material_groups" in data: + data["material_groups"] = { + "groups": data["material_groups"][0], + "links": data["material_groups"][1], + } + + # Manually modify metadata + if "openable_joint_ids" in data["metadata"]: + data["metadata"]["openable_joint_ids"] = { + str(pair[0]): pair[1] for pair in data["metadata"]["openable_joint_ids"] + } + + # Grab light info if any + meta_links = data["metadata"].get("meta_links", dict()) + + print("Process meta links") + + # TODO: Use parent link name + for link_name, link_metadata in meta_links.items(): + for meta_link_type, meta_link_infos in link_metadata.items(): + process_meta_link(stage, obj_model, meta_link_type, meta_link_infos) + + # Apply temporary fillable meshes. + # TODo: Disable after fillable meshes are backported into 3ds Max. 
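    # Note on the block below (added for clarity): if an optional fillable.obj sits next
    # to the model, it is loaded with trimesh and handed to import_fillable_mesh, which
    # creates an invisible container_0_0_link holding one USD Mesh per submesh and ties
    # that link to base_link with fixed joints.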
+ fillable_path = f"{model_root_path}/fillable.obj" + if exists(fillable_path): + mesh = trimesh.load(fillable_path, force="mesh") + import_fillable_mesh(stage, obj_model, mesh) + + print("Done processing meta links") + + # # Update metalink info + # if "meta_links" in data["metadata"]: + # meta_links = data["metadata"].pop("meta_links") + # print("meta_links:", meta_links) + # # TODO: Use parent link name + # for parent_link_name, child_link_attrs in meta_links.items(): + # for meta_link_name, ml_attrs in child_link_attrs.items(): + # for ml_id, attrs_list in ml_attrs.items(): + # for i, attrs in enumerate(attrs_list): + # # # Create new Xform prim that will contain info + # ml_prim_path = ( + # f"{prim.GetPath()}/{meta_link_name}_{ml_id}_{i}_link" + # ) + # link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(ml_prim_path) + # assert ( + # link_prim + # ), f"Should have found valid metalink prim at prim path: {ml_prim_path}" + # + # link_prim.CreateAttribute("ig:is_metalink", lazy.pxr.Sdf.ValueTypeNames.Bool) + # link_prim.GetAttribute("ig:is_metalink").Set(True) + # + # # TODO! Validate that this works + # # test on water sink 02: water sink location is 0.1, 0.048, 0.32 + # # water source location is -0.03724, 0.008, 0.43223 + # add_xform_properties(prim=link_prim) + # link_prim.GetAttribute("xformOp:translate").Set(lazy.pxr.Gf.Vec3f(*atrr["xyz"])) + # if atrr["rpy"] is not None: + # link_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatf(*(R.from_euler(atrr["rpy"], "xyz").as_quat()[[3, 0, 1, 2]]))) + # + # link_prim.CreateAttribute("ig:orientation", lazy.pxr.Sdf.ValueTypeNames.Quatf) + # link_prim.GetAttribute("ig:orientation").Set(lazy.pxr.Gf.Quatf(*atrr["rpy"])) + + # Iterate over dict and replace any lists of dicts as dicts of dicts (with each dict being indexed by an integer) + data = recursively_replace_list_of_dict(data) + + print("Done recursively replacing") + + # Create attributes for bb, offset, category, model and store values + prim.CreateAttribute("ig:nativeBB", lazy.pxr.Sdf.ValueTypeNames.Vector3f) + prim.CreateAttribute("ig:offsetBaseLink", lazy.pxr.Sdf.ValueTypeNames.Vector3f) + prim.CreateAttribute("ig:category", lazy.pxr.Sdf.ValueTypeNames.String) + prim.CreateAttribute("ig:model", lazy.pxr.Sdf.ValueTypeNames.String) + prim.GetAttribute("ig:nativeBB").Set(lazy.pxr.Gf.Vec3f(*default_bb)) + prim.GetAttribute("ig:offsetBaseLink").Set(lazy.pxr.Gf.Vec3f(*base_link_offset)) + prim.GetAttribute("ig:category").Set(obj_category) + prim.GetAttribute("ig:model").Set(obj_model) + + print(f"data: {data}") + + # Store remaining data as metadata + prim.SetCustomData(data) + # for k, v in data.items(): + # print(f"setting custom data {k}") + # print(v) + # print() + # prim.SetCustomDataByKey(k, v) + # # if k == "metadata": + # # print(v) + # # print() + # # prim.SetCustomDataByKey(k, v["link_bounding_boxes"]["base_link"]["collision"]["axis_aligned"]["transform"]) + # # input("succeeded!") + # # else: + # # prim.SetCustomDataByKey(k, v) + + # Add material channels + # print(f"prim children: {prim.GetChildren()}") + # looks_prim_path = f"{str(prim.GetPrimPath())}/Looks" + # looks_prim = prim.GetChildren()[0] #lazy.omni.isaac.core.utils.prims.get_prim_at_path(looks_prim_path) + # mat_prim_path = f"{str(prim.GetPrimPath())}/Looks/material_material_0" + # mat_prim = looks_prim.GetChildren()[0] #lazy.omni.isaac.core.utils.prims.get_prim_at_path(mat_prim_path) + # print(f"looks children: {looks_prim.GetChildren()}") + # print(f"mat prim: {mat_prim}") + print("irc") + if 
import_render_channels: + import_rendering_channels( + obj_prim=prim, + obj_category=obj_category, + obj_model=obj_model, + model_root_path=model_root_path, + usd_path=usd_path, + dataset_root=dataset_root, + ) + print("done irc") + for link, link_tags in data["metadata"]["link_tags"].items(): + if "glass" in link_tags: + process_glass_link(prim.GetChild(link)) + + print("done glass") + # Save stage + stage.Save() + + print("done save") + + # Delete stage reference and clear the sim stage variable, opening the dummy stage along the way + del stage + + +def recursively_replace_list_of_dict(dic): + for k, v in dic.items(): + print(f"k: {k}") + if v is None: + # Replace None + dic[k] = lazy.pxr.lazy.pxr.UsdGeom.Tokens.none + elif isinstance(v, list) or isinstance(v, tuple): + if len(v) == 0: + dic[k] = lazy.pxr.Vt.Vec3fArray() + elif isinstance(v[0], dict): + # Replace with dict in place + v = {str(i): vv for i, vv in enumerate(v)} + dic[k] = v + elif isinstance(v[0], list) or isinstance(v[0], tuple): + # # Flatten the lists + # dic[k] = [] + # for vv in v: + # dic[k] += vv + print("v0: ", v[0]) + if len(v[0]) == 1: + # Do nothing + pass + if len(v[0]) == 2: + dic[k] = lazy.pxr.Vt.Vec2fArray(v) + elif len(v[0]) == 3: + dic[k] = lazy.pxr.Vt.Vec3fArray(v) + elif len(v[0]) == 4: + dic[k] = lazy.pxr.Vt.Vec4fArray(v) + else: + raise ValueError(f"No support for storing matrices of length {len(v[0])}!") + elif isinstance(v[0], int): + # if len(v) == 1: + # # Do nothing + # pass + # elif len(v) == 2: + # dic[k] = lazy.pxr.Gf.Vec2i(v) + # elif len(v) == 3: + # dic[k] = lazy.pxr.Gf.Vec3i(v) + # elif len(v) == 4: + # dic[k] = lazy.pxr.Gf.Vec4i(v) + # else: + dic[k] = lazy.pxr.Vt.IntArray(v) + # raise ValueError(f"No support for storing numeric arrays of length {len(v)}! Array: {v}") + elif isinstance(v[0], float): + # if len(v) == 1: + # # Do nothing + # pass + # elif len(v) == 2: + # dic[k] = lazy.pxr.Gf.Vec2f(v) + # elif len(v) == 3: + # dic[k] = lazy.pxr.Gf.Vec3f(v) + # elif len(v) == 4: + # dic[k] = lazy.pxr.Gf.Vec4f(v) + # else: + dic[k] = lazy.pxr.Vt.FloatArray(v) + # raise ValueError(f"No support for storing numeric arrays of length {len(v)}! 
Array: {v}") + else: + # Replace any Nones + for i, ele in enumerate(v): + if ele is None: + v[i] = lazy.pxr.lazy.pxr.UsdGeom.Tokens.none + if isinstance(v, dict): + # Iterate through nested dictionaries + dic[k] = recursively_replace_list_of_dict(v) + + return dic + + +def import_fillable_mesh(stage, obj_model, mesh): + def _create_mesh(prim_path): + stage.DefinePrim(prim_path, "Mesh") + mesh = lazy.pxr.UsdGeom.Mesh.Define(stage, prim_path) + return mesh + + def _create_fixed_joint(prim_path, body0, body1): + # Create the joint + joint = lazy.pxr.UsdPhysics.FixedJoint.Define(stage, prim_path) + + # Possibly add body0, body1 targets + if body0 is not None: + assert stage.GetPrimAtPath(body0).IsValid(), f"Invalid body0 path specified: {body0}" + joint.GetBody0Rel().SetTargets([lazy.pxr.Sdf.Path(body0)]) + if body1 is not None: + assert stage.GetPrimAtPath(body1).IsValid(), f"Invalid body1 path specified: {body1}" + joint.GetBody1Rel().SetTargets([lazy.pxr.Sdf.Path(body1)]) + + # Get the prim pointed to at this path + joint_prim = stage.GetPrimAtPath(prim_path) + + # Apply joint API interface + lazy.pxr.PhysxSchema.PhysxJointAPI.Apply(joint_prim) + + # Possibly (un-/)enable this joint + joint_prim.GetAttribute("physics:jointEnabled").Set(True) + + # Return this joint + return joint_prim + + container_link_path = f"/{obj_model}/container_0_0_link" + container_link = stage.DefinePrim(container_link_path, "Xform") + + for i, submesh in enumerate(mesh.split()): + mesh_prim = _create_mesh(prim_path=f"{container_link_path}/mesh_{i}").GetPrim() + + # Write mesh data + mesh_prim.GetAttribute("points").Set(lazy.pxr.Vt.Vec3fArray.FromNumpy(submesh.vertices)) + mesh_prim.GetAttribute("normals").Set(lazy.pxr.Vt.Vec3fArray.FromNumpy(submesh.vertex_normals)) + face_indices = [] + face_vertex_counts = [] + for face_idx, face_vertices in enumerate(submesh.faces): + face_indices.extend(face_vertices) + face_vertex_counts.append(len(face_vertices)) + mesh_prim.GetAttribute("faceVertexCounts").Set(np.array(face_vertex_counts, dtype=int)) + mesh_prim.GetAttribute("faceVertexIndices").Set(np.array(face_indices, dtype=int)) + # mesh_prim.GetAttribute("primvars:st").Set(lazy.pxr.Vt.Vec2fArray.FromNumpy(np.zeros((len(submesh.vertices), 2)))) + + # Make invisible + lazy.pxr.UsdGeom.Imageable(mesh_prim).MakeInvisible() + + # Create fixed joint + obj_root_path = f"/{obj_model}/base_link" + _create_fixed_joint( + prim_path=f"{obj_root_path}/container_0_{i}_joint", + body0=f"{obj_root_path}", + body1=f"{container_link_path}", + ) + + +def create_import_config(): + # Set up import configuration + _, import_config = lazy.omni.kit.commands.execute("URDFCreateImportConfig") + drive_mode = ( + import_config.default_drive_type.__class__ + ) # Hacky way to get class for default drive type, options are JOINT_DRIVE_{NONE / POSITION / VELOCITY} + + import_config.set_merge_fixed_joints(False) + import_config.set_convex_decomp(True) + import_config.set_fix_base(False) + import_config.set_import_inertia_tensor(False) + import_config.set_distance_scale(1.0) + import_config.set_density(0.0) + import_config.set_default_drive_type(drive_mode.JOINT_DRIVE_NONE) + import_config.set_default_drive_strength(0.0) + import_config.set_default_position_drive_damping(0.0) + import_config.set_self_collision(False) + import_config.set_up_vector(0, 0, 1) + import_config.set_make_default_prim(True) + import_config.set_create_physics_scene(True) + return import_config + + +def import_obj_urdf(obj_category, obj_model, dataset_root, skip_if_exist=False): 
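    # Overview (editorial summary of the body below): the raw URDF is first rewritten to
    # include meta links, collision meshes are optionally split into per-body OBJ files
    # when SPLIT_COLLISION_MESHES is enabled, and the result is passed to the
    # "URDFParseAndImportFile" kit command, which writes
    # <dataset_root>/objects/<obj_category>/<obj_model>/usd/<obj_model>.usd
    # (skipped when skip_if_exist is set and that USD already exists).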
+ # Preprocess input URDF to account for metalinks + urdf_path = update_obj_urdf_with_metalinks( + obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root + ) + # Import URDF + cfg = create_import_config() + # Check if filepath exists + usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd" + if not (skip_if_exist and exists(usd_path)): + if SPLIT_COLLISION_MESHES: + print(f"Converting collision meshes from {obj_category}, {obj_model}...") + urdf_path = split_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") + print(f"Importing {obj_category}, {obj_model} into path {usd_path}...") + # Only import if it doesn't exist + lazy.omni.kit.commands.execute( + "URDFParseAndImportFile", + urdf_path=urdf_path, + import_config=cfg, + dest_path=usd_path, + ) + print(f"Imported {obj_category}, {obj_model}") + + +def pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): + space = "\t" if use_tabs else " " * 4 + for i, node in enumerate(current): + pretty_print_xml(node, current, i, depth + 1) + if parent is not None: + if index == 0: + parent.text = "\n" + (space * depth) + else: + parent[index - 1].tail = "\n" + (space * depth) + if index == len(parent) - 1: + current.tail = "\n" + (space * (depth - 1)) + + +def convert_to_string(inp): + """ + Converts any type of {bool, int, float, list, tuple, array, string, np.str_} into an mujoco-xml compatible string. + Note that an input string / np.str_ results in a no-op action. + Args: + inp: Input to convert to string + Returns: + str: String equivalent of @inp + """ + if type(inp) in {list, tuple, np.ndarray}: + return array_to_string(inp) + elif type(inp) in {int, float, bool, np.float32, np.float64, np.int32, np.int64}: + return str(inp).lower() + elif type(inp) in {str, np.str_}: + return inp + else: + raise ValueError("Unsupported type received: got {}".format(type(inp))) + + +def create_joint( + name, + parent, + child, + pos=(0, 0, 0), + rpy=(0, 0, 0), + joint_type="fixed", + axis=None, + damping=None, + friction=None, + limits=None, +): + """ + Generates XML joint + Args: + name (str): Name of this joint + parent (str or ET.Element): Name of parent link or parent link element itself for this joint + child (str or ET.Element): Name of child link or child link itself for this joint + pos (list or tuple or array): (x,y,z) offset pos values when creating the collision body + rpy (list or tuple or array): (r,p,y) offset rot values when creating the joint + joint_type (str): What type of joint to create. 
Must be one of {fixed, revolute, prismatic} + axis (None or 3-tuple): If specified, should be (x,y,z) axis corresponding to DOF + damping (None or float): If specified, should be damping value to apply to joint + friction (None or float): If specified, should be friction value to apply to joint + limits (None or 2-tuple): If specified, should be min / max limits to the applied joint + Returns: + ET.Element: Generated joint element + """ + # Create the initial joint + jnt = ET.Element("joint", name=name, type=joint_type) + # Create origin subtag + origin = ET.SubElement( + jnt, + "origin", + attrib={"rpy": convert_to_string(rpy), "xyz": convert_to_string(pos)}, + ) + # Make sure parent and child are both names (str) -- if they're not str already, we assume it's the element ref + if not isinstance(parent, str): + parent = parent.get("name") + if not isinstance(child, str): + child = child.get("name") + # Create parent and child subtags + parent = ET.SubElement(jnt, "parent", link=parent) + child = ET.SubElement(jnt, "child", link=child) + # Add additional parameters if specified + if axis is not None: + ax = ET.SubElement(jnt, "axis", xyz=convert_to_string(axis)) + dynamic_params = {} + if damping is not None: + dynamic_params["damping"] = convert_to_string(damping) + if friction is not None: + dynamic_params["friction"] = convert_to_string(friction) + if dynamic_params: + dp = ET.SubElement(jnt, "dynamics", **dynamic_params) + if limits is not None: + lim = ET.SubElement(jnt, "limit", lower=limits[0], upper=limits[1]) + + # Return this element + return jnt + + +def create_link(name, subelements=None, mass=None, inertia=None): + """ + Generates XML link element + Args: + name (str): Name of this link + subelements (None or list): If specified, specifies all nested elements that should belong to this link + (e.g.: visual, collision body elements) + mass (None or float): If specified, will add an inertial tag with specified mass value + inertia (None or 6-array): If specified, will add an inertial tag with specified inertia value + Value should be (ixx, iyy, izz, ixy, ixz, iyz) + Returns: + ET.Element: Generated link + """ + # Create the initial link + link = ET.Element("link", name=name) + # Add all subelements if specified + if subelements is not None: + for ele in subelements: + link.append(ele) + # Add mass subelement if requested + if mass is not None or inertia is not None: + inertial = ET.SubElement(link, "inertial") + if mass is not None: + ET.SubElement(inertial, "mass", value=convert_to_string(mass)) + if inertia is not None: + axes = ["ixx", "iyy", "izz", "ixy", "ixz", "iyz"] + inertia_vals = {ax: str(i) for ax, i in zip(axes, inertia)} + ET.SubElement(inertial, "inertia", **inertia_vals) + + # Return this element + return link + + +def create_metalink( + root_element, + metalink_name, + parent_link_name="base_link", + pos=(0, 0, 0), + rpy=(0, 0, 0), +): + # Create joint + jnt = create_joint( + name=f"{metalink_name}_joint", + parent=parent_link_name, + child=f"{metalink_name}_link", + pos=pos, + rpy=rpy, + joint_type="fixed", + ) + # Create child link + link = create_link( + name=f"{metalink_name}_link", + mass=0.0001, + inertia=[0.00001, 0.00001, 0.00001, 0, 0, 0], + ) + + # Add to root element + root_element.append(jnt) + root_element.append(link) + + +def generate_urdf_from_xmltree(root_element, name, dirpath, unique_urdf=False): + """ + Generates a URDF file corresponding to @xmltree at @dirpath with name @name.urdf. 
+ Args: + root_element (ET.Element): Element tree that compose the URDF + name (str): Name of this file (name assigned to robot tag) + dirpath (str): Absolute path to the location / filename for the generated URDF + unique_urdf (bool): Whether to use a unique identifier when naming urdf (uses current datetime) + Returns: + str: Path to newly created urdf (fpath/.urdf) + """ + # Write to fpath, making sure the directory exists (if not, create it) + Path(dirpath).mkdir(parents=True, exist_ok=True) + # Get file + date = datetime.now().isoformat(timespec="microseconds").replace(".", "_").replace(":", "_").replace("-", "_") + fname = f"{name}_{date}.urdf" if unique_urdf else f"{name}.urdf" + fpath = os.path.join(dirpath, fname) + with open(fpath, "w") as f: + # Write top level header line first + f.write('\n') + # Convert xml to string form and write to file + pretty_print_xml(current=root_element) + xml_str = ET.tostring(root_element, encoding="unicode") + f.write(xml_str) + + # Return path to file + return fpath + + +def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): + # Check if filepath exists + model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" + urdf_path = f"{model_root_path}/{obj_model}.urdf" + + # Load urdf + tree = ET.parse(urdf_path) + root = tree.getroot() + + # Load metadata + metadata_fpath = f"{model_root_path}/misc/metadata.json" + with open(metadata_fpath, "r") as f: + metadata = json.load(f) + + # Pop meta links + assert not ( + "links" in metadata and "meta_links" in metadata + ), "Only expected one of links and meta_links to be found in metadata, but found both!" + + if "meta_links" in metadata: + # Rename meta links, e.g. from "fillable" to "container" + for link, meta_link in metadata["meta_links"].items(): + for meta_link_name in list(meta_link.keys()): + meta_link_attrs = meta_link[meta_link_name] + if meta_link_name in META_LINK_RENAME_MAPPING: + metadata["meta_links"][link][META_LINK_RENAME_MAPPING[meta_link_name]] = meta_link_attrs + del metadata["meta_links"][link][meta_link_name] + + with open(metadata_fpath, "w") as f: + json.dump(metadata, f) + + meta_links = metadata.pop("meta_links") + print("meta_links:", meta_links) + for parent_link_name, child_link_attrs in meta_links.items(): + for meta_link_name, ml_attrs in child_link_attrs.items(): + assert ( + meta_link_name in ALLOWED_META_TYPES + ), f"meta_link_name {meta_link_name} not in {ALLOWED_META_TYPES}" + + # TODO: Reenable after fillable meshes are backported into 3ds Max. + # Temporarily disable importing of fillable meshes. + if meta_link_name in ["container"]: + continue + + for ml_id, attrs_list in ml_attrs.items(): + if len(attrs_list) > 0: + if ALLOWED_META_TYPES[meta_link_name] != "dimensionless": + # If not dimensionless, we create one meta link for a list of meshes below it + attrs_list = [attrs_list[0]] + else: + # Otherwise, we create one meta link for each frame + # For non-attachment meta links, we expect only one instance per type + # E.g. heatsource_leftstove_0, heatsource_rightstove_0, but not heatsource_leftstove_1 + if meta_link_name != "attachment": + assert ( + len(attrs_list) == 1 + ), f"Expected only one instance for meta_link {meta_link_name}_{ml_id}, but found {len(attrs_list)}" + + # TODO: Remove this after this is fixed. 
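                    # Same workaround as in process_meta_link (note added for clarity):
                    # attrs_list may arrive as a dict keyed by "0", "1", ... and is
                    # normalized back into an ordered list below.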
+ if type(attrs_list) == dict: + keys = [str(x) for x in range(len(attrs_list))] + assert set(attrs_list.keys()) == set(keys), "Unexpected keys" + attrs_list = [attrs_list[k] for k in keys] + + for i, attrs in enumerate(attrs_list): + pos = attrs["position"] + quat = attrs["orientation"] + + # For particle applier only, the orientation of the meta link matters (particle should shoot towards the negative z-axis) + # If the meta link is created based on the orientation of the first mesh that is a cone, we need to rotate it by 180 degrees + # because the cone is pointing in the wrong direction. + if meta_link_name == "particleapplier" and attrs["type"] == "cone": + assert ( + len(attrs_list) == 1 + ), f"Expected only one instance for meta_link {meta_link_name}_{ml_id}, but found {len(attrs_list)}" + quat = (R.from_quat(quat) * R.from_rotvec([np.pi, 0.0, 0.0])).as_quat() + + # Create metalink + create_metalink( + root_element=root, + metalink_name=f"{meta_link_name}_{ml_id}_{i}", + parent_link_name=parent_link_name, + pos=pos, + rpy=R.from_quat(quat).as_euler("xyz"), + ) + + # Export this URDF + return generate_urdf_from_xmltree( + root_element=root, + name=f"{obj_model}_with_metalinks", + dirpath=model_root_path, + unique_urdf=False, + ) + + +def convert_scene_urdf_to_json(urdf, json_path): + # First, load the requested objects from the URDF into OG + load_scene_from_urdf(urdf=urdf) + + # Play the simulator, then save + og.sim.play() + Path(os.path.dirname(json_path)).mkdir(parents=True, exist_ok=True) + og.sim.save(json_path=json_path) + + # Load the json, remove the init_info because we don't need it, then save it again + with open(json_path, "r") as f: + scene_info = json.load(f) + + scene_info.pop("init_info") + + with open(json_path, "w+") as f: + json.dump(scene_info, f, cls=NumpyEncoder, indent=4) + + +def load_scene_from_urdf(urdf): + # First, grab object info from the urdf + objs_info = get_objects_config_from_scene_urdf(urdf=urdf) + + # Load all the objects manually into a scene + scene = Scene(use_floor_plane=False) + og.sim.import_scene(scene) + + for obj_name, obj_info in objs_info.items(): + try: + if not os.path.exists( + DatasetObject.get_usd_path(obj_info["cfg"]["category"], obj_info["cfg"]["model"]).replace( + ".usd", ".encrypted.usd" + ) + ): + print("Missing object", obj_name) + continue + obj = DatasetObject( + name=obj_name, + **obj_info["cfg"], + ) + og.sim.import_object(obj) + obj.set_bbox_center_position_orientation(position=obj_info["bbox_pos"], orientation=obj_info["bbox_quat"]) + except Exception as e: + raise ValueError(f"Failed to load object {obj_name}") from e + + # Take a sim step + og.sim.step() + + +def get_objects_config_from_scene_urdf(urdf): + tree = ET.parse(urdf) + root = tree.getroot() + objects_cfg = dict() + get_objects_config_from_element(root, model_pose_info=objects_cfg) + return objects_cfg + + +def get_objects_config_from_element(element, model_pose_info): + # First pass through, populate the joint pose info + for ele in element: + if ele.tag == "joint": + name, pos, quat, fixed_jnt = get_joint_info(ele) + name = name.replace("-", "_") + model_pose_info[name] = { + "bbox_pos": pos, + "bbox_quat": quat, + "cfg": { + "fixed_base": fixed_jnt, + }, + } + + # Second pass through, import object models + for ele in element: + if ele.tag == "link": + # This is a valid object, import the model + name = ele.get("name").replace("-", "_") + if name == "world": + # Skip this + pass + else: + print(name) + assert name in model_pose_info, f"Did not find {name} 
in current model pose info!" + model_pose_info[name]["cfg"]["category"] = ele.get("category") + model_pose_info[name]["cfg"]["model"] = ele.get("model") + model_pose_info[name]["cfg"]["bounding_box"] = ( + string_to_array(ele.get("bounding_box")) if "bounding_box" in ele.keys() else None + ) + in_rooms = ele.get("rooms", "") + if in_rooms: + in_rooms = in_rooms.split(",") + model_pose_info[name]["cfg"]["in_rooms"] = in_rooms + model_pose_info[name]["cfg"]["scale"] = ( + string_to_array(ele.get("scale")) if "scale" in ele.keys() else None + ) + model_pose_info[name]["cfg"]["bddl_object_scope"] = ele.get("object_scope", None) + + # If there's children nodes, we iterate over those + for child in ele: + get_objects_config_from_element(child, model_pose_info=model_pose_info) + + +def get_joint_info(joint_element): + child, pos, quat, fixed_jnt = None, None, None, None + fixed_jnt = joint_element.get("type") == "fixed" + for ele in joint_element: + if ele.tag == "origin": + quat = R.from_euler(string_to_array(ele.get("rpy")), "xyz").as_quat() + pos = string_to_array(ele.get("xyz")) + elif ele.tag == "child": + child = ele.get("link") + return child, pos, quat, fixed_jnt From ff0d34f8fe3e64d90588045b6b776b3fc05e995b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:40:00 -0700 Subject: [PATCH 02/61] Scipy rotation to T --- omnigibson/utils/asset_conversion_utils.py | 37 +++++++++------------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 561c63fd5..34bbcf2a0 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1,4 +1,5 @@ import json +import math import os import shutil import xml.etree.ElementTree as ET @@ -9,13 +10,14 @@ from pathlib import Path import numpy as np -from scipy.spatial.transform import Rotation as R +import torch as th import omnigibson as og import omnigibson.lazy as lazy import trimesh from omnigibson.objects import DatasetObject from omnigibson.scenes import Scene from omnigibson.utils.usd_utils import create_primitive_mesh +import omnigibson.utils.transform_utils as T LIGHT_MAPPING = { 0: "Rect", @@ -636,9 +638,7 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): # because the cone is pointing in the wrong direction. This is already done in update_obj_urdf_with_metalinks; # we just need to make sure meta_link_in_parent_link_orn is updated correctly. 
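            # Hedged aside on the change below (not part of the commit itself): the SciPy
            # rotation calls are being swapped for omnigibson.utils.transform_utils (T).
            # Assuming T follows the same xyzw quaternion ordering the surrounding code
            # relies on, the two forms express the same extra 180-degree rotation about x:
            #   (R.from_quat(q) * R.from_rotvec([np.pi, 0.0, 0.0])).as_quat()
            #   T.quat_multiply(q, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0])))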
if meta_link_type == "particleapplier" and mesh_info_list[0]["type"] == "cone": - meta_link_in_parent_link_orn = ( - R.from_quat(meta_link_in_parent_link_orn) * R.from_rotvec([np.pi, 0.0, 0.0]) - ).as_quat() + meta_link_in_parent_link_orn = T.quat_multiply(meta_link_in_parent_link_orn, T.axisangle2quat(th.tensor([math.pi, 0., 0.]))) for i, mesh_info in enumerate(mesh_info_list): is_mesh = False @@ -682,15 +682,15 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): # Get the mesh/light pose in the meta link frame mesh_in_parent_link_tf = np.eye(4) - mesh_in_parent_link_tf[:3, :3] = R.from_quat(mesh_in_parent_link_orn).as_matrix() + mesh_in_parent_link_tf[:3, :3] = T.quat2mat(mesh_in_parent_link_orn) mesh_in_parent_link_tf[:3, 3] = mesh_in_parent_link_pos meta_link_in_parent_link_tf = np.eye(4) - meta_link_in_parent_link_tf[:3, :3] = R.from_quat(meta_link_in_parent_link_orn).as_matrix() + meta_link_in_parent_link_tf[:3, :3] = T.quat2mat(meta_link_in_parent_link_orn) meta_link_in_parent_link_tf[:3, 3] = meta_link_in_parent_link_pos mesh_in_meta_link_tf = np.linalg.inv(meta_link_in_parent_link_tf) @ mesh_in_parent_link_tf mesh_in_meta_link_pos, mesh_in_meta_link_orn = ( mesh_in_meta_link_tf[:3, 3], - R.from_matrix(mesh_in_meta_link_tf[:3, :3]).as_quat(), + T.mat2quat(mesh_in_meta_link_tf[:3, :3]), ) if is_light: @@ -724,9 +724,7 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) ) # Offset the position by half the height because in 3dsmax the origin of the cylinder is at the center of the base - mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( - np.array([0.0, 0.0, height_offset]) - ) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) elif mesh_type == "Cone": if not is_mesh: xform_prim.prim.GetAttribute("radius").Set(0.5) @@ -738,20 +736,15 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) ) # Flip the orientation of the z-axis because in 3dsmax the cone is pointing in the opposite direction - mesh_in_meta_link_orn = ( - R.from_quat(mesh_in_meta_link_orn) * R.from_rotvec([np.pi, 0.0, 0.0]) - ).as_quat() + mesh_in_meta_link_orn = T.quat_multiply(mesh_in_meta_link_orn, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0]))) # Offset the position by half the height because in 3dsmax the origin of the cone is at the center of the base - mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( - np.array([0.0, 0.0, height_offset]) - ) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) elif mesh_type == "Cube": if not is_mesh: xform_prim.prim.GetAttribute("size").Set(1.0) xform_prim.prim.GetAttribute("xformOp:scale").Set(lazy.pxr.Gf.Vec3f(*mesh_info["size"])) height_offset = mesh_info["size"][2] / 2.0 - mesh_in_meta_link_pos += R.from_quat(mesh_in_meta_link_orn).apply( - np.array([0.0, 0.0, height_offset]) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) ) elif mesh_type == "Sphere": if not is_mesh: @@ -903,7 +896,7 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # add_xform_properties(prim=link_prim) # link_prim.GetAttribute("xformOp:translate").Set(lazy.pxr.Gf.Vec3f(*atrr["xyz"])) # if atrr["rpy"] is not None: - # 
link_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatf(*(R.from_euler(atrr["rpy"], "xyz").as_quat()[[3, 0, 1, 2]]))) + # link_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatf(*(T.euler2quat(atrr["rpy"])[[3, 0, 1, 2]].tolist()))) # # link_prim.CreateAttribute("ig:orientation", lazy.pxr.Sdf.ValueTypeNames.Quatf) # link_prim.GetAttribute("ig:orientation").Set(lazy.pxr.Gf.Quatf(*atrr["rpy"])) @@ -1406,7 +1399,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): assert ( len(attrs_list) == 1 ), f"Expected only one instance for meta_link {meta_link_name}_{ml_id}, but found {len(attrs_list)}" - quat = (R.from_quat(quat) * R.from_rotvec([np.pi, 0.0, 0.0])).as_quat() + quat = T.quat_multiply(quat, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0]))) # Create metalink create_metalink( @@ -1414,7 +1407,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): metalink_name=f"{meta_link_name}_{ml_id}_{i}", parent_link_name=parent_link_name, pos=pos, - rpy=R.from_quat(quat).as_euler("xyz"), + rpy=T.quat2euler(quat), ) # Export this URDF @@ -1532,7 +1525,7 @@ def get_joint_info(joint_element): fixed_jnt = joint_element.get("type") == "fixed" for ele in joint_element: if ele.tag == "origin": - quat = R.from_euler(string_to_array(ele.get("rpy")), "xyz").as_quat() + quat = T.euler2quat(string_to_array(ele.get("rpy"))) pos = string_to_array(ele.get("xyz")) elif ele.tag == "child": child = ele.get("link") From 2b3a7f43173e5436885899c54318187cf5ed3207 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:29:11 -0700 Subject: [PATCH 03/61] Update public API of asset utils --- omnigibson/utils/asset_conversion_utils.py | 366 +++++++-------------- 1 file changed, 127 insertions(+), 239 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 34bbcf2a0..94808d584 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -9,7 +9,6 @@ from os.path import exists from pathlib import Path -import numpy as np import torch as th import omnigibson as og import omnigibson.lazy as lazy @@ -19,13 +18,13 @@ from omnigibson.utils.usd_utils import create_primitive_mesh import omnigibson.utils.transform_utils as T -LIGHT_MAPPING = { +_LIGHT_MAPPING = { 0: "Rect", 2: "Sphere", 4: "Disk", } -OBJECT_STATE_TEXTURES = { +_OBJECT_STATE_TEXTURES = { "burnt", "cooked", "frozen", @@ -33,7 +32,7 @@ "toggledon", } -MTL_MAP_TYPE_MAPPINGS = { +_MTL_MAP_TYPE_MAPPINGS = { "map_kd": "albedo", "map_bump": "normal", "map_pr": "roughness", @@ -44,15 +43,15 @@ "map_": "metalness", } -SPLIT_COLLISION_MESHES = False +_SPLIT_COLLISION_MESHES = False -META_LINK_RENAME_MAPPING = { +_META_LINK_RENAME_MAPPING = { "fillable": "container", "fluidsink": "particlesink", "fluidsource": "particlesource", } -ALLOWED_META_TYPES = { +_ALLOWED_META_TYPES = { "particlesource": "dimensionless", "togglebutton": "primitive", "attachment": "dimensionless", @@ -67,42 +66,42 @@ } -class NumpyEncoder(json.JSONEncoder): +class _TorchEncoder(json.JSONEncoder): def default(self, o): - if isinstance(o, np.ndarray): + if isinstance(o, th.Tensor): return o.tolist() return json.JSONEncoder.default(self, o) -def string_to_array(string): +def _space_string_to_tensor(string): """ - Converts a array string in mujoco xml to np.array. + Converts a array string in mujoco xml to th.Tensor. 
Examples: "0 1 2" => [0, 1, 2] Args: string (str): String to convert to an array Returns: - np.array: Numerical array equivalent of @string + th.Tensor: Numerical array equivalent of @string """ - return np.array([float(x) for x in string.split(" ")]) + return th.tensor([float(x) for x in string.split(" ")]) -def array_to_string(array): +def _tensor_to_space_script(array): """ Converts a numeric array into the string format in mujoco. Examples: [0, 1, 2] => "0 1 2" Args: - array (n-array): Array to convert to a string + array (th.Tensor): Array to convert to a string Returns: str: String equivalent of @array """ - return " ".join(["{}".format(x) for x in array]) + return " ".join(["{}".format(x) for x in array.tolist()]) -def split_obj_file(obj_fpath): +def _split_obj_file_into_connected_components(obj_fpath): """ - Splits obj file at @obj_fpath into individual obj files + Splits obj file at @obj_fpath into individual obj files that each contain a single connected mesh. """ # Open file in trimesh obj = trimesh.load(obj_fpath, file_type="obj", force="mesh") @@ -122,9 +121,9 @@ def split_obj_file(obj_fpath): return len(obj_bodies) -def split_objs_in_urdf(urdf_fpath, name_suffix="split", mesh_fpath_offset="."): +def _split_all_objs_in_urdf(urdf_fpath, name_suffix="split", mesh_fpath_offset="."): """ - Splits the obj reference in its urdf + Splits the obj references in the given URDF """ tree = ET.parse(urdf_fpath) root = tree.getroot() @@ -160,7 +159,7 @@ def recursively_find_collision_meshes(ele): parent.remove(col) # Create new objs first so we know how many we need to create in the URDF obj_fpath = col_copy.find("./geometry/mesh").attrib["filename"] - n_new_objs = split_obj_file(obj_fpath=f"{urdf_dir}/{mesh_fpath_offset}/{obj_fpath}") + n_new_objs = _split_obj_file_into_connected_components(obj_fpath=f"{urdf_dir}/{mesh_fpath_offset}/{obj_fpath}") # Create the new objs in the URDF for i in range(n_new_objs): # Copy collision again @@ -180,7 +179,7 @@ def recursively_find_collision_meshes(ele): return urdf_out_path -def set_mtl_albedo(mtl_prim, texture): +def _set_mtl_albedo(mtl_prim, texture): mtl = "diffuse_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set @@ -188,7 +187,7 @@ def set_mtl_albedo(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_normal(mtl_prim, texture): +def _set_mtl_normal(mtl_prim, texture): mtl = "normalmap_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set @@ -196,7 +195,7 @@ def set_mtl_normal(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_ao(mtl_prim, texture): +def _set_mtl_ao(mtl_prim, texture): mtl = "ao_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set @@ -204,7 +203,7 @@ def set_mtl_ao(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_roughness(mtl_prim, texture): +def _set_mtl_roughness(mtl_prim, texture): mtl = "reflectionroughness_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) lazy.omni.usd.create_material_input( @@ -218,7 +217,7 @@ def set_mtl_roughness(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_metalness(mtl_prim, texture): +def _set_mtl_metalness(mtl_prim, texture): mtl = "metallic_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, 
texture, lazy.pxr.Sdf.ValueTypeNames.Asset) lazy.omni.usd.create_material_input(mtl_prim, "metallic_texture_influence", 1.0, lazy.pxr.Sdf.ValueTypeNames.Float) @@ -227,8 +226,7 @@ def set_mtl_metalness(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_opacity(mtl_prim, texture): - return +def _set_mtl_opacity(mtl_prim, texture): mtl = "opacity_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) lazy.omni.usd.create_material_input(mtl_prim, "enable_opacity", True, lazy.pxr.Sdf.ValueTypeNames.Bool) @@ -238,7 +236,7 @@ def set_mtl_opacity(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -def set_mtl_emission(mtl_prim, texture): +def _set_mtl_emission(mtl_prim, texture): mtl = "emissive_color_texture" lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) lazy.omni.usd.create_material_input(mtl_prim, "enable_emission", True, lazy.pxr.Sdf.ValueTypeNames.Bool) @@ -247,26 +245,14 @@ def set_mtl_emission(mtl_prim, texture): print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") -RENDERING_CHANNEL_MAPPINGS = { - "diffuse": set_mtl_albedo, - "albedo": set_mtl_albedo, - "normal": set_mtl_normal, - "ao": set_mtl_ao, - "roughness": set_mtl_roughness, - "metalness": set_mtl_metalness, - "opacity": set_mtl_opacity, - "emission": set_mtl_emission, -} - - -def rename_prim(prim, name): +def _rename_prim(prim, name): path_from = prim.GetPrimPath().pathString path_to = f"{'/'.join(path_from.split('/')[:-1])}/{name}" lazy.omni.kit.commands.execute("MovePrim", path_from=path_from, path_to=path_to) return lazy.omni.isaac.core.utils.prims.get_prim_at_path(path_to) -def get_visual_objs_from_urdf(urdf_path): +def _get_visual_objs_from_urdf(urdf_path): # Will return a dictionary mapping link name (e.g.: base_link) to dictionary of owned visual meshes mapping mesh # name to visual obj file for that mesh visual_objs = OrderedDict() @@ -288,13 +274,13 @@ def get_visual_objs_from_urdf(urdf_path): return visual_objs -def copy_object_state_textures(obj_category, obj_model, dataset_root): +def _copy_object_state_textures(obj_category, obj_model, dataset_root): obj_root_dir = f"{dataset_root}/objects/{obj_category}/{obj_model}" old_mat_fpath = f"{obj_root_dir}/material" new_mat_fpath = f"{obj_root_dir}/usd/materials" for mat_file in os.listdir(old_mat_fpath): should_copy = False - for object_state in OBJECT_STATE_TEXTURES: + for object_state in _OBJECT_STATE_TEXTURES: if object_state in mat_file.lower(): should_copy = True break @@ -302,7 +288,7 @@ def copy_object_state_textures(obj_category, obj_model, dataset_root): shutil.copy(f"{old_mat_fpath}/{mat_file}", new_mat_fpath) -def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path, usd_path, dataset_root): +def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path, usd_path, dataset_root): usd_dir = os.path.dirname(usd_path) # # mat_dir = f"{model_root_path}/material/{obj_category}" if \ # # obj_category in {"ceilings", "walls", "floors"} else f"{model_root_path}/material" @@ -348,7 +334,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path # Grab all visual objs for this object urdf_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/{obj_model}_with_metalinks.urdf" - visual_objs = get_visual_objs_from_urdf(urdf_path) + visual_objs = _get_visual_objs_from_urdf(urdf_path) # Extract absolute paths to mtl files for each link link_mtl_files = 
OrderedDict() # maps link name to dictionary mapping mesh name to mtl file @@ -390,12 +376,22 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path map_filename = os.path.basename(map_file) mat_files[mtl_name].append(map_filename) mat_old_paths[mtl_name].append(map_file) - mtl_infos[mtl_name][MTL_MAP_TYPE_MAPPINGS[map_type.lower()]] = map_filename + mtl_infos[mtl_name][_MTL_MAP_TYPE_MAPPINGS[map_type.lower()]] = map_filename # Next, for each material information, we create a new material and port the material files to the USD directory mat_new_fpath = os.path.join(usd_dir, "materials") Path(mat_new_fpath).mkdir(parents=True, exist_ok=True) shaders = OrderedDict() # maps mtl name to shader prim + rendering_channel_mappings = { + "diffuse": _set_mtl_albedo, + "albedo": _set_mtl_albedo, + "normal": _set_mtl_normal, + "ao": _set_mtl_ao, + "roughness": _set_mtl_roughness, + "metalness": _set_mtl_metalness, + "opacity": _set_mtl_opacity, + "emission": _set_mtl_emission, + } for mtl_name, mtl_info in mtl_infos.items(): for mat_old_path in mat_old_paths[mtl_name]: shutil.copy(os.path.join(mtl_old_dirs[mtl_name], mat_old_path), mat_new_fpath) @@ -412,7 +408,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path # Apply all rendering channels for this material for mat_type, mat_file in mtl_info.items(): - render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + render_channel_fcn = rendering_channel_mappings.get(mat_type, None) if render_channel_fcn is not None: render_channel_fcn(mat, os.path.join("materials", mat_file)) else: @@ -420,7 +416,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path print(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") # Rename material - mat = rename_prim(prim=mat, name=mtl_name) + mat = _rename_prim(prim=mat, name=mtl_name) shade = lazy.pxr.UsdShade.Material(mat) shaders[mtl_name] = shade print(f"Created material {mtl_name}:", mtl_created_list[0]) @@ -456,7 +452,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path ) # Lastly, we copy object_state texture maps that are state-conditioned; e.g.: cooked, soaked, etc. 
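# --- Illustrative aside (not part of the patch): the state-conditioned texture copy mentioned
# below reduces to a substring match against the _OBJECT_STATE_TEXTURES set; a self-contained
# sketch (the filename is made up):
state_textures = {"burnt", "cooked", "frozen", "soaked", "toggledon"}
mat_file = "apple_base_cooked_albedo.png"        # hypothetical texture filename
should_copy = any(state in mat_file.lower() for state in state_textures)
print(should_copy)                               # True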
- copy_object_state_textures(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) + _copy_object_state_textures(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) # ################################### # @@ -510,7 +506,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path # # Check if any valid rendering channel # mat_type = link_mat_file.split("_")[-1].split(".")[0].lower() # # Apply the material if it exists - # render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + # render_channel_fcn = rendering_channel_mappings.get(mat_type, None) # if render_channel_fcn is not None: # render_channel_fcn(mat, os.path.join("materials", link_mat_file)) # else: @@ -531,7 +527,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path # # Check if any valid rendering channel # mat_type = mat_file.split("_")[-1].split(".")[0].lower() # # Apply the material if it exists - # render_channel_fcn = RENDERING_CHANNEL_MAPPINGS.get(mat_type, None) + # render_channel_fcn = rendering_channel_mappings.get(mat_type, None) # if render_channel_fcn is not None: # render_channel_fcn(default_mat, os.path.join("materials", mat_file)) # default_mat_is_used = True @@ -544,7 +540,7 @@ def import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path # stage.RemovePrim(default_mat.GetPrimPath()) -def add_xform_properties(prim): +def _add_xform_properties(prim): properties_to_remove = [ "xformOp:rotateX", "xformOp:rotateXZY", @@ -594,7 +590,7 @@ def add_xform_properties(prim): xformable.SetXformOpOrder([xform_op_translate, xform_op_rot, xform_op_scale]) -def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): +def _process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): """ Process a meta link by creating visual meshes or lights below it """ @@ -603,11 +599,11 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): if meta_link_type in ["container"]: return - assert meta_link_type in ALLOWED_META_TYPES - if ALLOWED_META_TYPES[meta_link_type] not in ["primitive", "light"] and meta_link_type != "particlesource": + assert meta_link_type in _ALLOWED_META_TYPES + if _ALLOWED_META_TYPES[meta_link_type] not in ["primitive", "light"] and meta_link_type != "particlesource": return - is_light = ALLOWED_META_TYPES[meta_link_type] == "light" + is_light = _ALLOWED_META_TYPES[meta_link_type] == "light" for link_id, mesh_info_list in meta_link_infos.items(): if len(mesh_info_list) == 0: @@ -638,13 +634,15 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): # because the cone is pointing in the wrong direction. This is already done in update_obj_urdf_with_metalinks; # we just need to make sure meta_link_in_parent_link_orn is updated correctly. 
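# --- Illustrative aside (not part of the patch): the pose math a few lines below re-expresses a
# mesh pose from the parent-link frame into the meta-link frame by composing homogeneous
# transforms, i.e. T_mesh_in_meta = inv(T_meta_in_parent) @ T_mesh_in_parent. A minimal
# torch-only sketch with made-up translations and identity rotations:
import torch as th

def make_tf(pos, rot=None):
    # Build a 4x4 homogeneous transform from a translation and optional 3x3 rotation
    tf = th.eye(4)
    tf[:3, :3] = th.eye(3) if rot is None else rot
    tf[:3, 3] = pos
    return tf

mesh_in_parent = make_tf(th.tensor([0.10, 0.00, 0.30]))
meta_in_parent = make_tf(th.tensor([0.10, 0.00, 0.20]))
mesh_in_meta = th.linalg.inv(meta_in_parent) @ mesh_in_parent
print(mesh_in_meta[:3, 3])                       # tensor([0., 0., 0.1]) -- mesh sits 10 cm above the meta link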
if meta_link_type == "particleapplier" and mesh_info_list[0]["type"] == "cone": - meta_link_in_parent_link_orn = T.quat_multiply(meta_link_in_parent_link_orn, T.axisangle2quat(th.tensor([math.pi, 0., 0.]))) + meta_link_in_parent_link_orn = T.quat_multiply( + meta_link_in_parent_link_orn, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0])) + ) for i, mesh_info in enumerate(mesh_info_list): is_mesh = False if is_light: # Create a light - light_type = LIGHT_MAPPING[mesh_info["type"]] + light_type = _LIGHT_MAPPING[mesh_info["type"]] prim_path = f"/{obj_model}/lights_{link_id}_0_link/light_{i}" prim = getattr(lazy.pxr.UsdLux, f"{light_type}Light").Define(stage, prim_path).GetPrim() lazy.pxr.UsdLux.ShapingAPI.Apply(prim).GetShapingConeAngleAttr().Set(180.0) @@ -671,23 +669,23 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): else getattr(lazy.pxr.UsdGeom, mesh_type).Define(stage, prim_path).GetPrim() ) - add_xform_properties(prim=prim) + _add_xform_properties(prim=prim) # Make sure mesh_prim has XForm properties xform_prim = lazy.omni.isaac.core.prims.xform_prim.XFormPrim(prim_path=prim_path) # Get the mesh/light pose in the parent link frame - mesh_in_parent_link_pos, mesh_in_parent_link_orn = np.array(mesh_info["position"]), np.array( + mesh_in_parent_link_pos, mesh_in_parent_link_orn = th.tensor(mesh_info["position"]), th.tensor( mesh_info["orientation"] ) # Get the mesh/light pose in the meta link frame - mesh_in_parent_link_tf = np.eye(4) + mesh_in_parent_link_tf = th.eye(4) mesh_in_parent_link_tf[:3, :3] = T.quat2mat(mesh_in_parent_link_orn) mesh_in_parent_link_tf[:3, 3] = mesh_in_parent_link_pos - meta_link_in_parent_link_tf = np.eye(4) + meta_link_in_parent_link_tf = th.eye(4) meta_link_in_parent_link_tf[:3, :3] = T.quat2mat(meta_link_in_parent_link_orn) meta_link_in_parent_link_tf[:3, 3] = meta_link_in_parent_link_pos - mesh_in_meta_link_tf = np.linalg.inv(meta_link_in_parent_link_tf) @ mesh_in_parent_link_tf + mesh_in_meta_link_tf = th.linalg.inv(meta_link_in_parent_link_tf) @ mesh_in_parent_link_tf mesh_in_meta_link_pos, mesh_in_meta_link_orn = ( mesh_in_meta_link_tf[:3, 3], T.mat2quat(mesh_in_meta_link_tf[:3, :3]), @@ -695,7 +693,7 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): if is_light: xform_prim.prim.GetAttribute("inputs:color").Set( - lazy.pxr.Gf.Vec3f(*np.array(mesh_info["color"]) / 255.0) + lazy.pxr.Gf.Vec3f(*(th.tensor(mesh_info["color"]) / 255.0).tolist()) ) xform_prim.prim.GetAttribute("inputs:intensity").Set(mesh_info["intensity"]) if light_type == "Rect": @@ -724,7 +722,7 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) ) # Offset the position by half the height because in 3dsmax the origin of the cylinder is at the center of the base - mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, th.tensor([0.0, 0.0, height_offset])) elif mesh_type == "Cone": if not is_mesh: xform_prim.prim.GetAttribute("radius").Set(0.5) @@ -736,16 +734,17 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): lazy.pxr.Gf.Vec3f(desired_radius * 2, desired_radius * 2, desired_height) ) # Flip the orientation of the z-axis because in 3dsmax the cone is pointing in the opposite direction - mesh_in_meta_link_orn = T.quat_multiply(mesh_in_meta_link_orn, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0]))) + 
mesh_in_meta_link_orn = T.quat_multiply( + mesh_in_meta_link_orn, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0])) + ) # Offset the position by half the height because in 3dsmax the origin of the cone is at the center of the base - mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, th.tensor([0.0, 0.0, height_offset])) elif mesh_type == "Cube": if not is_mesh: xform_prim.prim.GetAttribute("size").Set(1.0) xform_prim.prim.GetAttribute("xformOp:scale").Set(lazy.pxr.Gf.Vec3f(*mesh_info["size"])) height_offset = mesh_info["size"][2] / 2.0 - mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, np.array([0.0, 0.0, height_offset])) - ) + mesh_in_meta_link_pos += T.quat_apply(mesh_in_meta_link_orn, th.tensor([0.0, 0.0, height_offset])) elif mesh_type == "Sphere": if not is_mesh: xform_prim.prim.GetAttribute("radius").Set(0.5) @@ -765,7 +764,7 @@ def process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): ) -def process_glass_link(prim): +def _process_glass_link(prim): # Update any glass parts to use the glass material instead glass_prim_paths = [] for gchild in prim.GetChildren(): @@ -804,8 +803,6 @@ def process_glass_link(prim): ) -# TODO: Handle metalinks -# TODO: Import heights per link folder into USD folder def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_channels=False): # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" @@ -858,7 +855,7 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # TODO: Use parent link name for link_name, link_metadata in meta_links.items(): for meta_link_type, meta_link_infos in link_metadata.items(): - process_meta_link(stage, obj_model, meta_link_type, meta_link_infos) + _process_meta_link(stage, obj_model, meta_link_type, meta_link_infos) # Apply temporary fillable meshes. # TODo: Disable after fillable meshes are backported into 3ds Max. @@ -869,40 +866,8 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha print("Done processing meta links") - # # Update metalink info - # if "meta_links" in data["metadata"]: - # meta_links = data["metadata"].pop("meta_links") - # print("meta_links:", meta_links) - # # TODO: Use parent link name - # for parent_link_name, child_link_attrs in meta_links.items(): - # for meta_link_name, ml_attrs in child_link_attrs.items(): - # for ml_id, attrs_list in ml_attrs.items(): - # for i, attrs in enumerate(attrs_list): - # # # Create new Xform prim that will contain info - # ml_prim_path = ( - # f"{prim.GetPath()}/{meta_link_name}_{ml_id}_{i}_link" - # ) - # link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(ml_prim_path) - # assert ( - # link_prim - # ), f"Should have found valid metalink prim at prim path: {ml_prim_path}" - # - # link_prim.CreateAttribute("ig:is_metalink", lazy.pxr.Sdf.ValueTypeNames.Bool) - # link_prim.GetAttribute("ig:is_metalink").Set(True) - # - # # TODO! 
Validate that this works - # # test on water sink 02: water sink location is 0.1, 0.048, 0.32 - # # water source location is -0.03724, 0.008, 0.43223 - # add_xform_properties(prim=link_prim) - # link_prim.GetAttribute("xformOp:translate").Set(lazy.pxr.Gf.Vec3f(*atrr["xyz"])) - # if atrr["rpy"] is not None: - # link_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatf(*(T.euler2quat(atrr["rpy"])[[3, 0, 1, 2]].tolist()))) - # - # link_prim.CreateAttribute("ig:orientation", lazy.pxr.Sdf.ValueTypeNames.Quatf) - # link_prim.GetAttribute("ig:orientation").Set(lazy.pxr.Gf.Quatf(*atrr["rpy"])) - # Iterate over dict and replace any lists of dicts as dicts of dicts (with each dict being indexed by an integer) - data = recursively_replace_list_of_dict(data) + data = _recursively_replace_list_of_dict(data) print("Done recursively replacing") @@ -920,18 +885,6 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # Store remaining data as metadata prim.SetCustomData(data) - # for k, v in data.items(): - # print(f"setting custom data {k}") - # print(v) - # print() - # prim.SetCustomDataByKey(k, v) - # # if k == "metadata": - # # print(v) - # # print() - # # prim.SetCustomDataByKey(k, v["link_bounding_boxes"]["base_link"]["collision"]["axis_aligned"]["transform"]) - # # input("succeeded!") - # # else: - # # prim.SetCustomDataByKey(k, v) # Add material channels # print(f"prim children: {prim.GetChildren()}") @@ -941,9 +894,8 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # mat_prim = looks_prim.GetChildren()[0] #lazy.omni.isaac.core.utils.prims.get_prim_at_path(mat_prim_path) # print(f"looks children: {looks_prim.GetChildren()}") # print(f"mat prim: {mat_prim}") - print("irc") if import_render_channels: - import_rendering_channels( + _import_rendering_channels( obj_prim=prim, obj_category=obj_category, obj_model=obj_model, @@ -951,22 +903,18 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha usd_path=usd_path, dataset_root=dataset_root, ) - print("done irc") for link, link_tags in data["metadata"]["link_tags"].items(): if "glass" in link_tags: - process_glass_link(prim.GetChild(link)) + _process_glass_link(prim.GetChild(link)) - print("done glass") # Save stage stage.Save() - print("done save") - # Delete stage reference and clear the sim stage variable, opening the dummy stage along the way del stage -def recursively_replace_list_of_dict(dic): +def _recursively_replace_list_of_dict(dic): for k, v in dic.items(): print(f"k: {k}") if v is None: @@ -1029,72 +977,12 @@ def recursively_replace_list_of_dict(dic): v[i] = lazy.pxr.lazy.pxr.UsdGeom.Tokens.none if isinstance(v, dict): # Iterate through nested dictionaries - dic[k] = recursively_replace_list_of_dict(v) + dic[k] = _recursively_replace_list_of_dict(v) return dic -def import_fillable_mesh(stage, obj_model, mesh): - def _create_mesh(prim_path): - stage.DefinePrim(prim_path, "Mesh") - mesh = lazy.pxr.UsdGeom.Mesh.Define(stage, prim_path) - return mesh - - def _create_fixed_joint(prim_path, body0, body1): - # Create the joint - joint = lazy.pxr.UsdPhysics.FixedJoint.Define(stage, prim_path) - - # Possibly add body0, body1 targets - if body0 is not None: - assert stage.GetPrimAtPath(body0).IsValid(), f"Invalid body0 path specified: {body0}" - joint.GetBody0Rel().SetTargets([lazy.pxr.Sdf.Path(body0)]) - if body1 is not None: - assert stage.GetPrimAtPath(body1).IsValid(), f"Invalid body1 path specified: {body1}" - 
joint.GetBody1Rel().SetTargets([lazy.pxr.Sdf.Path(body1)]) - - # Get the prim pointed to at this path - joint_prim = stage.GetPrimAtPath(prim_path) - - # Apply joint API interface - lazy.pxr.PhysxSchema.PhysxJointAPI.Apply(joint_prim) - - # Possibly (un-/)enable this joint - joint_prim.GetAttribute("physics:jointEnabled").Set(True) - - # Return this joint - return joint_prim - - container_link_path = f"/{obj_model}/container_0_0_link" - container_link = stage.DefinePrim(container_link_path, "Xform") - - for i, submesh in enumerate(mesh.split()): - mesh_prim = _create_mesh(prim_path=f"{container_link_path}/mesh_{i}").GetPrim() - - # Write mesh data - mesh_prim.GetAttribute("points").Set(lazy.pxr.Vt.Vec3fArray.FromNumpy(submesh.vertices)) - mesh_prim.GetAttribute("normals").Set(lazy.pxr.Vt.Vec3fArray.FromNumpy(submesh.vertex_normals)) - face_indices = [] - face_vertex_counts = [] - for face_idx, face_vertices in enumerate(submesh.faces): - face_indices.extend(face_vertices) - face_vertex_counts.append(len(face_vertices)) - mesh_prim.GetAttribute("faceVertexCounts").Set(np.array(face_vertex_counts, dtype=int)) - mesh_prim.GetAttribute("faceVertexIndices").Set(np.array(face_indices, dtype=int)) - # mesh_prim.GetAttribute("primvars:st").Set(lazy.pxr.Vt.Vec2fArray.FromNumpy(np.zeros((len(submesh.vertices), 2)))) - - # Make invisible - lazy.pxr.UsdGeom.Imageable(mesh_prim).MakeInvisible() - - # Create fixed joint - obj_root_path = f"/{obj_model}/base_link" - _create_fixed_joint( - prim_path=f"{obj_root_path}/container_0_{i}_joint", - body0=f"{obj_root_path}", - body1=f"{container_link_path}", - ) - - -def create_import_config(): +def _create_urdf_import_config(): # Set up import configuration _, import_config = lazy.omni.kit.commands.execute("URDFCreateImportConfig") drive_mode = ( @@ -1119,17 +1007,17 @@ def create_import_config(): def import_obj_urdf(obj_category, obj_model, dataset_root, skip_if_exist=False): # Preprocess input URDF to account for metalinks - urdf_path = update_obj_urdf_with_metalinks( + urdf_path = _add_metalinks_to_urdf( obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root ) # Import URDF - cfg = create_import_config() + cfg = _create_urdf_import_config() # Check if filepath exists usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd" if not (skip_if_exist and exists(usd_path)): - if SPLIT_COLLISION_MESHES: + if _SPLIT_COLLISION_MESHES: print(f"Converting collision meshes from {obj_category}, {obj_model}...") - urdf_path = split_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") + urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") print(f"Importing {obj_category}, {obj_model} into path {usd_path}...") # Only import if it doesn't exist lazy.omni.kit.commands.execute( @@ -1141,10 +1029,10 @@ def import_obj_urdf(obj_category, obj_model, dataset_root, skip_if_exist=False): print(f"Imported {obj_category}, {obj_model}") -def pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): +def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): space = "\t" if use_tabs else " " * 4 for i, node in enumerate(current): - pretty_print_xml(node, current, i, depth + 1) + _pretty_print_xml(node, current, i, depth + 1) if parent is not None: if index == 0: parent.text = "\n" + (space * depth) @@ -1154,26 +1042,26 @@ def pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): current.tail = "\n" + (space * (depth - 1)) -def convert_to_string(inp): +def 
_convert_to_xml_string(inp): """ - Converts any type of {bool, int, float, list, tuple, array, string, np.str_} into an mujoco-xml compatible string. - Note that an input string / np.str_ results in a no-op action. + Converts any type of {bool, int, float, list, tuple, array, string, th.Tensor} into an mujoco-xml compatible string. + Note that an input string / th.Tensor results in a no-op action. Args: inp: Input to convert to string Returns: str: String equivalent of @inp """ - if type(inp) in {list, tuple, np.ndarray}: - return array_to_string(inp) - elif type(inp) in {int, float, bool, np.float32, np.float64, np.int32, np.int64}: + if type(inp) in {list, tuple, th.Tensor}: + return _tensor_to_space_script(inp) + elif type(inp) in {int, float, bool, th.float32, th.float64, th.int32, th.int64}: return str(inp).lower() - elif type(inp) in {str, np.str_}: + elif type(inp) in {str}: return inp else: raise ValueError("Unsupported type received: got {}".format(type(inp))) -def create_joint( +def _create_urdf_joint( name, parent, child, @@ -1191,8 +1079,8 @@ def create_joint( name (str): Name of this joint parent (str or ET.Element): Name of parent link or parent link element itself for this joint child (str or ET.Element): Name of child link or child link itself for this joint - pos (list or tuple or array): (x,y,z) offset pos values when creating the collision body - rpy (list or tuple or array): (r,p,y) offset rot values when creating the joint + pos (list or tuple or th.Tensor): (x,y,z) offset pos values when creating the collision body + rpy (list or tuple or th.Tensor): (r,p,y) offset rot values when creating the joint joint_type (str): What type of joint to create. Must be one of {fixed, revolute, prismatic} axis (None or 3-tuple): If specified, should be (x,y,z) axis corresponding to DOF damping (None or float): If specified, should be damping value to apply to joint @@ -1207,7 +1095,7 @@ def create_joint( origin = ET.SubElement( jnt, "origin", - attrib={"rpy": convert_to_string(rpy), "xyz": convert_to_string(pos)}, + attrib={"rpy": _convert_to_xml_string(rpy), "xyz": _convert_to_xml_string(pos)}, ) # Make sure parent and child are both names (str) -- if they're not str already, we assume it's the element ref if not isinstance(parent, str): @@ -1219,12 +1107,12 @@ def create_joint( child = ET.SubElement(jnt, "child", link=child) # Add additional parameters if specified if axis is not None: - ax = ET.SubElement(jnt, "axis", xyz=convert_to_string(axis)) + ax = ET.SubElement(jnt, "axis", xyz=_convert_to_xml_string(axis)) dynamic_params = {} if damping is not None: - dynamic_params["damping"] = convert_to_string(damping) + dynamic_params["damping"] = _convert_to_xml_string(damping) if friction is not None: - dynamic_params["friction"] = convert_to_string(friction) + dynamic_params["friction"] = _convert_to_xml_string(friction) if dynamic_params: dp = ET.SubElement(jnt, "dynamics", **dynamic_params) if limits is not None: @@ -1234,7 +1122,7 @@ def create_joint( return jnt -def create_link(name, subelements=None, mass=None, inertia=None): +def _create_urdf_link(name, subelements=None, mass=None, inertia=None): """ Generates XML link element Args: @@ -1257,7 +1145,7 @@ def create_link(name, subelements=None, mass=None, inertia=None): if mass is not None or inertia is not None: inertial = ET.SubElement(link, "inertial") if mass is not None: - ET.SubElement(inertial, "mass", value=convert_to_string(mass)) + ET.SubElement(inertial, "mass", value=_convert_to_xml_string(mass)) if inertia is not 
None: axes = ["ixx", "iyy", "izz", "ixy", "ixz", "iyz"] inertia_vals = {ax: str(i) for ax, i in zip(axes, inertia)} @@ -1267,7 +1155,7 @@ def create_link(name, subelements=None, mass=None, inertia=None): return link -def create_metalink( +def _create_urdf_metalink( root_element, metalink_name, parent_link_name="base_link", @@ -1275,7 +1163,7 @@ def create_metalink( rpy=(0, 0, 0), ): # Create joint - jnt = create_joint( + jnt = _create_urdf_joint( name=f"{metalink_name}_joint", parent=parent_link_name, child=f"{metalink_name}_link", @@ -1284,7 +1172,7 @@ def create_metalink( joint_type="fixed", ) # Create child link - link = create_link( + link = _create_urdf_link( name=f"{metalink_name}_link", mass=0.0001, inertia=[0.00001, 0.00001, 0.00001, 0, 0, 0], @@ -1295,7 +1183,7 @@ def create_metalink( root_element.append(link) -def generate_urdf_from_xmltree(root_element, name, dirpath, unique_urdf=False): +def _save_xmltree_as_urdf(root_element, name, dirpath, unique_urdf=False): """ Generates a URDF file corresponding to @xmltree at @dirpath with name @name.urdf. Args: @@ -1316,7 +1204,7 @@ def generate_urdf_from_xmltree(root_element, name, dirpath, unique_urdf=False): # Write top level header line first f.write('\n') # Convert xml to string form and write to file - pretty_print_xml(current=root_element) + _pretty_print_xml(current=root_element) xml_str = ET.tostring(root_element, encoding="unicode") f.write(xml_str) @@ -1324,7 +1212,7 @@ def generate_urdf_from_xmltree(root_element, name, dirpath, unique_urdf=False): return fpath -def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): +def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" urdf_path = f"{model_root_path}/{obj_model}.urdf" @@ -1348,8 +1236,8 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): for link, meta_link in metadata["meta_links"].items(): for meta_link_name in list(meta_link.keys()): meta_link_attrs = meta_link[meta_link_name] - if meta_link_name in META_LINK_RENAME_MAPPING: - metadata["meta_links"][link][META_LINK_RENAME_MAPPING[meta_link_name]] = meta_link_attrs + if meta_link_name in _META_LINK_RENAME_MAPPING: + metadata["meta_links"][link][_META_LINK_RENAME_MAPPING[meta_link_name]] = meta_link_attrs del metadata["meta_links"][link][meta_link_name] with open(metadata_fpath, "w") as f: @@ -1360,8 +1248,8 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): for parent_link_name, child_link_attrs in meta_links.items(): for meta_link_name, ml_attrs in child_link_attrs.items(): assert ( - meta_link_name in ALLOWED_META_TYPES - ), f"meta_link_name {meta_link_name} not in {ALLOWED_META_TYPES}" + meta_link_name in _ALLOWED_META_TYPES + ), f"meta_link_name {meta_link_name} not in {_ALLOWED_META_TYPES}" # TODO: Reenable after fillable meshes are backported into 3ds Max. # Temporarily disable importing of fillable meshes. 
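# --- Illustrative aside (not part of the patch): _create_urdf_metalink above ultimately appends a
# fixed joint plus a near-massless child link to the URDF tree. A standard-library sketch of the
# kind of XML fragment it produces (link names and offsets are made up):
import xml.etree.ElementTree as ET

robot = ET.Element("robot", name="demo_object")
jnt = ET.SubElement(robot, "joint", name="heatsource_0_0_joint", type="fixed")
ET.SubElement(jnt, "origin", rpy="0 0 0", xyz="0.05 0 0.10")
ET.SubElement(jnt, "parent", link="base_link")
ET.SubElement(jnt, "child", link="heatsource_0_0_link")
link = ET.SubElement(robot, "link", name="heatsource_0_0_link")
inertial = ET.SubElement(link, "inertial")
ET.SubElement(inertial, "mass", value="0.0001")
print(ET.tostring(robot, encoding="unicode"))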
@@ -1370,7 +1258,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): for ml_id, attrs_list in ml_attrs.items(): if len(attrs_list) > 0: - if ALLOWED_META_TYPES[meta_link_name] != "dimensionless": + if _ALLOWED_META_TYPES[meta_link_name] != "dimensionless": # If not dimensionless, we create one meta link for a list of meshes below it attrs_list = [attrs_list[0]] else: @@ -1402,7 +1290,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): quat = T.quat_multiply(quat, T.axisangle2quat(th.tensor([math.pi, 0.0, 0.0]))) # Create metalink - create_metalink( + _create_urdf_metalink( root_element=root, metalink_name=f"{meta_link_name}_{ml_id}_{i}", parent_link_name=parent_link_name, @@ -1411,7 +1299,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): ) # Export this URDF - return generate_urdf_from_xmltree( + return _save_xmltree_as_urdf( root_element=root, name=f"{obj_model}_with_metalinks", dirpath=model_root_path, @@ -1421,7 +1309,7 @@ def update_obj_urdf_with_metalinks(obj_category, obj_model, dataset_root): def convert_scene_urdf_to_json(urdf, json_path): # First, load the requested objects from the URDF into OG - load_scene_from_urdf(urdf=urdf) + _load_scene_from_urdf(urdf=urdf) # Play the simulator, then save og.sim.play() @@ -1435,12 +1323,12 @@ def convert_scene_urdf_to_json(urdf, json_path): scene_info.pop("init_info") with open(json_path, "w+") as f: - json.dump(scene_info, f, cls=NumpyEncoder, indent=4) + json.dump(scene_info, f, cls=_TorchEncoder, indent=4) -def load_scene_from_urdf(urdf): +def _load_scene_from_urdf(urdf): # First, grab object info from the urdf - objs_info = get_objects_config_from_scene_urdf(urdf=urdf) + objs_info = _get_objects_config_from_scene_urdf(urdf=urdf) # Load all the objects manually into a scene scene = Scene(use_floor_plane=False) @@ -1468,19 +1356,19 @@ def load_scene_from_urdf(urdf): og.sim.step() -def get_objects_config_from_scene_urdf(urdf): +def _get_objects_config_from_scene_urdf(urdf): tree = ET.parse(urdf) root = tree.getroot() objects_cfg = dict() - get_objects_config_from_element(root, model_pose_info=objects_cfg) + _get_objects_config_from_element(root, model_pose_info=objects_cfg) return objects_cfg -def get_objects_config_from_element(element, model_pose_info): +def _get_objects_config_from_element(element, model_pose_info): # First pass through, populate the joint pose info for ele in element: if ele.tag == "joint": - name, pos, quat, fixed_jnt = get_joint_info(ele) + name, pos, quat, fixed_jnt = _get_joint_info(ele) name = name.replace("-", "_") model_pose_info[name] = { "bbox_pos": pos, @@ -1504,29 +1392,29 @@ def get_objects_config_from_element(element, model_pose_info): model_pose_info[name]["cfg"]["category"] = ele.get("category") model_pose_info[name]["cfg"]["model"] = ele.get("model") model_pose_info[name]["cfg"]["bounding_box"] = ( - string_to_array(ele.get("bounding_box")) if "bounding_box" in ele.keys() else None + _space_string_to_tensor(ele.get("bounding_box")) if "bounding_box" in ele.keys() else None ) in_rooms = ele.get("rooms", "") if in_rooms: in_rooms = in_rooms.split(",") model_pose_info[name]["cfg"]["in_rooms"] = in_rooms model_pose_info[name]["cfg"]["scale"] = ( - string_to_array(ele.get("scale")) if "scale" in ele.keys() else None + _space_string_to_tensor(ele.get("scale")) if "scale" in ele.keys() else None ) model_pose_info[name]["cfg"]["bddl_object_scope"] = ele.get("object_scope", None) # If there's children nodes, we iterate over 
those for child in ele: - get_objects_config_from_element(child, model_pose_info=model_pose_info) + _get_objects_config_from_element(child, model_pose_info=model_pose_info) -def get_joint_info(joint_element): +def _get_joint_info(joint_element): child, pos, quat, fixed_jnt = None, None, None, None fixed_jnt = joint_element.get("type") == "fixed" for ele in joint_element: if ele.tag == "origin": - quat = T.euler2quat(string_to_array(ele.get("rpy"))) - pos = string_to_array(ele.get("xyz")) + quat = T.euler2quat(_space_string_to_tensor(ele.get("rpy"))) + pos = _space_string_to_tensor(ele.get("xyz")) elif ele.tag == "child": child = ele.get("link") return child, pos, quat, fixed_jnt From e18cf9764d0f9e352031f7002d5726c5b266c932 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:51:26 -0700 Subject: [PATCH 04/61] Documentation and cleanup --- omnigibson/utils/asset_conversion_utils.py | 403 ++++++++++++++++++--- 1 file changed, 351 insertions(+), 52 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 94808d584..a4468ed21 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -67,6 +67,15 @@ class _TorchEncoder(json.JSONEncoder): + """ + Custom JSON encoder for PyTorch tensors. + + This encoder converts PyTorch tensors to lists, making them JSON serializable. + + Methods: + default(o): Overrides the default method to handle PyTorch tensors. + """ + def default(self, o): if isinstance(o, th.Tensor): return o.tolist() @@ -75,13 +84,16 @@ def default(self, o): def _space_string_to_tensor(string): """ - Converts a array string in mujoco xml to th.Tensor. + Converts a space-separated string of numbers into a PyTorch tensor. + Examples: - "0 1 2" => [0, 1, 2] + "0 1 2" => tensor([0., 1., 2.]) + Args: - string (str): String to convert to an array + string (str): Space-separated string of numbers to convert. + Returns: - th.Tensor: Numerical array equivalent of @string + torch.Tensor: Tensor containing the numerical values from the input string. """ return th.tensor([float(x) for x in string.split(" ")]) @@ -89,10 +101,13 @@ def _space_string_to_tensor(string): def _tensor_to_space_script(array): """ Converts a numeric array into the string format in mujoco. + Examples: [0, 1, 2] => "0 1 2" + Args: array (th.Tensor): Array to convert to a string + Returns: str: String equivalent of @array """ @@ -101,7 +116,18 @@ def _tensor_to_space_script(array): def _split_obj_file_into_connected_components(obj_fpath): """ - Splits obj file at @obj_fpath into individual obj files that each contain a single connected mesh. + Splits an OBJ file into individual OBJ files, each containing a single connected mesh. + + Args: + obj_fpath (str): The file path to the input OBJ file. + + Returns: + int: The number of individual connected mesh files created. + + The function performs the following steps: + 1. Loads the OBJ file using trimesh. + 2. Splits the loaded mesh into individual connected components. + 3. Saves each connected component as a separate OBJ file in the same directory as the input file. 
""" # Open file in trimesh obj = trimesh.load(obj_fpath, file_type="obj", force="mesh") @@ -123,7 +149,19 @@ def _split_obj_file_into_connected_components(obj_fpath): def _split_all_objs_in_urdf(urdf_fpath, name_suffix="split", mesh_fpath_offset="."): """ - Splits the obj references in the given URDF + Splits the OBJ references in the given URDF file into separate files for each connected component. + + This function parses a URDF file, finds all collision mesh references, splits the referenced OBJ files into + connected components, and updates the URDF file to reference these new OBJ files. The updated URDF file is + saved with a new name. + + Args: + urdf_fpath (str): The file path to the URDF file to be processed. + name_suffix (str, optional): Suffix to append to the output URDF file name. Defaults to "split". + mesh_fpath_offset (str, optional): Offset path to the directory containing the mesh files. Defaults to ".". + + Returns: + str: The file path to the newly created URDF file with split OBJ references. """ tree = ET.parse(urdf_fpath) root = tree.getroot() @@ -246,6 +284,16 @@ def _set_mtl_emission(mtl_prim, texture): def _rename_prim(prim, name): + """ + Renames a given prim to a new name. + + Args: + prim (Usd.Prim): The prim to be renamed. + name (str): The new name for the prim. + + Returns: + Usd.Prim: The renamed prim at the new path. + """ path_from = prim.GetPrimPath().pathString path_to = f"{'/'.join(path_from.split('/')[:-1])}/{name}" lazy.omni.kit.commands.execute("MovePrim", path_from=path_from, path_to=path_to) @@ -253,8 +301,17 @@ def _rename_prim(prim, name): def _get_visual_objs_from_urdf(urdf_path): - # Will return a dictionary mapping link name (e.g.: base_link) to dictionary of owned visual meshes mapping mesh - # name to visual obj file for that mesh + """ + Extracts visual objects from a URDF file. + + Args: + urdf_path (str): Path to the URDF file. + + Returns: + OrderedDict: A dictionary mapping link names to dictionaries of visual meshes. Each link name (e.g., 'base_link') + maps to another dictionary where the keys are visual mesh names and the values are the corresponding + visual object file paths. If no visual object file is found for a mesh, the value will be None. + """ visual_objs = OrderedDict() # Parse URDF tree = ET.parse(urdf_path) @@ -275,6 +332,17 @@ def _get_visual_objs_from_urdf(urdf_path): def _copy_object_state_textures(obj_category, obj_model, dataset_root): + """ + Copies specific object state texture files from the old material directory to the new material directory. + + Args: + obj_category (str): The category of the object. + obj_model (str): The model of the object. + dataset_root (str): The root directory of the dataset. + + Returns: + None + """ obj_root_dir = f"{dataset_root}/objects/{obj_category}/{obj_model}" old_mat_fpath = f"{obj_root_dir}/material" new_mat_fpath = f"{obj_root_dir}/usd/materials" @@ -289,6 +357,29 @@ def _copy_object_state_textures(obj_category, obj_model, dataset_root): def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_path, usd_path, dataset_root): + """ + Imports and binds rendering channels for a given object in an Omniverse USD stage. + + This function performs the following steps: + 1. Removes existing material prims from the object. + 2. Extracts visual objects and their associated material files from the object's URDF file. + 3. Copies material files to the USD directory and creates new materials. + 4. Applies rendering channels to the new materials. + 5. 
Binds the new materials to the visual meshes of the object. + 6. Copies state-conditioned texture maps (e.g., cooked, soaked) for the object. + + Args: + obj_prim (Usd.Prim): The USD prim representing the object. + obj_category (str): The category of the object (e.g., "ceilings", "walls"). + obj_model (str): The model name of the object. + model_root_path (str): The root path of the model files. + usd_path (str): The path to the USD file. + dataset_root (str): The root path of the dataset containing the object files. + + Raises: + AssertionError: If more than one material file is found in an OBJ file. + AssertionError: If a valid visual prim is not found for a mesh. + """ usd_dir = os.path.dirname(usd_path) # # mat_dir = f"{model_root_path}/material/{obj_category}" if \ # # obj_category in {"ceilings", "walls", "floors"} else f"{model_root_path}/material" @@ -541,6 +632,25 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat def _add_xform_properties(prim): + """ + Adds and configures transformation properties for a given USD prim. + + This function ensures that the specified USD prim has the necessary transformation + properties (scale, translate, and orient) and removes any unwanted transformation + properties. It also sets the order of the transformation operations. + + Args: + prim (pxr.Usd.Prim): The USD prim to which the transformation properties will be added. + + Notes: + - The function removes the following properties if they exist: + "xformOp:rotateX", "xformOp:rotateXZY", "xformOp:rotateY", "xformOp:rotateYXZ", + "xformOp:rotateYZX", "xformOp:rotateZ", "xformOp:rotateZYX", "xformOp:rotateZXY", + "xformOp:rotateXYZ", "xformOp:transform". + - If the prim does not have "xformOp:scale", "xformOp:translate", or "xformOp:orient", + these properties are added with default values. + - The order of the transformation operations is set to translate, orient, and scale. + """ properties_to_remove = [ "xformOp:rotateX", "xformOp:rotateXZY", @@ -592,7 +702,28 @@ def _add_xform_properties(prim): def _process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): """ - Process a meta link by creating visual meshes or lights below it + Process a meta link by creating visual meshes or lights below it. + + Args: + stage (pxr.Usd.Stage): The USD stage where the meta link will be processed. + obj_model (str): The object model name. + meta_link_type (str): The type of the meta link. Must be one of the allowed meta types. + meta_link_infos (dict): A dictionary containing meta link information. The keys are link IDs and the values are lists of mesh information dictionaries. + + Returns: + None + + Raises: + AssertionError: If the meta_link_type is not in the allowed meta types or if the mesh_info_list has unexpected keys or invalid number of meshes. + ValueError: If an invalid light type or mesh type is encountered. + + Notes: + - Temporarily disables importing of fillable meshes for "container" meta link type. + - Handles specific meta link types such as "togglebutton", "particleapplier", "particleremover", "particlesink", and "particlesource". + - For "particleapplier" meta link type, adjusts the orientation if the mesh type is "cone". + - Creates lights or primitive shapes based on the meta link type and mesh information. + - Sets various attributes for lights and meshes, including color, intensity, size, and scale. + - Makes meshes invisible and sets their local pose. """ # TODO: Reenable after fillable meshes are backported into 3ds Max. 
# Temporarily disable importing of fillable meshes. @@ -765,6 +896,19 @@ def _process_meta_link(stage, obj_model, meta_link_type, meta_link_infos): def _process_glass_link(prim): + """ + Processes the given USD prim to update any glass parts to use the glass material. + + This function traverses the children of the given prim to find any Mesh-type prims + that do not have a CollisionAPI, indicating they are visual elements. It collects + the paths of these prims and ensures they are bound to a glass material. + + Args: + prim (pxr.Usd.Prim): The USD prim to process. + + Raises: + AssertionError: If no glass prim paths are found. + """ # Update any glass parts to use the glass material instead glass_prim_paths = [] for gchild in prim.GetChildren(): @@ -804,6 +948,23 @@ def _process_glass_link(prim): def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_channels=False): + """ + Imports metadata for a given object model from the dataset. This metadata consist of information + that is NOT included in the URDF file and instead included in the various JSON files shipped in + iGibson and OmniGibson datasets. + + Args: + obj_category (str): The category of the object. + obj_model (str): The model name of the object. + dataset_root (str): The root directory of the dataset. + import_render_channels (bool, optional): Flag to import rendering channels. Defaults to False. + + Raises: + ValueError: If the bounding box size is not found in the metadata. + + Returns: + None + """ # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" usd_path = f"{model_root_path}/usd/{obj_model}.usd" @@ -915,6 +1076,29 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha def _recursively_replace_list_of_dict(dic): + """ + Recursively processes a dictionary to replace specific values and structures that can be stored + in USD. + + This function performs the following transformations: + - Replaces `None` values with `lazy.pxr.lazy.pxr.UsdGeom.Tokens.none`. + - Converts empty lists or tuples to `lazy.pxr.Vt.Vec3fArray()`. + - Converts lists of dictionaries to a dictionary with string keys. + - Converts nested lists or tuples to specific `lazy.pxr.Vt` array types based on their length: + - Length 2: `lazy.pxr.Vt.Vec2fArray` + - Length 3: `lazy.pxr.Vt.Vec3fArray` + - Length 4: `lazy.pxr.Vt.Vec4fArray` + - Converts lists of integers to `lazy.pxr.Vt.IntArray`. + - Converts lists of floats to `lazy.pxr.Vt.FloatArray`. + - Replaces `None` values within lists with `lazy.pxr.lazy.pxr.UsdGeom.Tokens.none`. + - Recursively processes nested dictionaries. + + Args: + dic (dict): The dictionary to process. + + Returns: + dict: The processed dictionary with the specified transformations applied. + """ for k, v in dic.items(): print(f"k: {k}") if v is None: @@ -945,31 +1129,9 @@ def _recursively_replace_list_of_dict(dic): else: raise ValueError(f"No support for storing matrices of length {len(v[0])}!") elif isinstance(v[0], int): - # if len(v) == 1: - # # Do nothing - # pass - # elif len(v) == 2: - # dic[k] = lazy.pxr.Gf.Vec2i(v) - # elif len(v) == 3: - # dic[k] = lazy.pxr.Gf.Vec3i(v) - # elif len(v) == 4: - # dic[k] = lazy.pxr.Gf.Vec4i(v) - # else: dic[k] = lazy.pxr.Vt.IntArray(v) - # raise ValueError(f"No support for storing numeric arrays of length {len(v)}! 
Array: {v}") elif isinstance(v[0], float): - # if len(v) == 1: - # # Do nothing - # pass - # elif len(v) == 2: - # dic[k] = lazy.pxr.Gf.Vec2f(v) - # elif len(v) == 3: - # dic[k] = lazy.pxr.Gf.Vec3f(v) - # elif len(v) == 4: - # dic[k] = lazy.pxr.Gf.Vec4f(v) - # else: dic[k] = lazy.pxr.Vt.FloatArray(v) - # raise ValueError(f"No support for storing numeric arrays of length {len(v)}! Array: {v}") else: # Replace any Nones for i, ele in enumerate(v): @@ -983,6 +1145,18 @@ def _recursively_replace_list_of_dict(dic): def _create_urdf_import_config(): + """ + Creates and configures a URDF import configuration. + + This function sets up the import configuration for URDF files by executing the + "URDFCreateImportConfig" command and adjusting various settings such as drive type, + joint merging, convex decomposition, base fixing, inertia tensor import, distance scale, + density, drive strength, position drive damping, self-collision, up vector, default prim + creation, and physics scene creation. + + Returns: + import_config: The configured URDF import configuration object. + """ # Set up import configuration _, import_config = lazy.omni.kit.commands.execute("URDFCreateImportConfig") drive_mode = ( @@ -1005,31 +1179,52 @@ def _create_urdf_import_config(): return import_config -def import_obj_urdf(obj_category, obj_model, dataset_root, skip_if_exist=False): +def import_obj_urdf(obj_category, obj_model, dataset_root): + """ + Imports an object from a URDF file into the current stage. + + Args: + obj_category (str): The category of the object. + obj_model (str): The model name of the object. + dataset_root (str): The root directory of the dataset. + + Returns: + None + """ # Preprocess input URDF to account for metalinks - urdf_path = _add_metalinks_to_urdf( - obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root - ) + urdf_path = _add_metalinks_to_urdf(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) # Import URDF cfg = _create_urdf_import_config() # Check if filepath exists usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd" - if not (skip_if_exist and exists(usd_path)): - if _SPLIT_COLLISION_MESHES: - print(f"Converting collision meshes from {obj_category}, {obj_model}...") - urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") - print(f"Importing {obj_category}, {obj_model} into path {usd_path}...") - # Only import if it doesn't exist - lazy.omni.kit.commands.execute( - "URDFParseAndImportFile", - urdf_path=urdf_path, - import_config=cfg, - dest_path=usd_path, - ) - print(f"Imported {obj_category}, {obj_model}") + if _SPLIT_COLLISION_MESHES: + print(f"Converting collision meshes from {obj_category}, {obj_model}...") + urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") + print(f"Importing {obj_category}, {obj_model} into path {usd_path}...") + # Only import if it doesn't exist + lazy.omni.kit.commands.execute( + "URDFParseAndImportFile", + urdf_path=urdf_path, + import_config=cfg, + dest_path=usd_path, + ) + print(f"Imported {obj_category}, {obj_model}") def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): + """ + Recursively formats an XML element tree to be pretty-printed with indentation. + + Args: + current (xml.etree.ElementTree.Element): The current XML element to format. + parent (xml.etree.ElementTree.Element, optional): The parent XML element. Defaults to None. 
+ index (int, optional): The index of the current element in the parent's children. Defaults to -1. + depth (int, optional): The current depth in the XML tree, used for indentation. Defaults to 0. + use_tabs (bool, optional): If True, use tabs for indentation; otherwise, use spaces. Defaults to False. + + Returns: + None + """ space = "\t" if use_tabs else " " * 4 for i, node in enumerate(current): _pretty_print_xml(node, current, i, depth + 1) @@ -1044,12 +1239,17 @@ def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): def _convert_to_xml_string(inp): """ - Converts any type of {bool, int, float, list, tuple, array, string, th.Tensor} into an mujoco-xml compatible string. - Note that an input string / th.Tensor results in a no-op action. + Converts any type of {bool, int, float, list, tuple, array, string, th.Tensor} into a URDF-compatible string. + Note that an input string / th.Tensor results in a no-op action. + Args: inp: Input to convert to string + Returns: str: String equivalent of @inp + + Raises: + ValueError: If the input type is unsupported. """ if type(inp) in {list, tuple, th.Tensor}: return _tensor_to_space_script(inp) @@ -1074,7 +1274,7 @@ def _create_urdf_joint( limits=None, ): """ - Generates XML joint + Generates URDF joint Args: name (str): Name of this joint parent (str or ET.Element): Name of parent link or parent link element itself for this joint @@ -1124,7 +1324,7 @@ def _create_urdf_joint( def _create_urdf_link(name, subelements=None, mass=None, inertia=None): """ - Generates XML link element + Generates URDF link element Args: name (str): Name of this link subelements (None or list): If specified, specifies all nested elements that should belong to this link @@ -1162,6 +1362,19 @@ def _create_urdf_metalink( pos=(0, 0, 0), rpy=(0, 0, 0), ): + """ + Creates the appropriate URDF joint and link for a meta link and appends it to the root element. + + Args: + root_element (Element): The root XML element to which the metalink will be appended. + metalink_name (str): The name of the metalink to be created. + parent_link_name (str, optional): The name of the parent link. Defaults to "base_link". + pos (tuple, optional): The position of the joint in the form (x, y, z). Defaults to (0, 0, 0). + rpy (tuple, optional): The roll, pitch, and yaw of the joint in the form (r, p, y). Defaults to (0, 0, 0). + + Returns: + None + """ # Create joint jnt = _create_urdf_joint( name=f"{metalink_name}_joint", @@ -1213,6 +1426,20 @@ def _save_xmltree_as_urdf(root_element, name, dirpath, unique_urdf=False): def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): + """ + Adds meta links to a URDF file based on metadata. + + This function reads a URDF file and corresponding metadata, processes the metadata to add meta links, and then + saves an updated version of the URDF file with these meta links. + + Args: + obj_category (str): The category of the object. + obj_model (str): The model name of the object. + dataset_root (str): The root directory of the dataset. + + Returns: + str: The path to the updated URDF file. + """ # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" urdf_path = f"{model_root_path}/{obj_model}.urdf" @@ -1308,6 +1535,17 @@ def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): def convert_scene_urdf_to_json(urdf, json_path): + """ + Converts a scene from a URDF file to a JSON file. 
+ + This function loads the scene described by the URDF file into the OmniGibson simulator, + plays the simulation, and saves the scene to a JSON file. After saving, it removes the + "init_info" from the JSON file and saves it again. + + Args: + urdf (str): The file path to the URDF file describing the scene. + json_path (str): The file path where the JSON file will be saved. + """ # First, load the requested objects from the URDF into OG _load_scene_from_urdf(urdf=urdf) @@ -1327,6 +1565,24 @@ def convert_scene_urdf_to_json(urdf, json_path): def _load_scene_from_urdf(urdf): + """ + Loads a scene from a URDF file. + + Args: + urdf (str): Path to the URDF file. + + Raises: + ValueError: If an object fails to load. + + This function performs the following steps: + 1. Extracts object configuration information from the URDF file. + 2. Creates a new scene without a floor plane and imports it into the simulator. + 3. Iterates over the objects' information and attempts to load each object into the scene. + - If the USD file for an object does not exist, it prints a message and skips the object. + - If an object fails to load, it raises a ValueError with the object's name. + 4. Sets the bounding box center position and orientation for each loaded object. + 5. Takes a simulation step to finalize the scene setup. + """ # First, grab object info from the urdf objs_info = _get_objects_config_from_scene_urdf(urdf=urdf) @@ -1357,6 +1613,15 @@ def _load_scene_from_urdf(urdf): def _get_objects_config_from_scene_urdf(urdf): + """ + Parses a URDF file to extract object configuration information. + + Args: + urdf (str): Path to the URDF file. + + Returns: + dict: A dictionary containing the configuration of objects extracted from the URDF file. + """ tree = ET.parse(urdf) root = tree.getroot() objects_cfg = dict() @@ -1365,6 +1630,27 @@ def _get_objects_config_from_scene_urdf(urdf): def _get_objects_config_from_element(element, model_pose_info): + """ + Extracts and populates object configuration information from an URDF element. + + This function processes an URDF element to extract joint and link information, + populating the provided `model_pose_info` dictionary with the relevant data. + + Args: + element (xml.etree.ElementTree.Element): The URDF element containing object configuration data. + model_pose_info (dict): A dictionary to be populated with the extracted configuration information. + + The function performs two passes through the URDF element: + 1. In the first pass, it extracts joint information and populates `model_pose_info` with joint pose data. + 2. In the second pass, it extracts link information, imports object models, and updates `model_pose_info` with + additional configuration details such as category, model, bounding box, rooms, scale, and object scope. + + The function also handles nested elements by recursively calling itself for child elements. + + Note: + - Joint names with hyphens are replaced with underscores. + - The function asserts that each link name (except "world") is present in `model_pose_info` after the first pass. + """ # First pass through, populate the joint pose info for ele in element: if ele.tag == "joint": @@ -1409,6 +1695,19 @@ def _get_objects_config_from_element(element, model_pose_info): def _get_joint_info(joint_element): + """ + Extracts joint information from an URDF element. + + Args: + joint_element (xml.etree.ElementTree.Element): The URDF element containing joint information. 
+ + Returns: + tuple: A tuple containing: + - child (str or None): The name of the child link, or None if not specified. + - pos (numpy.ndarray or None): The position as a tensor, or None if not specified. + - quat (numpy.ndarray or None): The orientation as a quaternion, or None if not specified. + - fixed_jnt (bool): True if the joint is fixed, False otherwise. + """ child, pos, quat, fixed_jnt = None, None, None, None fixed_jnt = joint_element.get("type") == "fixed" for ele in joint_element: From 80e8ce79ddcd87b918800baf33af6abba6051fed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 11:45:45 -0700 Subject: [PATCH 05/61] Start work on obj import --- omnigibson/macros.py | 1 + omnigibson/utils/asset_conversion_utils.py | 65 ++++++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/omnigibson/macros.py b/omnigibson/macros.py index b9f730808..07635049f 100644 --- a/omnigibson/macros.py +++ b/omnigibson/macros.py @@ -60,6 +60,7 @@ def determine_gm_path(default_path, env_var_name): # can override assets_path and dataset_path from environment variable gm.ASSET_PATH = determine_gm_path(os.path.join("data", "assets"), "OMNIGIBSON_ASSET_PATH") gm.DATASET_PATH = determine_gm_path(os.path.join("data", "og_dataset"), "OMNIGIBSON_DATASET_PATH") +gm.USER_ASSETS_PATH = determine_gm_path(os.path.join("data", "user_assets"), "OMNIGIBSON_USER_ASSETS_PATH") gm.KEY_PATH = determine_gm_path(os.path.join("data", "omnigibson.key"), "OMNIGIBSON_KEY_PATH") # Which GPU to use -- None will result in omni automatically using an appropriate GPU. Otherwise, set with either diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index a4468ed21..adc2b48d0 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1,6 +1,7 @@ import json import math import os +import pathlib import shutil import xml.etree.ElementTree as ET from collections import OrderedDict @@ -13,6 +14,7 @@ import omnigibson as og import omnigibson.lazy as lazy import trimesh +from omnigibson.macros import gm from omnigibson.objects import DatasetObject from omnigibson.scenes import Scene from omnigibson.utils.usd_utils import create_primitive_mesh @@ -1717,3 +1719,66 @@ def _get_joint_info(joint_element): elif ele.tag == "child": child = ele.get("link") return child, pos, quat, fixed_jnt + + +def _generate_collision_meshes(trimesh_mesh, hull_count=32, discard_not_volume=True): + """ + Generates a set of collision meshes from a trimesh mesh using CoACD. + + Args: + trimesh_mesh (trimesh.Trimesh): The trimesh mesh to generate the collision mesh from. + + Returns: + List[trimesh.Trimesh]: The collision meshes. + """ + try: + import coacd + except ImportError: + raise ImportError("Please install the `coacd` package to use this function.") + + # Get the vertices and faces + coacd_mesh = coacd.Mesh(trimesh_mesh.vertices, trimesh_mesh.faces) + + # Run CoACD with the hull count + result = coacd.run_coacd( + coacd_mesh, + max_convex_hull=hull_count, + ) + + # Convert the returned vertices and faces to trimesh meshes + # and assert that they are volumes (and if not, discard them if required) + hulls = [] + for vs, fs in result: + hull = trimesh.Trimesh(vertices=vs, faces=fs, process=False) + if discard_not_volume and not hull.is_volume: + continue + hulls.append(hull) + + # Assert that we got _some_ collision meshes + assert len(hulls) > 0, "No collision meshes generated!" 
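# Every hull that survives the filter above is watertight with consistent winding (that is what
# trimesh's is_volume verifies), so each piece can serve directly as a convex collision body;
# if CoACD produced nothing usable, failing loudly here beats silently emitting an object with
# no collision geometry.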
+ + return hulls + + +def generate_urdf_for_obj(obj_file_path, category, mdl, collision_file_path=None, collision_generation_option="coacd"): + # Load the mesh + mesh: trimesh.Trimesh = trimesh.load(obj_file_path, force="mesh", process=False) + + # Either load, or generate, the collision mesh + if collision_file_path is not None: + collision_mesh = trimesh.load(collision_file_path, force="mesh", process=False) + else: + if collision_generation_option == "coacd": + collision_meshes = _generate_collision_meshes(mesh) + collision_mesh = trimesh.util.concatenate(collision_meshes) + elif collision_generation_option == "convex": + collision_mesh = mesh.convex_hull + else: + raise ValueError(f"Unsupported collision generation option: {collision_generation_option}") + + # Create a directory for the object + obj_dir = pathlib.Path(gm.USER_ASSETS_PATH) / "objects" / category / mdl + assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" + obj_dir.mkdir(parents=True) + + # Start the XML file From cf0db52a3aeff0d3a4c0b5b3a164bc3b38d31a96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:54:37 -0700 Subject: [PATCH 06/61] Some more work, including replacing print with log --- omnigibson/utils/asset_conversion_utils.py | 256 ++++++++++++++++----- 1 file changed, 200 insertions(+), 56 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index adc2b48d0..ab2a51eb3 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1,8 +1,11 @@ +import io import json import math import os import pathlib import shutil +import tempfile +from xml.dom import minidom import xml.etree.ElementTree as ET from collections import OrderedDict from copy import deepcopy @@ -224,7 +227,7 @@ def _set_mtl_albedo(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_normal(mtl_prim, texture): @@ -232,7 +235,7 @@ def _set_mtl_normal(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_ao(mtl_prim, texture): @@ -240,7 +243,7 @@ def _set_mtl_ao(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, mtl, texture, lazy.pxr.Sdf.ValueTypeNames.Asset) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_roughness(mtl_prim, texture): @@ -254,7 +257,7 @@ def _set_mtl_roughness(mtl_prim, texture): ) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_metalness(mtl_prim, texture): @@ -263,7 +266,7 @@ def _set_mtl_metalness(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, "metallic_texture_influence", 1.0, lazy.pxr.Sdf.ValueTypeNames.Float) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - 
print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_opacity(mtl_prim, texture): @@ -273,7 +276,7 @@ def _set_mtl_opacity(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, "enable_opacity_texture", True, lazy.pxr.Sdf.ValueTypeNames.Bool) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _set_mtl_emission(mtl_prim, texture): @@ -282,7 +285,7 @@ def _set_mtl_emission(mtl_prim, texture): lazy.omni.usd.create_material_input(mtl_prim, "enable_emission", True, lazy.pxr.Sdf.ValueTypeNames.Bool) # Verify it was set shade = lazy.omni.usd.get_shader_from_material(mtl_prim) - print(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") + log.debug(f"mtl {mtl}: {shade.GetInput(mtl).Get()}") def _rename_prim(prim, name): @@ -327,7 +330,7 @@ def _get_visual_objs_from_urdf(urdf_path): visual_mesh_name = sub_ele.get("name", "visuals").replace("-", "_") obj_file = None if sub_ele.find(".//mesh") is None else sub_ele.find(".//mesh").get("filename") if obj_file is None: - print(f"Warning: No obj file found associated with {name}/{visual_mesh_name}!") + log.debug(f"Warning: No obj file found associated with {name}/{visual_mesh_name}!") visual_objs[name][visual_mesh_name] = obj_file return visual_objs @@ -405,7 +408,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat for subprim in looks_prim.GetChildren(): if subprim.GetPrimTypeInfo().GetTypeName() != "Material": continue - print( + log.debug( f"Removed material prim {subprim.GetPath()}:", stage.RemovePrim(subprim.GetPath()), ) @@ -420,7 +423,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # ) # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) # default_mat = rename_prim(prim=default_mat, name=f"default_material") - # print("Created default material:", default_mat.GetPath()) + # log.debug("Created default material:", default_mat.GetPath()) # # # We may delete this default material if it's never used # default_mat_is_used = False @@ -506,13 +509,13 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat render_channel_fcn(mat, os.path.join("materials", mat_file)) else: # Warn user that we didn't find the correct rendering channel - print(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") + log.debug(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") # Rename material mat = _rename_prim(prim=mat, name=mtl_name) shade = lazy.pxr.UsdShade.Material(mat) shaders[mtl_name] = shade - print(f"Created material {mtl_name}:", mtl_created_list[0]) + log.debug(f"Created material {mtl_name}:", mtl_created_list[0]) # Bind each (visual) mesh to its appropriate material in the object # We'll loop over each link, create a list of 2-tuples each consisting of (mesh_prim_path, mtl_name) to be bound @@ -539,7 +542,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat visual_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mesh_prim_path) assert visual_prim, f"Error: Did not find valid visual prim at {mesh_prim_path}!" 
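# For context on the binding call below: the strongerThanDescendants token asks UsdShade to treat
# this binding as authoritative, so it overrides any material bindings that may already be authored
# on descendant prims of the imported mesh instead of being overridden by them.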
# Bind the created link material to the visual prim - print(f"Binding material {mtl_name}, shader {shaders[mtl_name]}, to prim {mesh_prim_path}...") + log.debug(f"Binding material {mtl_name}, shader {shaders[mtl_name]}, to prim {mesh_prim_path}...") lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind( shaders[mtl_name], lazy.pxr.UsdShade.Tokens.strongerThanDescendants ) @@ -557,8 +560,8 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # # This could be a link, check if it owns a visual subprim # link_name = prim.GetName() # visual_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{prim.GetPrimPath().pathString}/visuals") - # print(f"path: {prim.GetPrimPath().pathString}/visuals") - # print(f"visual prim: {visual_prim}") + # log.debug(f"path: {prim.GetPrimPath().pathString}/visuals") + # log.debug(f"visual prim: {visual_prim}") # # if visual_prim: # # Aggregate all material files for this prim @@ -569,7 +572,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # link_mat_files.append(mat_file) # mat_files.remove(mat_file) # # Potentially write material files for this prim if we have any valid materials - # print("link_mat_files:", link_mat_files) + # log.debug("link_mat_files:", link_mat_files) # if not link_mat_files: # # Bind default material to the visual prim # shade = lazy.pxr.UsdShade.Material(default_mat) @@ -584,7 +587,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # mtl_name="OmniPBR", # mtl_created_list=mtl_created_list, # ) - # print(f"Created material for link {link_name}:", mtl_created_list[0]) + # log.debug(f"Created material for link {link_name}:", mtl_created_list[0]) # mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) # # shade = lazy.pxr.UsdShade.Material(mat) @@ -604,7 +607,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # render_channel_fcn(mat, os.path.join("materials", link_mat_file)) # else: # # Warn user that we didn't find the correct rendering channel - # print(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") + # log.warning(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") # # # Rename material # mat = rename_prim(prim=mat, name=f"material_{link_name}") @@ -612,7 +615,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # # For any remaining materials, we write them to the default material # # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_material_0") # # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_default") - # print(f"default mat: {default_mat}, obj: {obj_category}, {prim.GetPrimPath().pathString}") + # log.debug(f"default mat: {default_mat}, obj: {obj_category}, {prim.GetPrimPath().pathString}") # for mat_file in mat_files: # # Copy this file into the materials folder # mat_fpath = os.path.join(usd_dir, "materials") @@ -626,7 +629,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # default_mat_is_used = True # else: # # Warn user that we didn't find the correct rendering channel - # print(f"Warning: could not find rendering channel function for material: {mat_type}") + # log.warning(f"Could not find rendering channel function for material: {mat_type}") # # # Possibly delete the default 
material prim if it was never used # if not default_mat_is_used: @@ -970,9 +973,7 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" usd_path = f"{model_root_path}/usd/{obj_model}.usd" - print("Loading", usd_path, "for metadata import.") - - print("Start metadata import") + log.debug("Loading", usd_path, "for metadata import.") # Load model lazy.omni.isaac.core.utils.stage.open_stage(usd_path) @@ -1013,7 +1014,7 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha # Grab light info if any meta_links = data["metadata"].get("meta_links", dict()) - print("Process meta links") + log.debug("Process meta links") # TODO: Use parent link name for link_name, link_metadata in meta_links.items(): @@ -1027,12 +1028,12 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha mesh = trimesh.load(fillable_path, force="mesh") import_fillable_mesh(stage, obj_model, mesh) - print("Done processing meta links") + log.debug("Done processing meta links") # Iterate over dict and replace any lists of dicts as dicts of dicts (with each dict being indexed by an integer) data = _recursively_replace_list_of_dict(data) - print("Done recursively replacing") + log.debug("Done recursively replacing") # Create attributes for bb, offset, category, model and store values prim.CreateAttribute("ig:nativeBB", lazy.pxr.Sdf.ValueTypeNames.Vector3f) @@ -1044,19 +1045,19 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha prim.GetAttribute("ig:category").Set(obj_category) prim.GetAttribute("ig:model").Set(obj_model) - print(f"data: {data}") + log.debug(f"data: {data}") # Store remaining data as metadata prim.SetCustomData(data) # Add material channels - # print(f"prim children: {prim.GetChildren()}") + # log.debug(f"prim children: {prim.GetChildren()}") # looks_prim_path = f"{str(prim.GetPrimPath())}/Looks" # looks_prim = prim.GetChildren()[0] #lazy.omni.isaac.core.utils.prims.get_prim_at_path(looks_prim_path) # mat_prim_path = f"{str(prim.GetPrimPath())}/Looks/material_material_0" # mat_prim = looks_prim.GetChildren()[0] #lazy.omni.isaac.core.utils.prims.get_prim_at_path(mat_prim_path) - # print(f"looks children: {looks_prim.GetChildren()}") - # print(f"mat prim: {mat_prim}") + # log.debug(f"looks children: {looks_prim.GetChildren()}") + # log.debug(f"mat prim: {mat_prim}") if import_render_channels: _import_rendering_channels( obj_prim=prim, @@ -1102,7 +1103,6 @@ def _recursively_replace_list_of_dict(dic): dict: The processed dictionary with the specified transformations applied. 
""" for k, v in dic.items(): - print(f"k: {k}") if v is None: # Replace None dic[k] = lazy.pxr.lazy.pxr.UsdGeom.Tokens.none @@ -1118,7 +1118,6 @@ def _recursively_replace_list_of_dict(dic): # dic[k] = [] # for vv in v: # dic[k] += vv - print("v0: ", v[0]) if len(v[0]) == 1: # Do nothing pass @@ -1200,9 +1199,9 @@ def import_obj_urdf(obj_category, obj_model, dataset_root): # Check if filepath exists usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd" if _SPLIT_COLLISION_MESHES: - print(f"Converting collision meshes from {obj_category}, {obj_model}...") + log.debug(f"Converting collision meshes from {obj_category}, {obj_model}...") urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") - print(f"Importing {obj_category}, {obj_model} into path {usd_path}...") + log.debug(f"Importing {obj_category}, {obj_model} into path {usd_path}...") # Only import if it doesn't exist lazy.omni.kit.commands.execute( "URDFParseAndImportFile", @@ -1210,7 +1209,7 @@ def import_obj_urdf(obj_category, obj_model, dataset_root): import_config=cfg, dest_path=usd_path, ) - print(f"Imported {obj_category}, {obj_model}") + log.debug(f"Imported {obj_category}, {obj_model}") def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): @@ -1473,7 +1472,7 @@ def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): json.dump(metadata, f) meta_links = metadata.pop("meta_links") - print("meta_links:", meta_links) + log.debug("meta_links:", meta_links) for parent_link_name, child_link_attrs in meta_links.items(): for meta_link_name, ml_attrs in child_link_attrs.items(): assert ( @@ -1599,7 +1598,7 @@ def _load_scene_from_urdf(urdf): ".usd", ".encrypted.usd" ) ): - print("Missing object", obj_name) + log.warning("Missing object", obj_name) continue obj = DatasetObject( name=obj_name, @@ -1675,7 +1674,7 @@ def _get_objects_config_from_element(element, model_pose_info): # Skip this pass else: - print(name) + log.debug(name) assert name in model_pose_info, f"Did not find {name} in current model pose info!" model_pose_info[name]["cfg"]["category"] = ele.get("category") model_pose_info[name]["cfg"]["model"] = ele.get("model") @@ -1721,7 +1720,7 @@ def _get_joint_info(joint_element): return child, pos, quat, fixed_jnt -def _generate_collision_meshes(trimesh_mesh, hull_count=32, discard_not_volume=True): +def generate_collision_meshes(trimesh_mesh, hull_count=32, discard_not_volume=True): """ Generates a set of collision meshes from a trimesh mesh using CoACD. 
@@ -1760,25 +1759,170 @@ def _generate_collision_meshes(trimesh_mesh, hull_count=32, discard_not_volume=T return hulls -def generate_urdf_for_obj(obj_file_path, category, mdl, collision_file_path=None, collision_generation_option="coacd"): - # Load the mesh - mesh: trimesh.Trimesh = trimesh.load(obj_file_path, force="mesh", process=False) - - # Either load, or generate, the collision mesh - if collision_file_path is not None: - collision_mesh = trimesh.load(collision_file_path, force="mesh", process=False) - else: - if collision_generation_option == "coacd": - collision_meshes = _generate_collision_meshes(mesh) - collision_mesh = trimesh.util.concatenate(collision_meshes) - elif collision_generation_option == "convex": - collision_mesh = mesh.convex_hull - else: - raise ValueError(f"Unsupported collision generation option: {collision_generation_option}") - +def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): # Create a directory for the object obj_dir = pathlib.Path(gm.USER_ASSETS_PATH) / "objects" / category / mdl assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" obj_dir.mkdir(parents=True) - # Start the XML file + obj_name = "-".join([category, mdl]) + + # Prepare the URDF tree + tree_root = ET.Element("robot") + tree_root.attrib = {"name": mdl} + + # Canonicalize the object by putting the origin at the visual mesh center + mesh_center = visual_mesh.centroid + if visual_mesh.is_watertight: + mesh_center = visual_mesh.center_mass + transform = th.eye(4) + transform[:3, 3] = th.as_tensor(mesh_center) + inv_transform = th.linalg.inv(transform) + visual_mesh.apply_transform(inv_transform.numpy()) + + # Somehow we need to manually write the vertex normals to cache + visual_mesh._cache.cache["vertex_normals"] = visual_mesh.vertex_normals + + # Save the mesh + with tempfile.TemporaryDirectory() as temp_dir: + temp_dir_path = pathlib.Path(temp_dir) + obj_relative_path = f"{obj_name}-base_link.obj" + obj_temp_path = temp_dir_path / obj_relative_path + visual_mesh.export(obj_temp_path, file_type="obj") + + # Move the mesh to the correct path + obj_link_mesh_folder = obj_dir / "shape" + obj_link_mesh_folder.mkdir(exist_ok=True) + obj_link_visual_mesh_folder = obj_link_mesh_folder / "visual" + obj_link_visual_mesh_folder.mkdir(exist_ok=True) + obj_link_collision_mesh_folder = obj_link_mesh_folder / "collision" + obj_link_collision_mesh_folder.mkdir(exist_ok=True) + obj_link_material_folder = obj_dir / "material" + obj_link_material_folder.mkdir(exist_ok=True) + + # Check if a material got exported. + material_files = [x for x in temp_dir_path.iterdir() if x.suffix == ".mtl"] + if material_files: + assert ( + len(material_files) == 1 + ), f"Something's wrong: there's more than 1 material file in {list(temp_dir_path.iterdir())}" + original_material_filename = material_files[0] + + # Fix texture file paths if necessary. 
+ # original_material_dir = G.nodes[link_node]["material_dir"] + # if original_material_dir: + # for src_texture_file in original_material_dir.iterdir(): + # fname = src_texture_file + # # fname is in the same format as room_light-0-0_VRayAOMap.png + # vray_name = fname[fname.index("VRay") : -4] if "VRay" in fname else None + # if vray_name in VRAY_MAPPING: + # dst_fname = VRAY_MAPPING[vray_name] + # else: + # raise ValueError(f"Unknown texture map: {fname}") + + # dst_texture_file = f"{obj_name}-base_link-{dst_fname}.png" + + # # Load the image + # shutil.copy2(original_material_dir / src_texture_file, obj_link_material_folder / dst_texture_file) + + # Modify MTL reference in OBJ file + mtl_name = f"{obj_name}-base_link.mtl" + with open(obj_temp_path, "r") as f: + new_lines = [] + for line in f.readlines(): + if f"mtllib {original_material_filename}" in line: + line = f"mtllib {mtl_name}\n" + new_lines.append(line) + + with open(obj_temp_path, "w") as f: + for line in new_lines: + f.write(line) + + # # Modify texture reference in MTL file + # with open(temp_dir_path / original_material_filename, "r") as f: + # new_lines = [] + # for line in f.readlines(): + # if "map_Kd material_0.png" in line: + # line = "" + # for key in MTL_MAPPING: + # line += f"{key} ../../material/{obj_name}-{link_name}-{MTL_MAPPING[key]}.png\n" + # new_lines.append(line) + + with open(obj_link_visual_mesh_folder / mtl_name, "w") as f: + for line in new_lines: + f.write(line) + + # Copy the OBJ into the right spot + obj_final_path = obj_link_visual_mesh_folder / obj_relative_path + shutil.copy2(obj_temp_path, obj_final_path) + + # Save and merge precomputed collision mesh + collision_filenames_and_scales = [] + for i, collision_mesh in enumerate(collision_meshes): + processed_collision_mesh = collision_mesh.copy() + processed_collision_mesh.apply_transform(inv_transform) + processed_collision_mesh._cache.cache["vertex_normals"] = processed_collision_mesh.vertex_normals + collision_filename = obj_relative_path.replace(".obj", f"-{i}.obj") + + # OmniGibson requires unit-bbox collision meshes, so here we do that scaling + scaled_collision_mesh = processed_collision_mesh.copy() + bounding_box = scaled_collision_mesh.bounding_box.extents + assert all(x > 0 for x in bounding_box), f"Bounding box extents are not all positive: {bounding_box}" + collision_scale = 1 / bounding_box + collision_scale_matrix = th.eye(4) + collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale)) + scaled_collision_mesh.apply_transform(collision_scale_matrix.numpy()) + processed_collision_mesh.export(obj_link_collision_mesh_folder / collision_filename, file_type="obj") + collision_filenames_and_scales.append((collision_filename, collision_scale)) + + # Create the link in URDF + link_xml = ET.SubElement(tree_root, "link") + link_xml.attrib = {"name": "base_link"} + visual_xml = ET.SubElement(link_xml, "visual") + visual_origin_xml = ET.SubElement(visual_xml, "origin") + visual_origin_xml.attrib = {"xyz": " ".join([str(item) for item in [0.0] * 3])} + visual_geometry_xml = ET.SubElement(visual_xml, "geometry") + visual_mesh_xml = ET.SubElement(visual_geometry_xml, "mesh") + visual_mesh_xml.attrib = { + "filename": os.path.join("shape", "visual", obj_relative_path).replace("\\", "/"), + "scale": "1 1 1", + } + + collision_origin_xmls = [] + for collision_filename, collision_scale in collision_filenames_and_scales: + collision_xml = ET.SubElement(link_xml, "collision") + collision_xml.attrib = {"name": collision_filename.replace(".obj", 
"")} + collision_origin_xml = ET.SubElement(collision_xml, "origin") + collision_origin_xml.attrib = {"xyz": " ".join([str(item) for item in [0.0] * 3])} + collision_geometry_xml = ET.SubElement(collision_xml, "geometry") + collision_mesh_xml = ET.SubElement(collision_geometry_xml, "mesh") + collision_mesh_xml.attrib = { + "filename": os.path.join("shape", "collision", collision_filename).replace("\\", "/"), + "scale": " ".join([str(item) for item in collision_scale]), + } + collision_origin_xmls.append(collision_origin_xml) + + # Save the URDF file. + xmlstr = minidom.parseString(ET.tostring(tree_root)).toprettyxml(indent=" ") + xmlio = io.StringIO(xmlstr) + tree = ET.parse(xmlio) + + with open(obj_dir / f"{mdl}.urdf", "wb") as f: + tree.write(f, xml_declaration=True) + + base_link_offset = visual_mesh.bounding_box.centroid + bbox_size = visual_mesh.bounding_box.extents + + # Save metadata json + out_metadata = { + "meta_links": {}, + "link_tags": {}, + "object_parts": [], + "base_link_offset": base_link_offset.tolist(), + "bbox_size": bbox_size.tolist(), + "orientations": [], + } + misc_dir = obj_dir / "misc" + misc_dir.mkdir() + with open(misc_dir / "metadata.json", "w") as f: + json.dump(out_metadata, f) From cd02f5c29ae61e27ea39836e9a0fb3f31f301813 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:54:42 -0700 Subject: [PATCH 07/61] New entrypoint --- omnigibson/import_asset.py | 92 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 omnigibson/import_asset.py diff --git a/omnigibson/import_asset.py b/omnigibson/import_asset.py new file mode 100644 index 000000000..fc55ef4f4 --- /dev/null +++ b/omnigibson/import_asset.py @@ -0,0 +1,92 @@ +""" +Helper script to download OmniGibson dataset and assets. +""" + +import pathlib +from typing import List, Literal, Optional, Union + +import click +import trimesh + +import omnigibson as og +from omnigibson.macros import gm +from omnigibson.utils.asset_conversion_utils import ( + generate_collision_meshes, + generate_urdf_for_obj, + import_obj_metadata, + import_obj_urdf, +) + + +@click.command() +@click.argument("visual_mesh_path", type=click.Path(exists=True, dir_okay=False)) +@click.argument("category", type=click.STRING) +@click.argument("model", type=click.STRING) +@click.option( + "--collision-mesh-path", + type=click.Path(exists=True, dir_okay=False), + default=None, + help="Path to the collision mesh file. If not provided, a collision mesh will be generated using the provided collision generation method.", +) +@click.option( + "--collision-method", + type=click.Choice(["coacd", "convex"]), + default="coacd", + help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull.", +) +def import_asset( + visual_mesh_path: str, + category: str, + model: str, + collision_mesh_path: Optional[Union[str, pathlib.Path, List[Union[str, pathlib.Path]]]] = None, + collision_method: Literal["coacd", "convex"] = "coacd", +): + assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." 
+ + # Load the mesh + visual_mesh: trimesh.Trimesh = trimesh.load(visual_mesh_path, force="mesh", process=False) + + # Either load, or generate, the collision mesh + if collision_mesh_path is not None: + # If it's a single path, we import it and try to split + if isinstance(collision_mesh_path, (str, pathlib.Path)): + collision_meshes = trimesh.load(collision_mesh_path, force="mesh", process=False).split() + else: + # Otherwise, we assume it's a list of paths and we load each one + collision_meshes = [ + trimesh.load(collision_file, force="mesh", process=False) for collision_file in collision_mesh_path + ] + else: + if collision_method == "coacd": + collision_meshes = generate_collision_meshes(visual_mesh) + elif collision_method == "convex": + collision_meshes = [visual_mesh.convex_hull] + else: + raise ValueError(f"Unsupported collision generation option: {collision_method}") + + # Generate the URDF + click.echo(f"Generating URDF for {category}/{model}...") + og.launch() + generate_urdf_for_obj(visual_mesh, collision_meshes, category, model) + click.echo("URDF generation complete!") + + # Convert to USD + click.echo("Converting to USD...") + import_obj_urdf( + obj_category=category, + obj_model=model, + dataset_root=gm.USER_ASSETS_PATH, + ) + import_obj_metadata( + obj_category=category, + obj_model=model, + dataset_root=gm.USER_ASSETS_PATH, + import_render_channels=True, + ) + click.echo("Conversion complete!") + + click.echo("The asset has been successfully imported. You can view it and make changes and save if you'd like.") + + +if __name__ == "__main__": + import_asset() From d992afb2a82e89ffe3cd6957d48631d4bc8af71a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:55:34 -0700 Subject: [PATCH 08/61] Add logger --- omnigibson/utils/asset_conversion_utils.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index ab2a51eb3..dc9763e64 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -13,6 +13,7 @@ from os.path import exists from pathlib import Path +from omnigibson.utils.ui_utils import create_module_logger import torch as th import omnigibson as og import omnigibson.lazy as lazy @@ -23,6 +24,9 @@ from omnigibson.utils.usd_utils import create_primitive_mesh import omnigibson.utils.transform_utils as T +# Create module logger +log = create_module_logger(module_name=__name__) + _LIGHT_MAPPING = { 0: "Rect", 2: "Sphere", @@ -1021,13 +1025,6 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha for meta_link_type, meta_link_infos in link_metadata.items(): _process_meta_link(stage, obj_model, meta_link_type, meta_link_infos) - # Apply temporary fillable meshes. - # TODo: Disable after fillable meshes are backported into 3ds Max. 
- fillable_path = f"{model_root_path}/fillable.obj" - if exists(fillable_path): - mesh = trimesh.load(fillable_path, force="mesh") - import_fillable_mesh(stage, obj_model, mesh) - log.debug("Done processing meta links") # Iterate over dict and replace any lists of dicts as dicts of dicts (with each dict being indexed by an integer) From 4fce1433785a80f88b0b32997be551ce5e188590 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:55:49 -0700 Subject: [PATCH 09/61] Add logger --- omnigibson/utils/asset_conversion_utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index dc9763e64..78cf1880d 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -5,24 +5,25 @@ import pathlib import shutil import tempfile -from xml.dom import minidom import xml.etree.ElementTree as ET from collections import OrderedDict from copy import deepcopy from datetime import datetime from os.path import exists from pathlib import Path +from xml.dom import minidom -from omnigibson.utils.ui_utils import create_module_logger import torch as th +import trimesh + import omnigibson as og import omnigibson.lazy as lazy -import trimesh +import omnigibson.utils.transform_utils as T from omnigibson.macros import gm from omnigibson.objects import DatasetObject from omnigibson.scenes import Scene +from omnigibson.utils.ui_utils import create_module_logger from omnigibson.utils.usd_utils import create_primitive_mesh -import omnigibson.utils.transform_utils as T # Create module logger log = create_module_logger(module_name=__name__) From 2a4d2189929a502fe69383c4c574d670554ca910 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Mon, 23 Sep 2024 16:56:59 -0700 Subject: [PATCH 10/61] Some more fixes --- omnigibson/import_asset.py | 17 ++++++++++++++++- omnigibson/utils/asset_conversion_utils.py | 9 ++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/omnigibson/import_asset.py b/omnigibson/import_asset.py index fc55ef4f4..9c317b8c7 100644 --- a/omnigibson/import_asset.py +++ b/omnigibson/import_asset.py @@ -2,6 +2,7 @@ Helper script to download OmniGibson dataset and assets. """ +import math import pathlib from typing import List, Literal, Optional, Union @@ -34,12 +35,16 @@ default="coacd", help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull.", ) +@click.option("--up-axis", type=click.Choice(["z", "y"]), default="z", help="Up axis for the mesh.") +@click.option("--headless", is_flag=True, help="Run the script in headless mode.") def import_asset( visual_mesh_path: str, category: str, model: str, collision_mesh_path: Optional[Union[str, pathlib.Path, List[Union[str, pathlib.Path]]]] = None, collision_method: Literal["coacd", "convex"] = "coacd", + up_axis: Literal["z", "y"] = "z", + headless: bool = False, ): assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." 
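The new --up-axis option handles y-up assets by rotating them a quarter turn about +X into the simulator's z-up convention. As a quick sanity check on that choice of axis and angle (a standalone sketch, not code from the patch; the box extents are arbitrary), the height of a y-up mesh ends up along +Z after the transform:

import math

import trimesh

mesh = trimesh.creation.box(extents=(1.0, 3.0, 1.0))  # a y-up asset: its height runs along +Y
rotation = trimesh.transformations.rotation_matrix(math.pi / 2, [1, 0, 0])
mesh.apply_transform(rotation)
print(mesh.bounding_box.extents)  # approximately [1. 1. 3.]: the height now lies along +Z

Any collision meshes have to receive the same transform so they stay registered with the visual mesh, which is why the rotation is applied in a loop over both.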
@@ -64,6 +69,13 @@ def import_asset( else: raise ValueError(f"Unsupported collision generation option: {collision_method}") + # If the up axis is y, we need to rotate the meshes + if up_axis == "y": + rotation_matrix = trimesh.transformations.rotation_matrix(math.pi / 2, [1, 0, 0]) + visual_mesh.apply_transform(rotation_matrix) + for collision_mesh in collision_meshes: + collision_mesh.apply_transform(rotation_matrix) + # Generate the URDF click.echo(f"Generating URDF for {category}/{model}...") og.launch() @@ -85,7 +97,10 @@ def import_asset( ) click.echo("Conversion complete!") - click.echo("The asset has been successfully imported. You can view it and make changes and save if you'd like.") + if not headless: + click.echo("The asset has been successfully imported. You can view it and make changes and save if you'd like.") + while True: + og.sim.render() if __name__ == "__main__": diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 78cf1880d..5ca872f4e 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1863,15 +1863,14 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): collision_filename = obj_relative_path.replace(".obj", f"-{i}.obj") # OmniGibson requires unit-bbox collision meshes, so here we do that scaling - scaled_collision_mesh = processed_collision_mesh.copy() - bounding_box = scaled_collision_mesh.bounding_box.extents + bounding_box = processed_collision_mesh.bounding_box.extents assert all(x > 0 for x in bounding_box), f"Bounding box extents are not all positive: {bounding_box}" - collision_scale = 1 / bounding_box + collision_scale = 1. / bounding_box collision_scale_matrix = th.eye(4) collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale)) - scaled_collision_mesh.apply_transform(collision_scale_matrix.numpy()) + processed_collision_mesh.apply_transform(collision_scale_matrix.numpy()) processed_collision_mesh.export(obj_link_collision_mesh_folder / collision_filename, file_type="obj") - collision_filenames_and_scales.append((collision_filename, collision_scale)) + collision_filenames_and_scales.append((collision_filename, 1 / collision_scale)) # Create the link in URDF link_xml = ET.SubElement(tree_root, "link") From 5b999f4bb9e61d75a35c528acf37135160cab9bc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 03:38:50 +0000 Subject: [PATCH 11/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/utils/asset_conversion_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 5ca872f4e..5fb5eedc0 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1865,7 +1865,7 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): # OmniGibson requires unit-bbox collision meshes, so here we do that scaling bounding_box = processed_collision_mesh.bounding_box.extents assert all(x > 0 for x in bounding_box), f"Bounding box extents are not all positive: {bounding_box}" - collision_scale = 1. 
/ bounding_box + collision_scale = 1.0 / bounding_box collision_scale_matrix = th.eye(4) collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale)) processed_collision_mesh.apply_transform(collision_scale_matrix.numpy()) From 5b1e0a1504d0e06461787ed194b1eab697fdccaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Tue, 24 Sep 2024 12:54:47 -0700 Subject: [PATCH 12/61] Add support for user-added datasetobject --- omnigibson/objects/dataset_object.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/omnigibson/objects/dataset_object.py b/omnigibson/objects/dataset_object.py index 5237995d0..bee8111d5 100644 --- a/omnigibson/objects/dataset_object.py +++ b/omnigibson/objects/dataset_object.py @@ -49,6 +49,7 @@ def __init__( include_default_states=True, bounding_box=None, in_rooms=None, + user_added=False, **kwargs, ): """ @@ -84,6 +85,8 @@ def __init__( -- not both! in_rooms (None or str or list): If specified, sets the room(s) that this object should belong to. Either a list of room type(s) or a single room type + user_added (bool): Whether this object was added by the user or not. If True, then this object will be found + in the user_assets dir instead of og_dataset, and will not be encrypted. kwargs (dict): Additional keyword arguments that are used for other super() calls from subclasses, allowing for flexible compositions of various object subclasses (e.g.: Robot is USDObject + ControllableObject). """ @@ -99,6 +102,7 @@ def __init__( # Add info to load config load_config = dict() if load_config is None else load_config load_config["bounding_box"] = bounding_box + load_config["user_added"] = user_added # All DatasetObjects should have xform properties pre-loaded load_config["xform_props_pre_loaded"] = True @@ -119,13 +123,13 @@ def __init__( ) self._model = model - usd_path = self.get_usd_path(category=category, model=model) + usd_path = self.get_usd_path(category=category, model=model, user_added=user_added) # Run super init super().__init__( relative_prim_path=relative_prim_path, usd_path=usd_path, - encrypted=True, + encrypted=not user_added, name=name, category=category, scale=scale, @@ -142,7 +146,7 @@ def __init__( ) @classmethod - def get_usd_path(cls, category, model): + def get_usd_path(cls, category, model, user_added=False): """ Grabs the USD path for a DatasetObject corresponding to @category and @model. 
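The user_added flag introduced above reroutes asset lookup from the encrypted og_dataset to the user-assets directory and, because the object is constructed with encrypted=not user_added, skips decryption entirely. A small sketch of what that resolution looks like for an imported asset; the category and model names are invented, and this assumes an environment where OmniGibson imports cleanly:

import os

from omnigibson.macros import gm
from omnigibson.objects import DatasetObject

usd_path = DatasetObject.get_usd_path(category="mug", model="aaaaaa", user_added=True)
# Resolves under gm.USER_ASSETS_PATH rather than gm.DATASET_PATH
assert usd_path == os.path.join(gm.USER_ASSETS_PATH, "objects", "mug", "aaaaaa", "usd", "aaaaaa.usd")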
@@ -155,7 +159,8 @@ def get_usd_path(cls, category, model): Returns: str: Absolute filepath to the corresponding USD asset file """ - return os.path.join(gm.DATASET_PATH, "objects", category, model, "usd", f"{model}.usd") + dataset_path = gm.USER_ASSETS_PATH if user_added else gm.DATASET_PATH + return os.path.join(dataset_path, "objects", category, model, "usd", f"{model}.usd") def sample_orientation(self): """ From b6b50c32b5f17b75eae8f46388aee1f3e1d8f58c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 25 Sep 2024 00:19:04 -0700 Subject: [PATCH 13/61] Update asset_conversion_utils.py --- omnigibson/utils/asset_conversion_utils.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 5fb5eedc0..dcee2f176 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1804,7 +1804,7 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): assert ( len(material_files) == 1 ), f"Something's wrong: there's more than 1 material file in {list(temp_dir_path.iterdir())}" - original_material_filename = material_files[0] + original_material_filename = material_files[0].name # Fix texture file paths if necessary. # original_material_dir = G.nodes[link_node]["material_dir"] @@ -1836,15 +1836,16 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): for line in new_lines: f.write(line) - # # Modify texture reference in MTL file - # with open(temp_dir_path / original_material_filename, "r") as f: - # new_lines = [] - # for line in f.readlines(): - # if "map_Kd material_0.png" in line: - # line = "" - # for key in MTL_MAPPING: - # line += f"{key} ../../material/{obj_name}-{link_name}-{MTL_MAPPING[key]}.png\n" - # new_lines.append(line) + # Modify texture reference in MTL file + with open(temp_dir_path / original_material_filename, "r") as f: + new_lines = [] + for line in f.readlines(): + # We temporarily disable this material renaming. 
+ # if "map_Kd material_0.png" in line: + # line = "" + # for key in MTL_MAPPING: + # line += f"{key} ../../material/{obj_name}-{link_name}-{MTL_MAPPING[key]}.png\n" + new_lines.append(line) with open(obj_link_visual_mesh_folder / mtl_name, "w") as f: for line in new_lines: From cfbe5c95803384421ea66583511bdd5c9141e02d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Wed, 25 Sep 2024 01:18:39 -0700 Subject: [PATCH 14/61] Update asset_conversion_utils.py --- omnigibson/utils/asset_conversion_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index dcee2f176..cc964188d 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -460,7 +460,7 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat mtl = mtls[0] # TODO: Make name unique mtl_name = ".".join(os.path.basename(mtl).split(".")[:-1]).replace("-", "_").replace(".", "_") - mtl_old_dir = f"{'/'.join(obj_path.split('/')[:-1])}" + mtl_old_dir = os.path.dirname(obj_path) link_mtl_files[link_name][mesh_name] = mtl_name mtl_infos[mtl_name] = OrderedDict() mtl_old_dirs[mtl_name] = mtl_old_dir @@ -479,6 +479,8 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat mat_old_paths[mtl_name].append(map_file) mtl_infos[mtl_name][_MTL_MAP_TYPE_MAPPINGS[map_type.lower()]] = map_filename + print("Found material file:", mtl_name, mtl_infos[mtl_name]) + # Next, for each material information, we create a new material and port the material files to the USD directory mat_new_fpath = os.path.join(usd_dir, "materials") Path(mat_new_fpath).mkdir(parents=True, exist_ok=True) From 8fc8e8f66ac829bb00471fcb965425d8689d536f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= <1408354+cgokmen@users.noreply.github.com> Date: Fri, 27 Sep 2024 17:59:30 -0700 Subject: [PATCH 15/61] Add scale --- omnigibson/import_asset.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/omnigibson/import_asset.py b/omnigibson/import_asset.py index 9c317b8c7..80f0698f7 100644 --- a/omnigibson/import_asset.py +++ b/omnigibson/import_asset.py @@ -35,16 +35,20 @@ default="coacd", help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull.", ) +@click.option("--scale", type=float, default=1.0, help="Scale factor to apply to the mesh.") @click.option("--up-axis", type=click.Choice(["z", "y"]), default="z", help="Up axis for the mesh.") @click.option("--headless", is_flag=True, help="Run the script in headless mode.") +@click.option("--confirm-bbox", default=True, help="Whether to confirm the scale factor.") def import_asset( visual_mesh_path: str, category: str, model: str, - collision_mesh_path: Optional[Union[str, pathlib.Path, List[Union[str, pathlib.Path]]]] = None, - collision_method: Literal["coacd", "convex"] = "coacd", - up_axis: Literal["z", "y"] = "z", - headless: bool = False, + collision_mesh_path: Optional[Union[str, pathlib.Path, List[Union[str, pathlib.Path]]]], + collision_method: Literal["coacd", "convex"], + scale: float, + up_axis: Literal["z", "y"], + headless: bool, + confirm_bbox: bool, ): assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." 
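The --scale flag and the bounding-box confirmation added in this patch mainly exist to catch unit mistakes, since meshes exported from modeling tools are often authored in millimeters or centimeters rather than meters. A rough illustration of the effect (the dimensions below are invented, and this is only a sketch, not code from the importer):

import trimesh

cabinet = trimesh.creation.box(extents=(400.0, 500.0, 900.0))   # authored in millimeters
print(cabinet.bounding_box.extents)   # [400. 500. 900.] -> would trip the 3-meter sanity prompt

cabinet.apply_transform(trimesh.transformations.scale_matrix(0.001))  # same effect as --scale 0.001
print(cabinet.bounding_box.extents)   # [0.4 0.5 0.9] -> a plausible cabinet size in meters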
@@ -76,6 +80,25 @@ def import_asset( for collision_mesh in collision_meshes: collision_mesh.apply_transform(rotation_matrix) + # If the scale is nonzero, we apply it to the meshes + if scale != 1.0: + scale_transform = trimesh.transformations.scale_matrix(scale) + visual_mesh.apply_transform(scale_transform) + for collision_mesh in collision_meshes: + collision_mesh.apply_transform(scale_transform) + + # Check the bounding box size and complain if it's larger than 3 meters + bbox_size = visual_mesh.bounding_box.extents + click.echo(f"Visual mesh bounding box size: {bbox_size}") + + if confirm_bbox: + if any(size > 3.0 for size in bbox_size): + click.echo( + f"Warning: The bounding box sounds a bit large. Are you sure you don't need to scale? " + f"We just wanted to confirm this is intentional. You can skip this check by passing --no-confirm-bbox." + ) + click.confirm("Do you want to continue?", abort=True) + # Generate the URDF click.echo(f"Generating URDF for {category}/{model}...") og.launch() From 9713a88caabed63087e0954fb84ffa2eb5f8ec8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cem=20G=C3=B6kmen?= Date: Thu, 10 Oct 2024 17:31:45 -0700 Subject: [PATCH 16/61] Support importing textures --- omnigibson/import_asset.py | 2 +- omnigibson/utils/asset_conversion_utils.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/omnigibson/import_asset.py b/omnigibson/import_asset.py index 80f0698f7..0a32d3ec7 100644 --- a/omnigibson/import_asset.py +++ b/omnigibson/import_asset.py @@ -101,12 +101,12 @@ def import_asset( # Generate the URDF click.echo(f"Generating URDF for {category}/{model}...") - og.launch() generate_urdf_for_obj(visual_mesh, collision_meshes, category, model) click.echo("URDF generation complete!") # Convert to USD click.echo("Converting to USD...") + og.launch() import_obj_urdf( obj_category=category, obj_model=model, diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index cc964188d..05f29abb8 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1842,6 +1842,22 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): with open(temp_dir_path / original_material_filename, "r") as f: new_lines = [] for line in f.readlines(): + if "map_" in line: + map_kind, texture_filename = line.split(" ") + texture_filename = texture_filename.strip() + map_kind = map_kind.strip().replace("map_", "") + new_filename = f"../../material/{obj_name}-base_link-{map_kind}.png" + + # Copy from the texture_filename relative to the original path, to the new path relative to the target path + texture_from_path = temp_dir_path / texture_filename + assert texture_from_path.exists(), f"Texture file {texture_from_path} does not exist!" + texture_to_path = obj_link_visual_mesh_folder / new_filename + assert not texture_to_path.exists(), f"Texture file {texture_to_path} already exists!" + shutil.copy2(texture_from_path, texture_to_path) + + # Change the line to point to the new path + line = line.replace(texture_filename, new_filename) + # We temporarily disable this material renaming. 
# if "map_Kd material_0.png" in line: # line = "" From 72f1508f1832965e110c44ccde89b7ced0c75a31 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:02:57 -0800 Subject: [PATCH 17/61] add urdfy utils --- omnigibson/utils/urdfpy_utils.py | 4208 ++++++++++++++++++++++++++++++ setup.py | 1 + 2 files changed, 4209 insertions(+) create mode 100644 omnigibson/utils/urdfpy_utils.py diff --git a/omnigibson/utils/urdfpy_utils.py b/omnigibson/utils/urdfpy_utils.py new file mode 100644 index 000000000..e9769c6ad --- /dev/null +++ b/omnigibson/utils/urdfpy_utils.py @@ -0,0 +1,4208 @@ +from collections import OrderedDict +import copy +import os +import time + +from lxml import etree as ET +import networkx as nx +import numpy as np +import PIL +import trimesh +import six + + +class URDFType(object): + """Abstract base class for all URDF types. + + This has useful class methods for automatic parsing/unparsing + of XML trees. + + There are three overridable class variables: + + - ``_ATTRIBS`` - This is a dictionary mapping attribute names to a tuple, + ``(type, required)`` where ``type`` is the Python type for the + attribute and ``required`` is a boolean stating whether the attribute + is required to be present. + - ``_ELEMENTS`` - This is a dictionary mapping element names to a tuple, + ``(type, required, multiple)`` where ``type`` is the Python type for the + element, ``required`` is a boolean stating whether the element + is required to be present, and ``multiple`` is a boolean indicating + whether multiple elements of this type could be present. + Elements are child nodes in the XML tree, and their type must be a + subclass of :class:`.URDFType`. + - ``_TAG`` - This is a string that represents the XML tag for the node + containing this type of object. + """ + _ATTRIBS = {} # Map from attrib name to (type, required) + _ELEMENTS = {} # Map from element name to (type, required, multiple) + _TAG = '' # XML tag for this element + + def __init__(self): + pass + + @classmethod + def _parse_attrib(cls, val_type, val): + """Parse an XML attribute into a python value. + + Parameters + ---------- + val_type : :class:`type` + The type of value to create. + val : :class:`object` + The value to parse. + + Returns + ------- + val : :class:`object` + The parsed attribute. + """ + if val_type == np.ndarray: + val = np.fromstring(val, sep=' ') + else: + val = val_type(val) + return val + + @classmethod + def _parse_simple_attribs(cls, node): + """Parse all attributes in the _ATTRIBS array for this class. + + Parameters + ---------- + node : :class:`lxml.etree.Element` + The node to parse attributes for. + + Returns + ------- + kwargs : dict + Map from attribute name to value. If the attribute is not + required and is not present, that attribute's name will map to + ``None``. + """ + kwargs = {} + for a in cls._ATTRIBS: + t, r = cls._ATTRIBS[a] # t = type, r = required (bool) + if r: + try: + v = cls._parse_attrib(t, node.attrib[a]) + except Exception: + raise ValueError( + 'Missing required attribute {} when parsing an object ' + 'of type {}'.format(a, cls.__name__) + ) + else: + v = None + if a in node.attrib: + v = cls._parse_attrib(t, node.attrib[a]) + kwargs[a] = v + return kwargs + + @classmethod + def _parse_simple_elements(cls, node, path): + """Parse all elements in the _ELEMENTS array from the children of + this node. + + Parameters + ---------- + node : :class:`lxml.etree.Element` + The node to parse children for. 
+ path : str + The string path where the XML file is located (used for resolving + the location of mesh or image files). + + Returns + ------- + kwargs : dict + Map from element names to the :class:`URDFType` subclass (or list, + if ``multiple`` was set) created for that element. + """ + kwargs = {} + for a in cls._ELEMENTS: + t, r, m = cls._ELEMENTS[a] + if not m: + v = node.find(t._TAG) + if r or v is not None: + v = t._from_xml(v, path) + else: + vs = node.findall(t._TAG) + if len(vs) == 0 and r: + raise ValueError( + 'Missing required subelement(s) of type {} when ' + 'parsing an object of type {}'.format( + t.__name__, cls.__name__ + ) + ) + v = [t._from_xml(n, path) for n in vs] + kwargs[a] = v + return kwargs + + @classmethod + def _parse(cls, node, path): + """Parse all elements and attributes in the _ELEMENTS and _ATTRIBS + arrays for a node. + + Parameters + ---------- + node : :class:`lxml.etree.Element` + The node to parse. + path : str + The string path where the XML file is located (used for resolving + the location of mesh or image files). + + Returns + ------- + kwargs : dict + Map from names to Python classes created from the attributes + and elements in the class arrays. + """ + kwargs = cls._parse_simple_attribs(node) + kwargs.update(cls._parse_simple_elements(node, path)) + return kwargs + + @classmethod + def _from_xml(cls, node, path): + """Create an instance of this class from an XML node. + + Parameters + ---------- + node : :class:`lxml.etree.Element` + The node to parse. + path : str + The string path where the XML file is located (used for resolving + the location of mesh or image files). + + Returns + ------- + obj : :class:`URDFType` + An instance of this class parsed from the node. + """ + return cls(**cls._parse(node, path)) + + def _unparse_attrib(self, val_type, val): + """Convert a Python value into a string for storage in an + XML attribute. + + Parameters + ---------- + val_type : :class:`type` + The type of the Python object. + val : :class:`object` + The actual value. + + Returns + ------- + s : str + The attribute string. + """ + if val_type == np.ndarray: + val = np.array2string(val)[1:-1] + else: + val = str(val) + return val + + def _unparse_simple_attribs(self, node): + """Convert all Python types from the _ATTRIBS array back into attributes + for an XML node. + + Parameters + ---------- + node : :class:`object` + The XML node to add the attributes to. + """ + for a in self._ATTRIBS: + t, r = self._ATTRIBS[a] + v = getattr(self, a, None) + if r or v is not None: + node.attrib[a] = self._unparse_attrib(t, v) + + def _unparse_simple_elements(self, node, path): + """Unparse all Python types from the _ELEMENTS array back into child + nodes of an XML node. + + Parameters + ---------- + node : :class:`object` + The XML node for this object. Elements will be added as children + of this node. + path : str + The string path where the XML file is being written to (used for + writing out meshes and image files). + """ + for a in self._ELEMENTS: + t, r, m = self._ELEMENTS[a] + v = getattr(self, a, None) + if not m: + if r or v is not None: + node.append(v._to_xml(node, path)) + else: + vs = v + for v in vs: + node.append(v._to_xml(node, path)) + + def _unparse(self, path): + """Create a node for this object and unparse all elements and + attributes in the class arrays. + + Parameters + ---------- + path : str + The string path where the XML file is being written to (used for + writing out meshes and image files). 
+ + Returns + ------- + node : :class:`lxml.etree.Element` + The newly-created node. + """ + node = ET.Element(self._TAG) + self._unparse_simple_attribs(node) + self._unparse_simple_elements(node, path) + return node + + def _to_xml(self, parent, path): + """Create and return an XML node for this object. + + Parameters + ---------- + parent : :class:`lxml.etree.Element` + The parent node that this element will eventually be added to. + This base implementation doesn't use this information, but + classes that override this function may use it. + path : str + The string path where the XML file is being written to (used for + writing out meshes and image files). + + Returns + ------- + node : :class:`lxml.etree.Element` + The newly-created node. + """ + return self._unparse(path) + + +############################################################################### +# Link types +############################################################################### + + +class Box(URDFType): + """A rectangular prism whose center is at the local origin. + + Parameters + ---------- + size : (3,) float + The length, width, and height of the box in meters. + """ + + _ATTRIBS = { + 'size': (np.ndarray, True) + } + _TAG = 'box' + + def __init__(self, size): + self.size = size + self._meshes = [] + + @property + def size(self): + """(3,) float : The length, width, and height of the box in meters. + """ + return self._size + + @size.setter + def size(self, value): + self._size = np.asanyarray(value).astype(np.float64) + self._meshes = [] + + @property + def meshes(self): + """list of :class:`~trimesh.base.Trimesh` : The triangular meshes + that represent this object. + """ + if len(self._meshes) == 0: + self._meshes = [trimesh.creation.box(extents=self.size)] + return self._meshes + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Box` + A deep copy. + """ + if scale is None: + scale = 1.0 + b = Box( + size=self.size.copy() * scale, + ) + return b + + +class Cylinder(URDFType): + """A cylinder whose center is at the local origin. + + Parameters + ---------- + radius : float + The radius of the cylinder in meters. + length : float + The length of the cylinder in meters. + """ + + _ATTRIBS = { + 'radius': (float, True), + 'length': (float, True), + } + _TAG = 'cylinder' + + def __init__(self, radius, length): + self.radius = radius + self.length = length + self._meshes = [] + + @property + def radius(self): + """float : The radius of the cylinder in meters. + """ + return self._radius + + @radius.setter + def radius(self, value): + self._radius = float(value) + self._meshes = [] + + @property + def length(self): + """float : The length of the cylinder in meters. + """ + return self._length + + @length.setter + def length(self, value): + self._length = float(value) + self._meshes = [] + + @property + def meshes(self): + """list of :class:`~trimesh.base.Trimesh` : The triangular meshes + that represent this object. + """ + if len(self._meshes) == 0: + self._meshes = [trimesh.creation.cylinder( + radius=self.radius, height=self.length + )] + return self._meshes + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Cylinder` + A deep copy. 
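+
+        Examples
+        --------
+        A minimal usage sketch (the dimensions are illustrative):
+
+        >>> c = Cylinder(radius=0.1, length=0.5)
+        >>> c2 = c.copy(scale=2.0)
+        >>> (c2.radius, c2.length)
+        (0.2, 1.0)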
+ """ + if scale is None: + scale = 1.0 + if isinstance(scale, (list, np.ndarray)): + if scale[0] != scale[1]: + raise ValueError('Cannot rescale cylinder geometry with asymmetry in x/y') + c = Cylinder( + radius=self.radius * scale[0], + length=self.length * scale[2], + ) + else: + c = Cylinder( + radius=self.radius * scale, + length=self.length * scale, + ) + return c + + +class Sphere(URDFType): + """A sphere whose center is at the local origin. + + Parameters + ---------- + radius : float + The radius of the sphere in meters. + """ + _ATTRIBS = { + 'radius': (float, True), + } + _TAG = 'sphere' + + def __init__(self, radius): + self.radius = radius + self._meshes = [] + + @property + def radius(self): + """float : The radius of the sphere in meters. + """ + return self._radius + + @radius.setter + def radius(self, value): + self._radius = float(value) + self._meshes = [] + + @property + def meshes(self): + """list of :class:`~trimesh.base.Trimesh` : The triangular meshes + that represent this object. + """ + if len(self._meshes) == 0: + self._meshes = [trimesh.creation.icosphere(radius=self.radius)] + return self._meshes + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Sphere` + A deep copy. + """ + if scale is None: + scale = 1.0 + if isinstance(scale, (list, np.ndarray)): + if scale[0] != scale[1] or scale[0] != scale[2]: + raise ValueError('Spheres do not support non-uniform scaling!') + scale = scale[0] + s = Sphere( + radius=self.radius * scale, + ) + return s + + +class Mesh(URDFType): + """A triangular mesh object. + + Parameters + ---------- + filename : str + The path to the mesh that contains this object. This can be + relative to the top-level URDF or an absolute path. + scale : (3,) float, optional + The scaling value for the mesh along the XYZ axes. + If ``None``, assumes no scale is applied. + meshes : list of :class:`~trimesh.base.Trimesh` + A list of meshes that compose this mesh. + The list of meshes is useful for visual geometries that + might be composed of separate trimesh objects. + If not specified, the mesh is loaded from the file using trimesh. + """ + _ATTRIBS = { + 'filename': (str, True), + 'scale': (np.ndarray, False) + } + _TAG = 'mesh' + + def __init__(self, filename, scale=None, meshes=None): + if meshes is None: + meshes = load_meshes(filename) + self.filename = filename + self.scale = scale + self.meshes = meshes + + @property + def filename(self): + """str : The path to the mesh file for this object. + """ + return self._filename + + @filename.setter + def filename(self, value): + self._filename = value + + @property + def scale(self): + """(3,) float : A scaling for the mesh along its local XYZ axes. + """ + return self._scale + + @scale.setter + def scale(self, value): + if value is not None: + value = np.asanyarray(value).astype(np.float64) + self._scale = value + + @property + def meshes(self): + """list of :class:`~trimesh.base.Trimesh` : The triangular meshes + that represent this object. 
+ """ + return self._meshes + + @meshes.setter + def meshes(self, value): + if isinstance(value, six.string_types): + value = load_meshes(value) + elif isinstance(value, (list, tuple, set, np.ndarray)): + value = list(value) + if len(value) == 0: + raise ValueError('Mesh must have at least one trimesh.Trimesh') + for m in value: + if not isinstance(m, trimesh.Trimesh): + raise TypeError('Mesh requires a trimesh.Trimesh or a ' + 'list of them') + elif isinstance(value, trimesh.Trimesh): + value = [value] + else: + raise TypeError('Mesh requires a trimesh.Trimesh') + self._meshes = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + + # Load the mesh, combining collision geometry meshes but keeping + # visual ones separate to preserve colors and textures + fn = get_filename(path, kwargs['filename']) + combine = node.getparent().getparent().tag == Collision._TAG + meshes = load_meshes(fn) + if combine: + # Delete visuals for simplicity + for m in meshes: + m.visual = trimesh.visual.ColorVisuals(mesh=m) + meshes = [meshes[0] + meshes[1:]] + kwargs['meshes'] = meshes + + return Mesh(**kwargs) + + def _to_xml(self, parent, path): + # Get the filename + fn = get_filename(path, self.filename, makedirs=True) + + # Export the meshes as a single file + meshes = self.meshes + if len(meshes) == 1: + meshes = meshes[0] + elif os.path.splitext(fn)[1] == '.glb': + meshes = trimesh.scene.Scene(geometry=meshes) + trimesh.exchange.export.export_mesh(meshes, fn) + + # Unparse the node + node = self._unparse(path) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Sphere` + A deep copy. + """ + meshes = [m.copy() for m in self.meshes] + if scale is not None: + sm = np.eye(4) + if isinstance(scale, (list, np.ndarray)): + sm[:3,:3] = np.diag(scale) + else: + sm[:3,:3] = np.diag(np.repeat(scale, 3)) + for i, m in enumerate(meshes): + meshes[i] = m.apply_transform(sm) + base, fn = os.path.split(self.filename) + fn = '{}{}'.format(prefix, self.filename) + m = Mesh( + filename=os.path.join(base, fn), + scale=(self.scale.copy() if self.scale is not None else None), + meshes=meshes + ) + return m + + +class Geometry(URDFType): + """A wrapper for all geometry types. + + Only one of the following values can be set, all others should be set + to ``None``. + + Parameters + ---------- + box : :class:`.Box`, optional + Box geometry. + cylinder : :class:`.Cylinder` + Cylindrical geometry. + sphere : :class:`.Sphere` + Spherical geometry. + mesh : :class:`.Mesh` + Mesh geometry. + """ + + _ELEMENTS = { + 'box': (Box, False, False), + 'cylinder': (Cylinder, False, False), + 'sphere': (Sphere, False, False), + 'mesh': (Mesh, False, False), + } + _TAG = 'geometry' + + def __init__(self, box=None, cylinder=None, sphere=None, mesh=None): + if (box is None and cylinder is None and + sphere is None and mesh is None): + raise ValueError('At least one geometry element must be set') + self.box = box + self.cylinder = cylinder + self.sphere = sphere + self.mesh = mesh + + @property + def box(self): + """:class:`.Box` : Box geometry. + """ + return self._box + + @box.setter + def box(self, value): + if value is not None and not isinstance(value, Box): + raise TypeError('Expected Box type') + self._box = value + + @property + def cylinder(self): + """:class:`.Cylinder` : Cylinder geometry. 
+        """
+        return self._cylinder
+
+    @cylinder.setter
+    def cylinder(self, value):
+        if value is not None and not isinstance(value, Cylinder):
+            raise TypeError('Expected Cylinder type')
+        self._cylinder = value
+
+    @property
+    def sphere(self):
+        """:class:`.Sphere` : Spherical geometry.
+        """
+        return self._sphere
+
+    @sphere.setter
+    def sphere(self, value):
+        if value is not None and not isinstance(value, Sphere):
+            raise TypeError('Expected Sphere type')
+        self._sphere = value
+
+    @property
+    def mesh(self):
+        """:class:`.Mesh` : Mesh geometry.
+        """
+        return self._mesh
+
+    @mesh.setter
+    def mesh(self, value):
+        if value is not None and not isinstance(value, Mesh):
+            raise TypeError('Expected Mesh type')
+        self._mesh = value
+
+    @property
+    def geometry(self):
+        """:class:`.Box`, :class:`.Cylinder`, :class:`.Sphere`, or
+        :class:`.Mesh` : The valid geometry element.
+        """
+        if self.box is not None:
+            return self.box
+        if self.cylinder is not None:
+            return self.cylinder
+        if self.sphere is not None:
+            return self.sphere
+        if self.mesh is not None:
+            return self.mesh
+        return None
+
+    @property
+    def meshes(self):
+        """list of :class:`~trimesh.base.Trimesh` : The geometry's triangular
+        mesh representation(s).
+        """
+        return self.geometry.meshes
+
+    def copy(self, prefix='', scale=None):
+        """Create a deep copy with the prefix applied to all names.
+
+        Parameters
+        ----------
+        prefix : str
+            A prefix to apply to all names.
+
+        Returns
+        -------
+        :class:`.Geometry`
+            A deep copy.
+        """
+        v = Geometry(
+            box=(self.box.copy(prefix=prefix, scale=scale) if self.box else None),
+            cylinder=(self.cylinder.copy(prefix=prefix, scale=scale) if self.cylinder else None),
+            sphere=(self.sphere.copy(prefix=prefix, scale=scale) if self.sphere else None),
+            mesh=(self.mesh.copy(prefix=prefix, scale=scale) if self.mesh else None),
+        )
+        return v
+
+
+class Texture(URDFType):
+    """An image-based texture.
+
+    Parameters
+    ----------
+    filename : str
+        The path to the image that contains this texture. This can be
+        relative to the top-level URDF or an absolute path.
+    image : :class:`PIL.Image.Image`, optional
+        The image for the texture.
+        If not specified, it is loaded automatically from the filename.
+    """
+
+    _ATTRIBS = {
+        'filename': (str, True)
+    }
+    _TAG = 'texture'
+
+    def __init__(self, filename, image=None):
+        if image is None:
+            # Note: PIL exposes the loader as PIL.Image.open (capital I)
+            image = PIL.Image.open(filename)
+        self.filename = filename
+        self.image = image
+
+    @property
+    def filename(self):
+        """str : Path to the image for this texture.
+        """
+        return self._filename
+
+    @filename.setter
+    def filename(self, value):
+        self._filename = str(value)
+
+    @property
+    def image(self):
+        """:class:`PIL.Image.Image` : The image for this texture.
+ """ + return self._image + + @image.setter + def image(self, value): + if isinstance(value, str): + value = PIL.Image.open(value) + if isinstance(value, np.ndarray): + value = PIL.Image.fromarray(value) + elif not isinstance(value, PIL.Image.Image): + raise ValueError('Texture only supports numpy arrays ' + 'or PIL images') + self._image = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + + # Load image + fn = get_filename(path, kwargs['filename']) + kwargs['image'] = PIL.Image.open(fn) + + return Texture(**kwargs) + + def _to_xml(self, parent, path): + # Save the image + filepath = get_filename(path, self.filename, makedirs=True) + self.image.save(filepath) + + return self._unparse(path) + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Texture` + A deep copy. + """ + v = Texture( + filename=self.filename, + image=self.image.copy() + ) + return v + + +class Material(URDFType): + """A material for some geometry. + + Parameters + ---------- + name : str + The name of the material. + color : (4,) float, optional + The RGBA color of the material in the range [0,1]. + texture : :class:`.Texture`, optional + A texture for the material. + """ + _ATTRIBS = { + 'name': (str, True) + } + _ELEMENTS = { + 'texture': (Texture, False, False), + } + _TAG = 'material' + + def __init__(self, name, color=None, texture=None): + self.name = name + self.color = color + self.texture = texture + + @property + def name(self): + """str : The name of the material. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def color(self): + """(4,) float : The RGBA color of the material, in the range [0,1]. + """ + return self._color + + @color.setter + def color(self, value): + if value is not None: + value = np.asanyarray(value).astype(np.float64) + value = np.clip(value, 0.0, 1.0) + if value.shape != (4,): + raise ValueError('Color must be a (4,) float') + self._color = value + + @property + def texture(self): + """:class:`.Texture` : The texture for the material. + """ + return self._texture + + @texture.setter + def texture(self, value): + if value is not None: + if isinstance(value, six.string_types): + image = PIL.Image.open(value) + value = Texture(filename=value, image=image) + elif not isinstance(value, Texture): + raise ValueError('Invalid type for texture -- expect path to ' + 'image or Texture') + self._texture = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + + # Extract the color -- it's weirdly an attribute of a subelement + color = node.find('color') + if color is not None: + color = np.fromstring(color.attrib['rgba'], sep=' ', dtype=np.float64) + kwargs['color'] = color + + return Material(**kwargs) + + def _to_xml(self, parent, path): + # Simplify materials by collecting them at the top level. 
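+        # (Illustrative behavior: a material named "wood" with a color is
+        # written under <robot> as a full <material> element including its
+        # <color rgba="..."/> child, while inside a <visual> it is emitted
+        # as a bare <material name="wood"/> reference.)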
+ + # For top-level elements, save the full material specification + if parent.tag == 'robot': + node = self._unparse(path) + if self.color is not None: + color = ET.Element('color') + color.attrib['rgba'] = np.array2string(self.color)[1:-1] + node.append(color) + + # For non-top-level elements just save the material with a name + else: + node = ET.Element('material') + node.attrib['name'] = self.name + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy of the material with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Material` + A deep copy of the material. + """ + return Material( + name='{}{}'.format(prefix, self.name), + color=self.color, + texture=self.texture + ) + + +class Collision(URDFType): + """Collision properties of a link. + + Parameters + ---------- + geometry : :class:`.Geometry` + The geometry of the element + name : str, optional + The name of the collision geometry. + origin : (4,4) float, optional + The pose of the collision element relative to the link frame. + Defaults to identity. + """ + + _ATTRIBS = { + 'name': (str, False) + } + _ELEMENTS = { + 'geometry': (Geometry, True, False), + } + _TAG = 'collision' + + def __init__(self, name, origin, geometry): + self.geometry = geometry + self.name = name + self.origin = origin + + @property + def geometry(self): + """:class:`.Geometry` : The geometry of this element. + """ + return self._geometry + + @geometry.setter + def geometry(self, value): + if not isinstance(value, Geometry): + raise TypeError('Must set geometry with Geometry object') + self._geometry = value + + @property + def name(self): + """str : The name of this collision element. + """ + return self._name + + @name.setter + def name(self, value): + if value is not None: + value = str(value) + self._name = value + + @property + def origin(self): + """(4,4) float : The pose of this element relative to the link frame. + """ + return self._origin + + @origin.setter + def origin(self, value): + self._origin = configure_origin(value) + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + kwargs['origin'] = parse_origin(node) + return Collision(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + node.append(unparse_origin(self.origin)) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Visual` + A deep copy of the visual. + """ + origin=self.origin.copy() + if scale is not None: + if not isinstance(scale, (list, np.ndarray)): + scale = np.repeat(scale, 3) + origin[:3,3] *= scale + return Collision( + name='{}{}'.format(prefix, self.name), + origin=origin, + geometry=self.geometry.copy(prefix=prefix, scale=scale), + ) + + +class Visual(URDFType): + """Visual properties of a link. + + Parameters + ---------- + geometry : :class:`.Geometry` + The geometry of the element + name : str, optional + The name of the visual geometry. + origin : (4,4) float, optional + The pose of the visual element relative to the link frame. + Defaults to identity. + material : :class:`.Material`, optional + The material of the element. 
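+
+    Examples
+    --------
+    A minimal construction sketch (the box size and name are illustrative):
+
+    >>> geom = Geometry(box=Box(size=[0.1, 0.2, 0.3]))
+    >>> v = Visual(geometry=geom, name='base_visual')
+    >>> v.origin.shape
+    (4, 4)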
+ """ + _ATTRIBS = { + 'name': (str, False) + } + _ELEMENTS = { + 'geometry': (Geometry, True, False), + 'material': (Material, False, False), + } + _TAG = 'visual' + + def __init__(self, geometry, name=None, origin=None, material=None): + self.geometry = geometry + self.name = name + self.origin = origin + self.material = material + + @property + def geometry(self): + """:class:`.Geometry` : The geometry of this element. + """ + return self._geometry + + @geometry.setter + def geometry(self, value): + if not isinstance(value, Geometry): + raise TypeError('Must set geometry with Geometry object') + self._geometry = value + + @property + def name(self): + """str : The name of this visual element. + """ + return self._name + + @name.setter + def name(self, value): + if value is not None: + value = str(value) + self._name = value + + @property + def origin(self): + """(4,4) float : The pose of this element relative to the link frame. + """ + return self._origin + + @origin.setter + def origin(self, value): + self._origin = configure_origin(value) + + @property + def material(self): + """:class:`.Material` : The material for this element. + """ + return self._material + + @material.setter + def material(self, value): + if value is not None: + if not isinstance(value, Material): + raise TypeError('Must set material with Material object') + self._material = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + kwargs['origin'] = parse_origin(node) + return Visual(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + node.append(unparse_origin(self.origin)) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Visual` + A deep copy of the visual. + """ + origin=self.origin.copy() + if scale is not None: + if not isinstance(scale, (list, np.ndarray)): + scale = np.repeat(scale, 3) + origin[:3,3] *= scale + return Visual( + geometry=self.geometry.copy(prefix=prefix, scale=scale), + name='{}{}'.format(prefix, self.name), + origin=origin, + material=(self.material.copy(prefix=prefix) if self.material else None), + ) + + +class Inertial(URDFType): + """The inertial properties of a link. + + Parameters + ---------- + mass : float + The mass of the link in kilograms. + inertia : (3,3) float + The 3x3 symmetric rotational inertia matrix. + origin : (4,4) float, optional + The pose of the inertials relative to the link frame. + Defaults to identity if not specified. + """ + _TAG = 'inertial' + + def __init__(self, mass, inertia, origin=None): + self.mass = mass + self.inertia = inertia + self.origin = origin + + @property + def mass(self): + """float : The mass of the link in kilograms. + """ + return self._mass + + @mass.setter + def mass(self, value): + self._mass = float(value) + + @property + def inertia(self): + """(3,3) float : The 3x3 symmetric rotational inertia matrix. + """ + return self._inertia + + @inertia.setter + def inertia(self, value): + value = np.asanyarray(value).astype(np.float64) + if not np.allclose(value, value.T): + raise ValueError('Inertia must be a symmetric matrix') + self._inertia = value + + @property + def origin(self): + """(4,4) float : The pose of the inertials relative to the link frame. 
+ """ + return self._origin + + @origin.setter + def origin(self, value): + self._origin = configure_origin(value) + + @classmethod + def _from_xml(cls, node, path): + origin = parse_origin(node) + mass = float(node.find('mass').attrib['value']) + n = node.find('inertia') + xx = float(n.attrib['ixx']) + xy = float(n.attrib['ixy']) + xz = float(n.attrib['ixz']) + yy = float(n.attrib['iyy']) + yz = float(n.attrib['iyz']) + zz = float(n.attrib['izz']) + inertia = np.array([ + [xx, xy, xz], + [xy, yy, yz], + [xz, yz, zz] + ], dtype=np.float64) + return Inertial(mass=mass, inertia=inertia, origin=origin) + + def _to_xml(self, parent, path): + node = ET.Element('inertial') + node.append(unparse_origin(self.origin)) + mass = ET.Element('mass') + mass.attrib['value'] = str(self.mass) + node.append(mass) + inertia = ET.Element('inertia') + inertia.attrib['ixx'] = str(self.inertia[0,0]) + inertia.attrib['ixy'] = str(self.inertia[0,1]) + inertia.attrib['ixz'] = str(self.inertia[0,2]) + inertia.attrib['iyy'] = str(self.inertia[1,1]) + inertia.attrib['iyz'] = str(self.inertia[1,2]) + inertia.attrib['izz'] = str(self.inertia[2,2]) + node.append(inertia) + return node + + def copy(self, prefix='', mass=None, origin=None, inertia=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Inertial` + A deep copy of the visual. + """ + if mass is None: + mass = self.mass + if origin is None: + origin = self.origin.copy() + if inertia is None: + inertia = self.inertia.copy() + return Inertial( + mass=mass, + inertia=inertia, + origin=origin, + ) + + +############################################################################### +# Joint types +############################################################################### + + +class JointCalibration(URDFType): + """The reference positions of the joint. + + Parameters + ---------- + rising : float, optional + When the joint moves in a positive direction, this position will + trigger a rising edge. + falling : + When the joint moves in a positive direction, this position will + trigger a falling edge. + """ + _ATTRIBS = { + 'rising': (float, False), + 'falling': (float, False) + } + _TAG = 'calibration' + + def __init__(self, rising=None, falling=None): + self.rising = rising + self.falling = falling + + @property + def rising(self): + """float : description. + """ + return self._rising + + @rising.setter + def rising(self, value): + if value is not None: + value = float(value) + self._rising = value + + @property + def falling(self): + """float : description. + """ + return self._falling + + @falling.setter + def falling(self, value): + if value is not None: + value = float(value) + self._falling = value + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.JointCalibration` + A deep copy of the visual. + """ + return JointCalibration( + rising=self.rising, + falling=self.falling, + ) + + +class JointDynamics(URDFType): + """The dynamic properties of the joint. + + Parameters + ---------- + damping : float + The damping value of the joint (Ns/m for prismatic joints, + Nms/rad for revolute). + friction : float + The static friction value of the joint (N for prismatic joints, + Nm for revolute). 
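+
+    Examples
+    --------
+    A minimal sketch (the values are illustrative):
+
+    >>> dyn = JointDynamics(damping=0.5, friction=0.1)
+    >>> (dyn.damping, dyn.friction)
+    (0.5, 0.1)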
+ """ + _ATTRIBS = { + 'damping': (float, False), + 'friction': (float, False), + } + _TAG = 'dynamics' + + def __init__(self, damping, friction): + self.damping = damping + self.friction = friction + + @property + def damping(self): + """float : The damping value of the joint. + """ + return self._damping + + @damping.setter + def damping(self, value): + if value is not None: + value = float(value) + self._damping = value + + @property + def friction(self): + """float : The static friction value of the joint. + """ + return self._friction + + @friction.setter + def friction(self, value): + if value is not None: + value = float(value) + self._friction = value + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.JointDynamics` + A deep copy of the visual. + """ + return JointDynamics( + damping=self.damping, + friction=self.friction, + ) + + +class JointLimit(URDFType): + """The limits of the joint. + + Parameters + ---------- + effort : float + The maximum joint effort (N for prismatic joints, Nm for revolute). + velocity : float + The maximum joint velocity (m/s for prismatic joints, rad/s for + revolute). + lower : float, optional + The lower joint limit (m for prismatic joints, rad for revolute). + upper : float, optional + The upper joint limit (m for prismatic joints, rad for revolute). + """ + + _ATTRIBS = { + 'effort': (float, False), + 'velocity': (float, False), + 'lower': (float, False), + 'upper': (float, False), + } + _TAG = 'limit' + + def __init__(self, effort=None, velocity=None, lower=None, upper=None): + self.effort = effort + self.velocity = velocity + self.lower = lower + self.upper = upper + + @property + def effort(self): + """float : The maximum joint effort. + """ + return self._effort + + @effort.setter + def effort(self, value): + self._effort = float(value) if value else 0.0 + + @property + def velocity(self): + """float : The maximum joint velocity. + """ + return self._velocity + + @velocity.setter + def velocity(self, value): + self._velocity = float(value) if value else 0.0 + + @property + def lower(self): + """float : The lower joint limit. + """ + return self._lower + + @lower.setter + def lower(self, value): + if value is not None: + value = float(value) + self._lower = value + + @property + def upper(self): + """float : The upper joint limit. + """ + return self._upper + + @upper.setter + def upper(self, value): + if value is not None: + value = float(value) + self._upper = value + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.JointLimit` + A deep copy of the visual. + """ + return JointLimit( + effort=self.effort, + velocity=self.velocity, + lower=self.lower, + upper=self.upper, + ) + + +class JointMimic(URDFType): + """A mimicry tag for a joint, which forces its configuration to + mimic another joint's. + + This joint's configuration value is set equal to + ``multiplier * other_joint_cfg + offset``. + + Parameters + ---------- + joint : str + The name of the joint to mimic. + multiplier : float + The joint configuration multiplier. Defaults to 1.0. + offset : float, optional + The joint configuration offset. Defaults to 0.0. 
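+
+    Examples
+    --------
+    A minimal sketch (the mimicked joint name is illustrative):
+
+    >>> mimic = JointMimic(joint='left_finger_joint', multiplier=-1.0)
+    >>> (mimic.multiplier, mimic.offset)
+    (-1.0, 0.0)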
+ """ + _ATTRIBS = { + 'joint': (str, True), + 'multiplier': (float, False), + 'offset': (float, False), + } + _TAG = 'mimic' + + def __init__(self, joint, multiplier=None, offset=None): + self.joint = joint + self.multiplier = multiplier + self.offset = offset + + @property + def joint(self): + """float : The name of the joint to mimic. + """ + return self._joint + + @joint.setter + def joint(self, value): + self._joint = str(value) + + @property + def multiplier(self): + """float : The multiplier for the joint configuration. + """ + return self._multiplier + + @multiplier.setter + def multiplier(self, value): + if value is not None: + value = float(value) + else: + value = 1.0 + self._multiplier = value + + @property + def offset(self): + """float : The offset for the joint configuration + """ + return self._offset + + @offset.setter + def offset(self, value): + if value is not None: + value = float(value) + else: + value = 0.0 + self._offset = value + + def copy(self, prefix='', scale=None): + """Create a deep copy of the joint mimic with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.JointMimic` + A deep copy of the joint mimic. + """ + return JointMimic( + joint='{}{}'.format(prefix, self.joint), + multiplier=self.multiplier, + offset=self.offset + ) + + +class SafetyController(URDFType): + """A controller for joint movement safety. + + Parameters + ---------- + k_velocity : float + An attribute specifying the relation between the effort and velocity + limits. + k_position : float, optional + An attribute specifying the relation between the position and velocity + limits. Defaults to 0.0. + soft_lower_limit : float, optional + The lower joint boundary where the safety controller kicks in. + Defaults to 0.0. + soft_upper_limit : float, optional + The upper joint boundary where the safety controller kicks in. + Defaults to 0.0. + """ + _ATTRIBS = { + 'k_velocity': (float, True), + 'k_position': (float, False), + 'soft_lower_limit': (float, False), + 'soft_upper_limit': (float, False), + } + _TAG = 'safety_controller' + + def __init__(self, k_velocity, k_position=None, soft_lower_limit=None, + soft_upper_limit=None): + self.k_velocity = k_velocity + self.k_position = k_position + self.soft_lower_limit = soft_lower_limit + self.soft_upper_limit = soft_upper_limit + + @property + def soft_lower_limit(self): + """float : The soft lower limit where the safety controller kicks in. + """ + return self._soft_lower_limit + + @soft_lower_limit.setter + def soft_lower_limit(self, value): + if value is not None: + value = float(value) + else: + value = 0.0 + self._soft_lower_limit = value + + @property + def soft_upper_limit(self): + """float : The soft upper limit where the safety controller kicks in. + """ + return self._soft_upper_limit + + @soft_upper_limit.setter + def soft_upper_limit(self, value): + if value is not None: + value = float(value) + else: + value = 0.0 + self._soft_upper_limit = value + + @property + def k_position(self): + """float : A relation between the position and velocity limits. + """ + return self._k_position + + @k_position.setter + def k_position(self, value): + if value is not None: + value = float(value) + else: + value = 0.0 + self._k_position = value + + @property + def k_velocity(self): + """float : A relation between the effort and velocity limits. 
+ """ + return self._k_velocity + + @k_velocity.setter + def k_velocity(self, value): + self._k_velocity = float(value) + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.SafetyController` + A deep copy of the visual. + """ + return SafetyController( + k_velocity=self.k_velocity, + k_position=self.k_position, + soft_lower_limit=self.soft_lower_limit, + soft_upper_limit=self.soft_upper_limit, + ) + + +############################################################################### +# Transmission types +############################################################################### + + +class Actuator(URDFType): + """An actuator. + + Parameters + ---------- + name : str + The name of this actuator. + mechanicalReduction : str, optional + A specifier for the mechanical reduction at the joint/actuator + transmission. + hardwareInterfaces : list of str, optional + The supported hardware interfaces to the actuator. + """ + _ATTRIBS = { + 'name': (str, True), + } + _TAG = 'actuator' + + def __init__(self, name, mechanicalReduction=None, + hardwareInterfaces=None): + self.name = name + self.mechanicalReduction = mechanicalReduction + self.hardwareInterfaces = hardwareInterfaces + + @property + def name(self): + """str : The name of this actuator. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def mechanicalReduction(self): + """str : A specifier for the type of mechanical reduction. + """ + return self._mechanicalReduction + + @mechanicalReduction.setter + def mechanicalReduction(self, value): + if value is not None: + value = str(value) + self._mechanicalReduction = value + + @property + def hardwareInterfaces(self): + """list of str : The supported hardware interfaces. + """ + return self._hardwareInterfaces + + @hardwareInterfaces.setter + def hardwareInterfaces(self, value): + if value is None: + value = [] + else: + value = list(value) + for i, v in enumerate(value): + value[i] = str(v) + self._hardwareInterfaces = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + mr = node.find('mechanicalReduction') + if mr is not None: + mr = float(mr.text) + kwargs['mechanicalReduction'] = mr + hi = node.findall('hardwareInterface') + if len(hi) > 0: + hi = [h.text for h in hi] + kwargs['hardwareInterfaces'] = hi + return Actuator(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + if self.mechanicalReduction is not None: + mr = ET.Element('mechanicalReduction') + mr.text = str(self.mechanicalReduction) + node.append(mr) + if len(self.hardwareInterfaces) > 0: + for hi in self.hardwareInterfaces: + h = ET.Element('hardwareInterface') + h.text = hi + node.append(h) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy of the visual with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Actuator` + A deep copy of the visual. + """ + return Actuator( + name='{}{}'.format(prefix, self.name), + mechanicalReduction=self.mechanicalReduction, + hardwareInterfaces=self.hardwareInterfaces.copy(), + ) + + +class TransmissionJoint(URDFType): + """A transmission joint specification. + + Parameters + ---------- + name : str + The name of this actuator. 
+ hardwareInterfaces : list of str, optional + The supported hardware interfaces to the actuator. + """ + _ATTRIBS = { + 'name': (str, True), + } + _TAG = 'joint' + + def __init__(self, name, hardwareInterfaces): + self.name = name + self.hardwareInterfaces = hardwareInterfaces + + @property + def name(self): + """str : The name of this transmission joint. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def hardwareInterfaces(self): + """list of str : The supported hardware interfaces. + """ + return self._hardwareInterfaces + + @hardwareInterfaces.setter + def hardwareInterfaces(self, value): + if value is None: + value = [] + else: + value = list(value) + for i, v in enumerate(value): + value[i] = str(v) + self._hardwareInterfaces = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + hi = node.findall('hardwareInterface') + if len(hi) > 0: + hi = [h.text for h in hi] + kwargs['hardwareInterfaces'] = hi + return TransmissionJoint(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + if len(self.hardwareInterfaces) > 0: + for hi in self.hardwareInterfaces: + h = ET.Element('hardwareInterface') + h.text = hi + node.append(h) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.TransmissionJoint` + A deep copy. + """ + return TransmissionJoint( + name='{}{}'.format(prefix, self.name), + hardwareInterfaces=self.hardwareInterfaces.copy(), + ) + + +############################################################################### +# Top-level types +############################################################################### + + +class Transmission(URDFType): + """An element that describes the relationship between an actuator and a + joint. + + Parameters + ---------- + name : str + The name of this transmission. + trans_type : str + The type of this transmission. + joints : list of :class:`.TransmissionJoint` + The joints connected to this transmission. + actuators : list of :class:`.Actuator` + The actuators connected to this transmission. + """ + _ATTRIBS = { + 'name': (str, True), + } + _ELEMENTS = { + 'joints': (TransmissionJoint, True, True), + 'actuators': (Actuator, True, True), + } + _TAG = 'transmission' + + def __init__(self, name, trans_type, joints=None, actuators=None): + self.name = name + self.trans_type = trans_type + self.joints = joints + self.actuators = actuators + + @property + def name(self): + """str : The name of this transmission. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def trans_type(self): + """str : The type of this transmission. + """ + return self._trans_type + + @trans_type.setter + def trans_type(self, value): + self._trans_type = str(value) + + @property + def joints(self): + """:class:`.TransmissionJoint` : The joints the transmission is + connected to. + """ + return self._joints + + @joints.setter + def joints(self, value): + if value is None: + value = [] + else: + value = list(value) + for v in value: + if not isinstance(v, TransmissionJoint): + raise TypeError( + 'Joints expects a list of TransmissionJoint' + ) + self._joints = value + + @property + def actuators(self): + """:class:`.Actuator` : The actuators the transmission is connected to. 
+ """ + return self._actuators + + @actuators.setter + def actuators(self, value): + if value is None: + value = [] + else: + value = list(value) + for v in value: + if not isinstance(v, Actuator): + raise TypeError( + 'Actuators expects a list of Actuator' + ) + self._actuators = value + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + kwargs['trans_type'] = node.find('type').text + return Transmission(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + ttype = ET.Element('type') + ttype.text = self.trans_type + node.append(ttype) + return node + + def copy(self, prefix='', scale=None): + """Create a deep copy with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all names. + + Returns + ------- + :class:`.Transmission` + A deep copy. + """ + return Transmission( + name='{}{}'.format(prefix, self.name), + trans_type=self.trans_type, + joints=[j.copy(prefix) for j in self.joints], + actuators=[a.copy(prefix) for a in self.actuators], + ) + + +class Joint(URDFType): + """A connection between two links. + + There are several types of joints, including: + + - ``fixed`` - a joint that cannot move. + - ``prismatic`` - a joint that slides along the joint axis. + - ``revolute`` - a hinge joint that rotates about the axis with a limited + range of motion. + - ``continuous`` - a hinge joint that rotates about the axis with an + unlimited range of motion. + - ``planar`` - a joint that moves in the plane orthogonal to the axis. + - ``floating`` - a joint that can move in 6DoF. + + Parameters + ---------- + name : str + The name of this joint. + parent : str + The name of the parent link of this joint. + child : str + The name of the child link of this joint. + joint_type : str + The type of the joint. Must be one of :obj:`.Joint.TYPES`. + axis : (3,) float, optional + The axis of the joint specified in joint frame. Defaults to + ``[1,0,0]``. + origin : (4,4) float, optional + The pose of the child link with respect to the parent link's frame. + The joint frame is defined to be coincident with the child link's + frame, so this is also the pose of the joint frame with respect to + the parent link's frame. + limit : :class:`.JointLimit`, optional + Limit for the joint. Only required for revolute and prismatic + joints. + dynamics : :class:`.JointDynamics`, optional + Dynamics for the joint. + safety_controller : :class`.SafetyController`, optional + The safety controller for this joint. + calibration : :class:`.JointCalibration`, optional + Calibration information for the joint. + mimic : :class:`JointMimic`, optional + Joint mimicry information. 
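+
+    Examples
+    --------
+    A minimal construction sketch (the link names and axis are illustrative):
+
+    >>> j = Joint(name='wheel_joint', joint_type='continuous',
+    ...           parent='chassis', child='wheel', axis=[0, 1, 0])
+    >>> j.axis
+    array([0., 1., 0.])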
+ """ + TYPES = ['fixed', 'prismatic', 'revolute', + 'continuous', 'floating', 'planar'] + _ATTRIBS = { + 'name': (str, True), + } + _ELEMENTS = { + 'dynamics': (JointDynamics, False, False), + 'limit': (JointLimit, False, False), + 'mimic': (JointMimic, False, False), + 'safety_controller': (SafetyController, False, False), + 'calibration': (JointCalibration, False, False), + } + _TAG = 'joint' + + def __init__(self, name, joint_type, parent, child, axis=None, origin=None, + limit=None, dynamics=None, safety_controller=None, + calibration=None, mimic=None): + self.name = name + self.parent = parent + self.child = child + self.joint_type = joint_type + self.axis = axis + self.origin = origin + self.limit = limit + self.dynamics = dynamics + self.safety_controller = safety_controller + self.calibration = calibration + self.mimic = mimic + + @property + def name(self): + """str : Name for this joint. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def joint_type(self): + """str : The type of this joint. + """ + return self._joint_type + + @joint_type.setter + def joint_type(self, value): + value = str(value) + if value not in Joint.TYPES: + raise ValueError('Unsupported joint type {}'.format(value)) + self._joint_type = value + + @property + def parent(self): + """str : The name of the parent link. + """ + return self._parent + + @parent.setter + def parent(self, value): + self._parent = str(value) + + @property + def child(self): + """str : The name of the child link. + """ + return self._child + + @child.setter + def child(self, value): + self._child = str(value) + + @property + def axis(self): + """(3,) float : The joint axis in the joint frame. + """ + return self._axis + + @axis.setter + def axis(self, value): + if value is None: + value = np.array([1.0, 0.0, 0.0], dtype=np.float64) + elif np.linalg.norm(value) < 1e-4: + value = np.array([1.0, 0.0, 0.0], dtype=np.float64) + else: + value = np.asanyarray(value, dtype=np.float64) + if value.shape != (3,): + raise ValueError('Invalid shape for axis, should be (3,)') + value = value / np.linalg.norm(value) + self._axis = value + + @property + def origin(self): + """(4,4) float : The pose of child and joint frames relative to the + parent link's frame. + """ + return self._origin + + @origin.setter + def origin(self, value): + self._origin = configure_origin(value) + + @property + def limit(self): + """:class:`.JointLimit` : The limits for this joint. + """ + return self._limit + + @limit.setter + def limit(self, value): + if value is None: + if self.joint_type in ['prismatic', 'revolute']: + raise ValueError('Require joint limit for prismatic and ' + 'revolute joints') + elif not isinstance(value, JointLimit): + raise TypeError('Expected JointLimit type') + self._limit = value + + @property + def dynamics(self): + """:class:`.JointDynamics` : The dynamics for this joint. + """ + return self._dynamics + + @dynamics.setter + def dynamics(self, value): + if value is not None: + if not isinstance(value, JointDynamics): + raise TypeError('Expected JointDynamics type') + self._dynamics = value + + @property + def safety_controller(self): + """:class:`.SafetyController` : The safety controller for this joint. 
+        """
+        return self._safety_controller
+
+    @safety_controller.setter
+    def safety_controller(self, value):
+        if value is not None:
+            if not isinstance(value, SafetyController):
+                raise TypeError('Expected SafetyController type')
+        self._safety_controller = value
+
+    @property
+    def calibration(self):
+        """:class:`.JointCalibration` : The calibration for this joint.
+        """
+        return self._calibration
+
+    @calibration.setter
+    def calibration(self, value):
+        if value is not None:
+            if not isinstance(value, JointCalibration):
+                raise TypeError('Expected JointCalibration type')
+        self._calibration = value
+
+    @property
+    def mimic(self):
+        """:class:`.JointMimic` : The mimic for this joint.
+        """
+        return self._mimic
+
+    @mimic.setter
+    def mimic(self, value):
+        if value is not None:
+            if not isinstance(value, JointMimic):
+                raise TypeError('Expected JointMimic type')
+        self._mimic = value
+
+    def is_valid(self, cfg):
+        """Check if the provided configuration value is valid for this joint.
+
+        Parameters
+        ----------
+        cfg : float, (2,) float, (6,) float, or (4,4) float
+            The configuration of the joint.
+
+        Returns
+        -------
+        is_valid : bool
+            True if the configuration is valid, and False otherwise.
+        """
+        if self.joint_type not in ['fixed', 'revolute']:
+            return True
+        if self.limit is None:
+            return True
+        cfg = float(cfg)
+        lower = -np.inf
+        upper = np.inf
+        if self.limit.lower is not None:
+            lower = self.limit.lower
+        if self.limit.upper is not None:
+            upper = self.limit.upper
+        return (cfg >= lower and cfg <= upper)
+
+    def get_child_pose(self, cfg=None):
+        """Computes the child pose relative to a parent pose for a given
+        configuration value.
+
+        Parameters
+        ----------
+        cfg : float, (2,) float, (6,) float, or (4,4) float
+            The configuration values for this joint. They are interpreted
+            based on the joint type as follows:
+
+            - ``fixed`` - not used.
+            - ``prismatic`` - a translation along the axis in meters.
+            - ``revolute`` - a rotation about the axis in radians.
+            - ``continuous`` - a rotation about the axis in radians.
+            - ``planar`` - the x and y translation values in the plane.
+            - ``floating`` - the xyz values followed by the rpy values,
+              or a (4,4) matrix.
+
+            If ``cfg`` is ``None``, then this just returns the joint pose.
+
+        Returns
+        -------
+        pose : (4,4) float
+            The pose of the child relative to the parent.
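+
+        Examples
+        --------
+        A minimal sketch for a revolute joint about the z-axis (the names and
+        limits are illustrative):
+
+        >>> j = Joint(name='elbow', joint_type='revolute', parent='upper_arm',
+        ...           child='forearm', axis=[0, 0, 1],
+        ...           limit=JointLimit(lower=-2.0, upper=2.0))
+        >>> round(float(j.get_child_pose(np.pi)[0, 0]), 6)
+        -1.0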
+ """ + if cfg is None: + return self.origin + elif self.joint_type == 'fixed': + return self.origin + elif self.joint_type in ['revolute', 'continuous']: + if cfg is None: + cfg = 0.0 + else: + cfg = float(cfg) + R = trimesh.transformations.rotation_matrix(cfg, self.axis) + return self.origin.dot(R) + elif self.joint_type == 'prismatic': + if cfg is None: + cfg = 0.0 + else: + cfg = float(cfg) + translation = np.eye(4, dtype=np.float64) + translation[:3,3] = self.axis * cfg + return self.origin.dot(translation) + elif self.joint_type == 'planar': + if cfg is None: + cfg = np.zeros(2, dtype=np.float64) + else: + cfg = np.asanyarray(cfg, dtype=np.float64) + if cfg.shape != (2,): + raise ValueError( + '(2,) float configuration required for planar joints' + ) + translation = np.eye(4, dtype=np.float64) + translation[:3,3] = self.origin[:3,:2].dot(cfg) + return self.origin.dot(translation) + elif self.joint_type == 'floating': + if cfg is None: + cfg = np.zeros(6, dtype=np.float64) + else: + cfg = configure_origin(cfg) + if cfg is None: + raise ValueError('Invalid configuration for floating joint') + return self.origin.dot(cfg) + else: + raise ValueError('Invalid configuration') + + def get_child_poses(self, cfg, n_cfgs): + """Computes the child pose relative to a parent pose for a given set of + configuration values. + + Parameters + ---------- + cfg : (n,) float or None + The configuration values for this joint. They are interpreted + based on the joint type as follows: + + - ``fixed`` - not used. + - ``prismatic`` - a translation along the axis in meters. + - ``revolute`` - a rotation about the axis in radians. + - ``continuous`` - a rotation about the axis in radians. + - ``planar`` - Not implemented. + - ``floating`` - Not implemented. + + If ``cfg`` is ``None``, then this just returns the joint pose. + + Returns + ------- + poses : (n,4,4) float + The poses of the child relative to the parent. 
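+
+        Examples
+        --------
+        A minimal sketch (the link names are illustrative):
+
+        >>> j = Joint(name='wheel_joint', joint_type='continuous',
+        ...           parent='chassis', child='wheel', axis=[0, 1, 0])
+        >>> j.get_child_poses(np.zeros(3), n_cfgs=3).shape
+        (3, 4, 4)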
+ """ + if cfg is None: + return np.tile(self.origin, (n_cfgs, 1, 1)) + elif self.joint_type == 'fixed': + return np.tile(self.origin, (n_cfgs, 1, 1)) + elif self.joint_type in ['revolute', 'continuous']: + if cfg is None: + cfg = np.zeros(n_cfgs) + return np.matmul(self.origin, self._rotation_matrices(cfg, self.axis)) + elif self.joint_type == 'prismatic': + if cfg is None: + cfg = np.zeros(n_cfgs) + translation = np.tile(np.eye(4), (n_cfgs, 1, 1)) + translation[:,:3,3] = self.axis * cfg[:,np.newaxis] + return np.matmul(self.origin, translation) + elif self.joint_type == 'planar': + raise NotImplementedError() + elif self.joint_type == 'floating': + raise NotImplementedError() + else: + raise ValueError('Invalid configuration') + + @classmethod + def _from_xml(cls, node, path): + kwargs = cls._parse(node, path) + kwargs['joint_type'] = str(node.attrib['type']) + kwargs['parent'] = node.find('parent').attrib['link'] + kwargs['child'] = node.find('child').attrib['link'] + axis = node.find('axis') + if axis is not None: + axis = np.fromstring(axis.attrib['xyz'], sep=' ') + kwargs['axis'] = axis + kwargs['origin'] = parse_origin(node) + return Joint(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + parent = ET.Element('parent') + parent.attrib['link'] = self.parent + node.append(parent) + child = ET.Element('child') + child.attrib['link'] = self.child + node.append(child) + if self.axis is not None: + axis = ET.Element('axis') + axis.attrib['xyz'] = np.array2string(self.axis)[1:-1] + node.append(axis) + node.append(unparse_origin(self.origin)) + node.attrib['type'] = self.joint_type + return node + + def _rotation_matrices(self, angles, axis): + """Compute rotation matrices from angle/axis representations. + + Parameters + ---------- + angles : (n,) float + The angles. + axis : (3,) float + The axis. + + Returns + ------- + rots : (n,4,4) + The rotation matrices + """ + axis = axis / np.linalg.norm(axis) + sina = np.sin(angles) + cosa = np.cos(angles) + M = np.tile(np.eye(4), (len(angles), 1, 1)) + M[:,0,0] = cosa + M[:,1,1] = cosa + M[:,2,2] = cosa + M[:,:3,:3] += ( + np.tile(np.outer(axis, axis), (len(angles), 1, 1)) * + (1.0 - cosa)[:, np.newaxis, np.newaxis] + ) + M[:,:3,:3] += np.tile(np.array([ + [0.0, -axis[2], axis[1]], + [axis[2], 0.0, -axis[0]], + [-axis[1], axis[0], 0.0]] + ), (len(angles), 1, 1)) * sina[:, np.newaxis, np.newaxis] + return M + + def copy(self, prefix='', scale=None): + """Create a deep copy of the joint with the prefix applied to all names. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + :class:`.Joint` + A deep copy of the joint. 
+ """ + origin = self.origin.copy() + if scale is not None: + if not isinstance(scale, (list, np.ndarray)): + scale = np.repeat(scale, 3) + origin[:3,3] *= scale + cpy = Joint( + name='{}{}'.format(prefix, self.name), + joint_type=self.joint_type, + parent='{}{}'.format(prefix, self.parent), + child='{}{}'.format(prefix, self.child), + axis=self.axis.copy(), + origin=origin, + limit=(self.limit.copy(prefix, scale) if self.limit else None), + dynamics=(self.dynamics.copy(prefix,scale) if self.dynamics else None), + safety_controller=(self.safety_controller.copy(prefix, scale) if + self.safety_controller else None), + calibration=(self.calibration.copy(prefix, scale) if self.calibration else None), + mimic=(self.mimic.copy(prefix=prefix, scale=scale) if self.mimic else None) + ) + return cpy + + +class Link(URDFType): + """A link of a rigid object. + + Parameters + ---------- + name : str + The name of the link. + inertial : :class:`.Inertial`, optional + The inertial properties of the link. + visuals : list of :class:`.Visual`, optional + The visual properties of the link. + collsions : list of :class:`.Collision`, optional + The collision properties of the link. + """ + + _ATTRIBS = { + 'name': (str, True), + } + _ELEMENTS = { + 'inertial': (Inertial, False, False), + 'visuals': (Visual, False, True), + 'collisions': (Collision, False, True), + } + _TAG = 'link' + + def __init__(self, name, inertial, visuals, collisions): + self.name = name + self.inertial = inertial + self.visuals = visuals + self.collisions = collisions + + self._collision_mesh = None + + @property + def name(self): + """str : The name of this link. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def inertial(self): + """:class:`.Inertial` : Inertial properties of the link. + """ + return self._inertial + + @inertial.setter + def inertial(self, value): + if value is not None and not isinstance(value, Inertial): + raise TypeError('Expected Inertial object') + # Set default inertial + if value is None: + value = Inertial(mass=1.0, inertia=np.eye(3)) + self._inertial = value + + @property + def visuals(self): + """list of :class:`.Visual` : The visual properties of this link. + """ + return self._visuals + + @visuals.setter + def visuals(self, value): + if value is None: + value = [] + else: + value = list(value) + for v in value: + if not isinstance(v, Visual): + raise ValueError('Expected list of Visual objects') + self._visuals = value + + @property + def collisions(self): + """list of :class:`.Collision` : The collision properties of this link. + """ + return self._collisions + + @collisions.setter + def collisions(self, value): + if value is None: + value = [] + else: + value = list(value) + for v in value: + if not isinstance(v, Collision): + raise ValueError('Expected list of Collision objects') + self._collisions = value + + @property + def collision_mesh(self): + """:class:`~trimesh.base.Trimesh` : A single collision mesh for + the link, specified in the link frame, or None if there isn't one. 
+ """ + if len(self.collisions) == 0: + return None + if self._collision_mesh is None: + meshes = [] + for c in self.collisions: + for m in c.geometry.meshes: + m = m.copy() + pose = c.origin + if c.geometry.mesh is not None: + if c.geometry.mesh.scale is not None: + S = np.eye(4) + S[:3,:3] = np.diag(c.geometry.mesh.scale) + pose = pose.dot(S) + m.apply_transform(pose) + meshes.append(m) + if len(meshes) == 0: + return None + self._collision_mesh = (meshes[0] + meshes[1:]) + return self._collision_mesh + + def copy(self, prefix='', scale=None, collision_only=False): + """Create a deep copy of the link. + + Parameters + ---------- + prefix : str + A prefix to apply to all joint and link names. + + Returns + ------- + link : :class:`.Link` + A deep copy of the Link. + """ + inertial = self.inertial.copy() if self.inertial is not None else None + cm = self._collision_mesh + if scale is not None: + if self.collision_mesh is not None and self.inertial is not None: + sm = np.eye(4) + if not isinstance(scale, (list, np.ndarray)): + scale = np.repeat(scale, 3) + sm[:3,:3] = np.diag(scale) + cm = self.collision_mesh.copy() + cm.density = self.inertial.mass / cm.volume + cm.apply_transform(sm) + cmm = np.eye(4) + cmm[:3,3] = cm.center_mass + inertial = Inertial(mass=cm.mass, inertia=cm.moment_inertia, + origin=cmm) + + visuals = None + if not collision_only: + visuals=[v.copy(prefix=prefix, scale=scale) for v in self.visuals] + + cpy = Link( + name='{}{}'.format(prefix, self.name), + inertial=inertial, + visuals=visuals, + collisions=[v.copy(prefix=prefix, scale=scale) for v in self.collisions], + ) + cpy._collision_mesh = cm + return cpy + + +class URDF(URDFType): + """The top-level URDF specification. + + The URDF encapsulates an articulated object, such as a robot or a gripper. + It is made of links and joints that tie them together and define their + relative motions. + + Parameters + ---------- + name : str + The name of the URDF. + links : list of :class:`.Link` + The links of the URDF. + joints : list of :class:`.Joint`, optional + The joints of the URDF. + transmissions : list of :class:`.Transmission`, optional + The transmissions of the URDF. + materials : list of :class:`.Material`, optional + The materials for the URDF. + other_xml : str, optional + A string containing any extra XML for extensions. 
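+
+    Examples
+    --------
+    A minimal usage sketch; the ``robot.urdf`` path and all names shown are
+    illustrative:
+
+    >>> robot = URDF.load('robot.urdf')
+    >>> robot.base_link.name
+    'base_link'
+    >>> [j.name for j in robot.actuated_joints]
+    ['joint_1', 'joint_2']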
+ """ + _ATTRIBS = { + 'name': (str, True), + } + _ELEMENTS = { + 'links': (Link, True, True), + 'joints': (Joint, False, True), + 'transmissions': (Transmission, False, True), + 'materials': (Material, False, True), + } + _TAG = 'robot' + + def __init__(self, name, links, joints=None, + transmissions=None, materials=None, other_xml=None): + if joints is None: + joints = [] + if transmissions is None: + transmissions = [] + if materials is None: + materials = [] + + self.name = name + self.other_xml = other_xml + + # No setters for these + self._links = list(links) + self._joints = list(joints) + self._transmissions = list(transmissions) + self._materials = list(materials) + + # Set up private helper maps from name to value + self._link_map = {} + self._joint_map = {} + self._transmission_map = {} + self._material_map = {} + + for x in self._links: + if x.name in self._link_map: + raise ValueError('Two links with name {} found'.format(x.name)) + self._link_map[x.name] = x + + for x in self._joints: + if x.name in self._joint_map: + raise ValueError('Two joints with name {} ' + 'found'.format(x.name)) + self._joint_map[x.name] = x + + for x in self._transmissions: + if x.name in self._transmission_map: + raise ValueError('Two transmissions with name {} ' + 'found'.format(x.name)) + self._transmission_map[x.name] = x + + for x in self._materials: + if x.name in self._material_map: + raise ValueError('Two materials with name {} ' + 'found'.format(x.name)) + self._material_map[x.name] = x + + # Synchronize materials between links and top-level set + self._merge_materials() + + # Validate the joints and transmissions + actuated_joints = self._validate_joints() + self._validate_transmissions() + + # Create the link graph and base link/end link sets + self._G = nx.DiGraph() + + # Add all links + for link in self.links: + self._G.add_node(link) + + # Add all edges from CHILDREN TO PARENTS, with joints as their object + for joint in self.joints: + parent = self._link_map[joint.parent] + child = self._link_map[joint.child] + self._G.add_edge(child, parent, joint=joint) + + # Validate the graph and get the base and end links + self._base_link, self._end_links = self._validate_graph() + + # Cache the paths to the base link + self._paths_to_base = nx.shortest_path( + self._G, target=self._base_link + ) + + self._actuated_joints = self._sort_joints(actuated_joints) + + # Cache the reverse topological order (useful for speeding up FK, + # as we want to start at the base and work outward to cache + # computation. + self._reverse_topo = list(reversed(list(nx.topological_sort(self._G)))) + + @property + def name(self): + """str : The name of the URDF. + """ + return self._name + + @name.setter + def name(self, value): + self._name = str(value) + + @property + def links(self): + """list of :class:`.Link` : The links of the URDF. + + This returns a copy of the links array which cannot be edited + directly. If you want to add or remove links, use + the appropriate functions. + """ + return copy.copy(self._links) + + @property + def link_map(self): + """dict : Map from link names to the links themselves. + + This returns a copy of the link map which cannot be edited + directly. If you want to add or remove links, use + the appropriate functions. + """ + return copy.copy(self._link_map) + + @property + def joints(self): + """list of :class:`.Joint` : The links of the URDF. + + This returns a copy of the joints array which cannot be edited + directly. 
If you want to add or remove joints, use + the appropriate functions. + """ + return copy.copy(self._joints) + + @property + def joint_map(self): + """dict : Map from joint names to the joints themselves. + + This returns a copy of the joint map which cannot be edited + directly. If you want to add or remove joints, use + the appropriate functions. + """ + return copy.copy(self._joint_map) + + @property + def transmissions(self): + """list of :class:`.Transmission` : The transmissions of the URDF. + + This returns a copy of the transmissions array which cannot be edited + directly. If you want to add or remove transmissions, use + the appropriate functions. + """ + return copy.copy(self._transmissions) + + @property + def transmission_map(self): + """dict : Map from transmission names to the transmissions themselves. + + This returns a copy of the transmission map which cannot be edited + directly. If you want to add or remove transmissions, use + the appropriate functions. + """ + return copy.copy(self._transmission_map) + + @property + def materials(self): + """list of :class:`.Material` : The materials of the URDF. + + This returns a copy of the materials array which cannot be edited + directly. If you want to add or remove materials, use + the appropriate functions. + """ + return copy.copy(self._materials) + + @property + def material_map(self): + """dict : Map from material names to the materials themselves. + + This returns a copy of the material map which cannot be edited + directly. If you want to add or remove materials, use + the appropriate functions. + """ + return copy.copy(self._material_map) + + @property + def other_xml(self): + """str : Any extra XML that belongs with the URDF. + """ + return self._other_xml + + @other_xml.setter + def other_xml(self, value): + self._other_xml = value + + @property + def actuated_joints(self): + """list of :class:`.Joint` : The joints that are independently + actuated. + + This excludes mimic joints and fixed joints. The joints are listed + in topological order, starting from the base-most joint. + """ + return self._actuated_joints + + @property + def actuated_joint_names(self): + """list of :class:`.Joint` : The names of joints that are independently + actuated. + + This excludes mimic joints and fixed joints. The joints are listed + in topological order, starting from the base-most joint. + """ + return [j.name for j in self._actuated_joints] + + def cfg_to_vector(self, cfg): + """Convert a configuration dictionary into a configuration vector. + + Parameters + ---------- + cfg : dict or None + The configuration value. + + Returns + ------- + vec : (n,) float + The configuration vector, or None if no actuated joints present. + """ + if cfg is None: + if len(self.actuated_joints) > 0: + return np.zeros(len(self.actuated_joints)) + else: + return None + elif isinstance(cfg, (list, tuple, np.ndarray)): + return np.asanyarray(cfg) + elif isinstance(cfg, dict): + vec = np.zeros(len(self.actuated_joints)) + for i, jn in enumerate(self.actuated_joint_names): + if jn in cfg: + vec[i] = cfg[jn] + return vec + else: + raise ValueError('Invalid configuration: {}'.format(cfg)) + + @property + def base_link(self): + """:class:`.Link`: The base link for the URDF. + + The base link is the single link that has no parent. + """ + return self._base_link + + @property + def end_links(self): + """list of :class:`.Link`: The end links for the URDF. + + The end links are the links that have no children. 
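+
+        Examples
+        --------
+        For instance (link names are illustrative):
+
+        >>> [l.name for l in robot.end_links]
+        ['gripper_link', 'camera_link']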
+ """ + return self._end_links + + @property + def joint_limit_cfgs(self): + """tuple of dict : The lower-bound and upper-bound joint configuration + maps. + + The first map is the lower-bound map, which maps limited joints to + their lower joint limits. + The second map is the upper-bound map, which maps limited joints to + their upper joint limits. + """ + lb = {} + ub = {} + for joint in self.actuated_joints: + if joint.limit is not None: + if joint.limit.lower is not None: + lb[joint] = joint.limit.lower + if joint.limit.upper is not None: + ub[joint] = joint.limit.upper + return (lb, ub) + + @property + def joint_limits(self): + """(n,2) float : A lower and upper limit for each joint. + """ + limits = [] + for joint in self.actuated_joints: + limit = [-np.infty, np.infty] + if joint.limit is not None: + if joint.limit.lower is not None: + limit[0] = joint.limit.lower + if joint.limit.upper is not None: + limit[1] = joint.limit.upper + limits.append(limit) + return np.array(limits) + + def link_fk(self, cfg=None, link=None, links=None, use_names=False): + """Computes the poses of the URDF's links via forward kinematics. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. + If not specified, all joints are assumed to be in their default + configurations. + link : str or :class:`.Link` + A single link or link name to return a pose for. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only these links will be in the returned map. If neither + link nor links are specified all links are returned. + use_names : bool + If True, the returned dictionary will have keys that are string + link names rather than the links themselves. + + Returns + ------- + fk : dict or (4,4) float + A map from links to 4x4 homogenous transform matrices that + position them relative to the base link's frame, or a single + 4x4 matrix if ``link`` is specified. 
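+
+        Examples
+        --------
+        A minimal sketch; the URDF path and joint name are illustrative:
+
+        >>> robot = URDF.load('robot.urdf')
+        >>> fk = robot.link_fk(cfg={'joint_1': 0.5})
+        >>> fk[robot.base_link].shape
+        (4, 4)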
+ """ + # Process config value + joint_cfg = self._process_cfg(cfg) + + # Process link set + link_set = set() + if link is not None: + if isinstance(link, six.string_types): + link_set.add(self._link_map[link]) + elif isinstance(link, Link): + link_set.add(link) + elif links is not None: + for lnk in links: + if isinstance(lnk, six.string_types): + link_set.add(self._link_map[lnk]) + elif isinstance(lnk, Link): + link_set.add(lnk) + else: + raise TypeError('Got object of type {} in links list' + .format(type(lnk))) + else: + link_set = self.links + + # Compute forward kinematics in reverse topological order + fk = OrderedDict() + for lnk in self._reverse_topo: + if lnk not in link_set: + continue + pose = np.eye(4, dtype=np.float64) + path = self._paths_to_base[lnk] + for i in range(len(path) - 1): + child = path[i] + parent = path[i + 1] + joint = self._G.get_edge_data(child, parent)['joint'] + + cfg = None + if joint.mimic is not None: + mimic_joint = self._joint_map[joint.mimic.joint] + if mimic_joint in joint_cfg: + cfg = joint_cfg[mimic_joint] + cfg = joint.mimic.multiplier * cfg + joint.mimic.offset + elif joint in joint_cfg: + cfg = joint_cfg[joint] + pose = joint.get_child_pose(cfg).dot(pose) + + # Check existing FK to see if we can exit early + if parent in fk: + pose = fk[parent].dot(pose) + break + fk[lnk] = pose + + if link: + if isinstance(link, six.string_types): + return fk[self._link_map[link]] + else: + return fk[link] + if use_names: + return {ell.name: fk[ell] for ell in fk} + return fk + + def link_fk_batch(self, cfgs=None, link=None, links=None, use_names=False): + """Computes the poses of the URDF's links via forward kinematics in a batch. + + Parameters + ---------- + cfgs : dict, list of dict, or (n,m), float + One of the following: (A) a map from joints or joint names to vectors + of joint configuration values, (B) a list of maps from joints or joint names + to single configuration values, or (C) a list of ``n`` configuration vectors, + each of which has a vector with an entry for each actuated joint. + link : str or :class:`.Link` + A single link or link name to return a pose for. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only these links will be in the returned map. If neither + link nor links are specified all links are returned. + use_names : bool + If True, the returned dictionary will have keys that are string + link names rather than the links themselves. + + Returns + ------- + fk : dict or (n,4,4) float + A map from links to a (n,4,4) vector of homogenous transform matrices that + position the links relative to the base link's frame, or a single + nx4x4 matrix if ``link`` is specified. 
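+
+        Examples
+        --------
+        A minimal sketch; the URDF path and joint name are illustrative:
+
+        >>> robot = URDF.load('robot.urdf')
+        >>> fk = robot.link_fk_batch(cfgs={'joint_1': np.linspace(0.0, 1.0, 10)})
+        >>> fk[robot.base_link].shape
+        (10, 4, 4)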
+ """ + joint_cfgs, n_cfgs = self._process_cfgs(cfgs) + + # Process link set + link_set = set() + if link is not None: + if isinstance(link, six.string_types): + link_set.add(self._link_map[link]) + elif isinstance(link, Link): + link_set.add(link) + elif links is not None: + for lnk in links: + if isinstance(lnk, six.string_types): + link_set.add(self._link_map[lnk]) + elif isinstance(lnk, Link): + link_set.add(lnk) + else: + raise TypeError('Got object of type {} in links list' + .format(type(lnk))) + else: + link_set = self.links + + # Compute FK mapping each link to a vector of matrices, one matrix per cfg + fk = OrderedDict() + for lnk in self._reverse_topo: + if lnk not in link_set: + continue + poses = np.tile(np.eye(4, dtype=np.float64), (n_cfgs, 1, 1)) + path = self._paths_to_base[lnk] + for i in range(len(path) - 1): + child = path[i] + parent = path[i + 1] + joint = self._G.get_edge_data(child, parent)['joint'] + + cfg_vals = None + if joint.mimic is not None: + mimic_joint = self._joint_map[joint.mimic.joint] + if mimic_joint in joint_cfgs: + cfg_vals = joint_cfgs[mimic_joint] + cfg_vals = joint.mimic.multiplier * cfg_vals + joint.mimic.offset + elif joint in joint_cfgs: + cfg_vals = joint_cfgs[joint] + poses = np.matmul(joint.get_child_poses(cfg_vals, n_cfgs), poses) + + if parent in fk: + poses = np.matmul(fk[parent], poses) + break + fk[lnk] = poses + + if link: + if isinstance(link, six.string_types): + return fk[self._link_map[link]] + else: + return fk[link] + if use_names: + return {ell.name: fk[ell] for ell in fk} + return fk + + def visual_geometry_fk(self, cfg=None, links=None): + """Computes the poses of the URDF's visual geometries using fk. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. + If not specified, all joints are assumed to be in their default + configurations. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only geometries from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`Geometry` objects that are part of the visual + elements of the specified links to the 4x4 homogenous transform + matrices that position them relative to the base link's frame. + """ + lfk = self.link_fk(cfg=cfg, links=links) + + fk = OrderedDict() + for link in lfk: + for visual in link.visuals: + fk[visual.geometry] = lfk[link].dot(visual.origin) + return fk + + def visual_geometry_fk_batch(self, cfgs=None, links=None): + """Computes the poses of the URDF's visual geometries using fk. + + Parameters + ---------- + cfgs : dict, list of dict, or (n,m), float + One of the following: (A) a map from joints or joint names to vectors + of joint configuration values, (B) a list of maps from joints or joint names + to single configuration values, or (C) a list of ``n`` configuration vectors, + each of which has a vector with an entry for each actuated joint. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only geometries from these links will be in the returned map. + If not specified, all links are returned. 
+ + Returns + ------- + fk : dict + A map from :class:`Geometry` objects that are part of the visual + elements of the specified links to the 4x4 homogenous transform + matrices that position them relative to the base link's frame. + """ + lfk = self.link_fk_batch(cfgs=cfgs, links=links) + + fk = OrderedDict() + for link in lfk: + for visual in link.visuals: + fk[visual.geometry] = np.matmul(lfk[link], visual.origin) + return fk + + def visual_trimesh_fk(self, cfg=None, links=None): + """Computes the poses of the URDF's visual trimeshes using fk. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. + If not specified, all joints are assumed to be in their default + configurations. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only trimeshes from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`~trimesh.base.Trimesh` objects that are + part of the visual geometry of the specified links to the + 4x4 homogenous transform matrices that position them relative + to the base link's frame. + """ + lfk = self.link_fk(cfg=cfg, links=links) + + fk = OrderedDict() + for link in lfk: + for visual in link.visuals: + for mesh in visual.geometry.meshes: + pose = lfk[link].dot(visual.origin) + if visual.geometry.mesh is not None: + if visual.geometry.mesh.scale is not None: + S = np.eye(4, dtype=np.float64) + S[:3,:3] = np.diag(visual.geometry.mesh.scale) + pose = pose.dot(S) + fk[mesh] = pose + return fk + + def visual_trimesh_fk_batch(self, cfgs=None, links=None): + """Computes the poses of the URDF's visual trimeshes using fk. + + Parameters + ---------- + cfgs : dict, list of dict, or (n,m), float + One of the following: (A) a map from joints or joint names to vectors + of joint configuration values, (B) a list of maps from joints or joint names + to single configuration values, or (C) a list of ``n`` configuration vectors, + each of which has a vector with an entry for each actuated joint. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only trimeshes from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`~trimesh.base.Trimesh` objects that are + part of the visual geometry of the specified links to the + 4x4 homogenous transform matrices that position them relative + to the base link's frame. + """ + lfk = self.link_fk_batch(cfgs=cfgs, links=links) + + fk = OrderedDict() + for link in lfk: + for visual in link.visuals: + for mesh in visual.geometry.meshes: + poses = np.matmul(lfk[link], visual.origin) + if visual.geometry.mesh is not None: + if visual.geometry.mesh.scale is not None: + S = np.eye(4, dtype=np.float64) + S[:3,:3] = np.diag(visual.geometry.mesh.scale) + poses = np.matmul(poses, S) + fk[mesh] = poses + return fk + + def collision_geometry_fk(self, cfg=None, links=None): + """Computes the poses of the URDF's collision geometries using fk. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. 
+ If not specified, all joints are assumed to be in their default + configurations. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only geometries from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`Geometry` objects that are part of the collision + elements of the specified links to the 4x4 homogenous transform + matrices that position them relative to the base link's frame. + """ + lfk = self.link_fk(cfg=cfg, links=links) + + fk = OrderedDict() + for link in lfk: + for collision in link.collisions: + fk[collision] = lfk[link].dot(collision.origin) + return fk + + def collision_geometry_fk_batch(self, cfgs=None, links=None): + """Computes the poses of the URDF's collision geometries using fk. + + Parameters + ---------- + cfgs : dict, list of dict, or (n,m), float + One of the following: (A) a map from joints or joint names to vectors + of joint configuration values, (B) a list of maps from joints or joint names + to single configuration values, or (C) a list of ``n`` configuration vectors, + each of which has a vector with an entry for each actuated joint. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only geometries from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`Geometry` objects that are part of the collision + elements of the specified links to the 4x4 homogenous transform + matrices that position them relative to the base link's frame. + """ + lfk = self.link_fk_batch(cfgs=cfgs, links=links) + + fk = OrderedDict() + for link in lfk: + for collision in link.collisions: + fk[collision] = np.matmul(lfk[link], collision.origin) + return fk + + def collision_trimesh_fk(self, cfg=None, links=None): + """Computes the poses of the URDF's collision trimeshes using fk. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. + If not specified, all joints are assumed to be in their default + configurations. + links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only trimeshes from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`~trimesh.base.Trimesh` objects that are + part of the collision geometry of the specified links to the + 4x4 homogenous transform matrices that position them relative + to the base link's frame. + """ + lfk = self.link_fk(cfg=cfg, links=links) + + fk = OrderedDict() + for link in lfk: + pose = lfk[link] + cm = link.collision_mesh + if cm is not None: + fk[cm] = pose + return fk + + def collision_trimesh_fk_batch(self, cfgs=None, links=None): + """Computes the poses of the URDF's collision trimeshes using fk. + + Parameters + ---------- + cfgs : dict, list of dict, or (n,m), float + One of the following: (A) a map from joints or joint names to vectors + of joint configuration values, (B) a list of maps from joints or joint names + to single configuration values, or (C) a list of ``n`` configuration vectors, + each of which has a vector with an entry for each actuated joint. 
+ links : list of str or list of :class:`.Link` + The links or names of links to perform forward kinematics on. + Only trimeshes from these links will be in the returned map. + If not specified, all links are returned. + + Returns + ------- + fk : dict + A map from :class:`~trimesh.base.Trimesh` objects that are + part of the collision geometry of the specified links to the + 4x4 homogenous transform matrices that position them relative + to the base link's frame. + """ + lfk = self.link_fk_batch(cfgs=cfgs, links=links) + + fk = OrderedDict() + for link in lfk: + poses = lfk[link] + cm = link.collision_mesh + if cm is not None: + fk[cm] = poses + return fk + + def animate(self, cfg_trajectory=None, loop_time=3.0, use_collision=False): + """Animate the URDF through a configuration trajectory. + + Parameters + ---------- + cfg_trajectory : dict or (m,n) float + A map from joints or joint names to lists of configuration values + for each joint along the trajectory, or a vector of + vectors where the second dimension contains a value for each joint. + If not specified, all joints will articulate from limit to limit. + The trajectory steps are assumed to be equally spaced out in time. + loop_time : float + The time to loop the animation for, in seconds. The trajectory + will play fowards and backwards during this time, ending + at the inital configuration. + use_collision : bool + If True, the collision geometry is visualized instead of + the visual geometry. + + Examples + -------- + + You can run this without specifying a ``cfg_trajectory`` to view + the full articulation of the URDF + + >>> robot = URDF.load('ur5.urdf') + >>> robot.animate() + + .. image:: /_static/ur5.gif + + >>> ct = {'shoulder_pan_joint': [0.0, 2 * np.pi]} + >>> robot.animate(cfg_trajectory=ct) + + .. image:: /_static/ur5_shoulder.gif + + >>> ct = { + ... 'shoulder_pan_joint' : [-np.pi / 4, np.pi / 4], + ... 'shoulder_lift_joint' : [0.0, -np.pi / 2.0], + ... 'elbow_joint' : [0.0, np.pi / 2.0] + ... } + >>> robot.animate(cfg_trajectory=ct) + + .. image:: /_static/ur5_three_joints.gif + + """ + import pyrender # Save pyrender import for here for CI + + ct = cfg_trajectory + + traj_len = None # Length of the trajectory in steps + ct_np = {} # Numpyified trajectory + + # If trajectory not specified, articulate between the limits. + if ct is None: + lb, ub = self.joint_limit_cfgs + if len(lb) > 0: + traj_len = 2 + ct_np = {k: np.array([lb[k], ub[k]]) for k in lb} + + # If it is specified, parse it and extract the trajectory length. 
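+        # (Illustrative example: a dict such as {'elbow_joint': [0.0, 1.57]} or
+        # an (m, n) array with one column per actuated joint are both handled
+        # by the branches below.)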
+ elif isinstance(ct, dict): + if len(ct) > 0: + for k in ct: + val = np.asanyarray(ct[k]).astype(np.float64) + if traj_len is None: + traj_len = len(val) + elif traj_len != len(val): + raise ValueError('Trajectories must be same length') + ct_np[k] = val + elif isinstance(ct, (list, tuple, np.ndarray)): + ct = np.asanyarray(ct).astype(np.float64) + if ct.ndim == 1: + ct = ct.reshape(-1, 1) + if ct.ndim != 2 or ct.shape[1] != len(self.actuated_joints): + raise ValueError('Cfg trajectory must have entry for each joint') + ct_np = {j: ct[:,i] for i, j in enumerate(self.actuated_joints)} + else: + raise TypeError('Invalid type for cfg_trajectory: {}' + .format(type(cfg_trajectory))) + + # If there isn't a trajectory to render, just show the model and exit + if len(ct_np) == 0 or traj_len < 2: + self.show(use_collision=use_collision) + return + + # Create an array of times that loops from 0 to 1 and back to 0 + fps = 30.0 + n_steps = int(loop_time * fps / 2.0) + times = np.linspace(0.0, 1.0, n_steps) + times = np.hstack((times, np.flip(times))) + + # Create bin edges in the range [0, 1] for each trajectory step + bins = np.arange(traj_len) / (float(traj_len) - 1.0) + + # Compute alphas for each time + right_inds = np.digitize(times, bins, right=True) + right_inds[right_inds == 0] = 1 + alphas = ((bins[right_inds] - times) / + (bins[right_inds] - bins[right_inds - 1])) + + # Create the new interpolated trajectory + new_ct = {} + for k in ct_np: + new_ct[k] = (alphas * ct_np[k][right_inds - 1] + + (1.0 - alphas) * ct_np[k][right_inds]) + + # Create the scene + if use_collision: + fk = self.collision_trimesh_fk() + else: + fk = self.visual_trimesh_fk() + + node_map = {} + scene = pyrender.Scene() + for tm in fk: + pose = fk[tm] + mesh = pyrender.Mesh.from_trimesh(tm, smooth=False) + node = scene.add(mesh, pose=pose) + node_map[tm] = node + + # Get base pose to focus on + blp = self.link_fk(links=[self.base_link])[self.base_link] + + # Pop the visualizer asynchronously + v = pyrender.Viewer(scene, run_in_thread=True, + use_raymond_lighting=True, + view_center=blp[:3,3]) + + # Now, run our loop + i = 0 + while v.is_active: + cfg = {k: new_ct[k][i] for k in new_ct} + i = (i + 1) % len(times) + + if use_collision: + fk = self.collision_trimesh_fk(cfg=cfg) + else: + fk = self.visual_trimesh_fk(cfg=cfg) + + v.render_lock.acquire() + for mesh in fk: + pose = fk[mesh] + node_map[mesh].matrix = pose + v.render_lock.release() + + time.sleep(1.0 / fps) + + def show(self, cfg=None, use_collision=False): + """Visualize the URDF in a given configuration. + + Parameters + ---------- + cfg : dict or (n), float + A map from joints or joint names to configuration values for + each joint, or a list containing a value for each actuated joint + in sorted order from the base link. + If not specified, all joints are assumed to be in their default + configurations. + use_collision : bool + If True, the collision geometry is visualized instead of + the visual geometry. + """ + import pyrender # Save pyrender import for here for CI + + if use_collision: + fk = self.collision_trimesh_fk(cfg=cfg) + else: + fk = self.visual_trimesh_fk(cfg=cfg) + + scene = pyrender.Scene() + for tm in fk: + pose = fk[tm] + mesh = pyrender.Mesh.from_trimesh(tm, smooth=False) + scene.add(mesh, pose=pose) + pyrender.Viewer(scene, use_raymond_lighting=True) + + def copy(self, name=None, prefix='', scale=None, collision_only=False): + """Make a deep copy of the URDF. + + Parameters + ---------- + name : str, optional + A name for the new URDF. 
If not specified, ``self.name`` is used. + prefix : str, optional + A prefix to apply to all names except for the base URDF name. + scale : float or (3,) float, optional + A scale to apply to the URDF. + collision_only : bool, optional + If True, all visual geometry is redirected to the collision geometry. + + Returns + ------- + copy : :class:`.URDF` + The copied URDF. + """ + return URDF( + name = (name if name else self.name), + links=[v.copy(prefix, scale, collision_only) for v in self.links], + joints=[v.copy(prefix, scale) for v in self.joints], + transmissions=[v.copy(prefix, scale) for v in self.transmissions], + materials=[v.copy(prefix, scale) for v in self.materials], + other_xml=self.other_xml + ) + + def save(self, file_obj): + """Save this URDF to a file. + + Parameters + ---------- + file_obj : str or file-like object + The file to save the URDF to. Should be the path to the + ``.urdf`` XML file. Any paths in the URDF should be specified + as relative paths to the ``.urdf`` file instead of as ROS + resources. + + Returns + ------- + urdf : :class:`.URDF` + The parsed URDF. + """ + if isinstance(file_obj, six.string_types): + path, _ = os.path.split(file_obj) + else: + path, _ = os.path.split(os.path.realpath(file_obj.name)) + + node = self._to_xml(None, path) + tree = ET.ElementTree(node) + tree.write(file_obj, pretty_print=True, + xml_declaration=True, encoding='utf-8') + + def join(self, other, link, origin=None, name=None, prefix=''): + """Join another URDF to this one by rigidly fixturing the two at a link. + + Parameters + ---------- + other : :class:.`URDF` + Another URDF to fuze to this one. + link : :class:`.Link` or str + The link of this URDF to attach the other URDF to. + origin : (4,4) float, optional + The location in this URDF's link frame to attach the base link of the other + URDF at. + name : str, optional + A name for the new URDF. + prefix : str, optional + If specified, all joints and links from the (other) mesh will be pre-fixed + with this value to avoid name clashes. + + Returns + ------- + :class:`.URDF` + The new URDF. + """ + myself = self.copy() + other = other.copy(prefix=prefix) + + # Validate + link_names = set(myself.link_map.keys()) + other_link_names = set(other.link_map.keys()) + if len(link_names.intersection(other_link_names)) > 0: + raise ValueError('Cannot merge two URDFs with shared link names') + + joint_names = set(myself.joint_map.keys()) + other_joint_names = set(other.joint_map.keys()) + if len(joint_names.intersection(other_joint_names)) > 0: + raise ValueError('Cannot merge two URDFs with shared joint names') + + links = myself.links + other.links + joints = myself.joints + other.joints + transmissions = myself.transmissions + other.transmissions + materials = myself.materials + other.materials + + if name is None: + name = self.name + + # Create joint that links the two rigidly + joints.append(Joint( + name='{}_join_{}{}_joint'.format(self.name, prefix, other.name), + joint_type='fixed', + parent=link if isinstance(link, str) else link.name, + child=other.base_link.name, + origin=origin + )) + + return URDF(name=name, links=links, joints=joints, transmissions=transmissions, + materials=materials) + + def _merge_materials(self): + """Merge the top-level material set with the link materials. 
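+
+        For example (illustrative), if a link visual references a material named
+        ``"blue"`` and a top-level material with that name already exists, the
+        visual is re-pointed at the single top-level :class:`.Material`;
+        otherwise the link's material is added to the top-level set.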
+ """ + for link in self.links: + for v in link.visuals: + if v.material is None: + continue + if v.material.name in self.material_map: + v.material = self._material_map[v.material.name] + else: + self._materials.append(v.material) + self._material_map[v.material.name] = v.material + + @staticmethod + def load(file_obj): + """Load a URDF from a file. + + Parameters + ---------- + file_obj : str or file-like object + The file to load the URDF from. Should be the path to the + ``.urdf`` XML file. Any paths in the URDF should be specified + as relative paths to the ``.urdf`` file instead of as ROS + resources. + + Returns + ------- + urdf : :class:`.URDF` + The parsed URDF. + """ + if isinstance(file_obj, six.string_types): + if os.path.isfile(file_obj): + parser = ET.XMLParser(remove_comments=True, + remove_blank_text=True) + tree = ET.parse(file_obj, parser=parser) + path, _ = os.path.split(file_obj) + else: + raise ValueError('{} is not a file'.format(file_obj)) + else: + parser = ET.XMLParser(remove_comments=True, remove_blank_text=True) + tree = ET.parse(file_obj, parser=parser) + path, _ = os.path.split(file_obj.name) + + node = tree.getroot() + return URDF._from_xml(node, path) + + def _validate_joints(self): + """Raise an exception of any joints are invalidly specified. + + Checks for the following: + + - Joint parents are valid link names. + - Joint children are valid link names that aren't the same as parent. + - Joint mimics have valid joint names that aren't the same joint. + + Returns + ------- + actuated_joints : list of :class:`.Joint` + The joints in the model that are independently controllable. + """ + actuated_joints = [] + for joint in self.joints: + if joint.parent not in self._link_map: + raise ValueError('Joint {} has invalid parent link name {}' + .format(joint.name, joint.parent)) + if joint.child not in self._link_map: + raise ValueError('Joint {} has invalid child link name {}' + .format(joint.name, joint.child)) + if joint.child == joint.parent: + raise ValueError('Joint {} has matching parent and child' + .format(joint.name)) + if joint.mimic is not None: + if joint.mimic.joint not in self._joint_map: + raise ValueError( + 'Joint {} has an invalid mimic joint name {}' + .format(joint.name, joint.mimic.joint) + ) + if joint.mimic.joint == joint.name: + raise ValueError( + 'Joint {} set up to mimic itself' + .format(joint.mimic.joint) + ) + elif joint.joint_type != 'fixed': + actuated_joints.append(joint) + + # Do a depth-first search + return actuated_joints + + def _sort_joints(self, joints): + """Sort joints by ascending distance from the base link (topologically). + + Parameters + ---------- + joints : list of :class:`.Joint` + The joints to sort. + + Returns + ------- + joints : list of :class:`.Joint` + The sorted joints. + """ + lens = [] + for joint in joints: + child_link = self._link_map[joint.child] + lens.append(len(self._paths_to_base[child_link])) + order = np.argsort(lens) + return np.array(joints)[order].tolist() + + def _validate_transmissions(self): + """Raise an exception of any transmissions are invalidly specified. + + Checks for the following: + + - Transmission joints have valid joint names. + """ + for t in self.transmissions: + for joint in t.joints: + if joint.name not in self._joint_map: + raise ValueError('Transmission {} has invalid joint name ' + '{}'.format(t.name, joint.name)) + + def _validate_graph(self): + """Raise an exception if the link-joint structure is invalid. 
+ + Checks for the following: + + - The graph is connected in the undirected sense. + - The graph is acyclic in the directed sense. + - The graph has only one base link. + + Returns + ------- + base_link : :class:`.Link` + The base link of the URDF. + end_links : list of :class:`.Link` + The end links of the URDF. + """ + + # Check that the link graph is weakly connected + if not nx.is_weakly_connected(self._G): + link_clusters = [] + for cc in nx.weakly_connected_components(self._G): + cluster = [] + for n in cc: + cluster.append(n.name) + link_clusters.append(cluster) + message = ('Links are not all connected. ' + 'Connected components are:') + for lc in link_clusters: + message += '\n\t' + for n in lc: + message += ' {}'.format(n) + raise ValueError(message) + + # Check that link graph is acyclic + if not nx.is_directed_acyclic_graph(self._G): + raise ValueError('There are cycles in the link graph') + + # Ensure that there is exactly one base link, which has no parent + base_link = None + end_links = [] + for n in self._G: + if len(nx.descendants(self._G, n)) == 0: + if base_link is None: + base_link = n + else: + raise ValueError('Links {} and {} are both base links!' + .format(n.name, base_link.name)) + if len(nx.ancestors(self._G, n)) == 0: + end_links.append(n) + return base_link, end_links + + def _process_cfg(self, cfg): + """Process a joint configuration spec into a dictionary mapping + joints to configuration values. + """ + joint_cfg = {} + if cfg is None: + return joint_cfg + if isinstance(cfg, dict): + for joint in cfg: + if isinstance(joint, six.string_types): + joint_cfg[self._joint_map[joint]] = cfg[joint] + elif isinstance(joint, Joint): + joint_cfg[joint] = cfg[joint] + elif isinstance(cfg, (list, tuple, np.ndarray)): + if len(cfg) != len(self.actuated_joints): + raise ValueError('Cfg must have same length as actuated joints ' + 'if specified as a numerical array') + for joint, value in zip(self.actuated_joints, cfg): + joint_cfg[joint] = value + else: + raise TypeError('Invalid type for config') + return joint_cfg + + def _process_cfgs(self, cfgs): + """Process a list of joint configurations into a dictionary mapping joints to + configuration values. + + This should result in a dict mapping each joint to a list of cfg values, one + per joint. 
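+
+        Examples
+        --------
+        A sketch of the expected mapping (the joint name is illustrative):
+
+        >>> joint_cfg, n_cfgs = robot._process_cfgs({'joint_1': [0.0, 0.1]})
+        >>> n_cfgs
+        2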
+ """ + joint_cfg = {j : [] for j in self.actuated_joints} + n_cfgs = None + if isinstance(cfgs, dict): + for joint in cfgs: + if isinstance(joint, six.string_types): + joint_cfg[self._joint_map[joint]] = cfgs[joint] + else: + joint_cfg[joint] = cfgs[joint] + if n_cfgs is None: + n_cfgs = len(cfgs[joint]) + elif isinstance(cfgs, (list, tuple, np.ndarray)): + n_cfgs = len(cfgs) + if isinstance(cfgs[0], dict): + for cfg in cfgs: + for joint in cfg: + if isinstance(joint, six.string_types): + joint_cfg[self._joint_map[joint]].append(cfg[joint]) + else: + joint_cfg[joint].append(cfg[joint]) + elif cfgs[0] is None: + pass + else: + cfgs = np.asanyarray(cfgs, dtype=np.float64) + for i, j in enumerate(self.actuated_joints): + joint_cfg[j] = cfgs[:,i] + else: + raise ValueError('Incorrectly formatted config array') + + for j in joint_cfg: + if len(joint_cfg[j]) == 0: + joint_cfg[j] = None + elif len(joint_cfg[j]) != n_cfgs: + raise ValueError('Inconsistent number of configurations for joints') + + return joint_cfg, n_cfgs + + @classmethod + def _from_xml(cls, node, path): + valid_tags = set(['joint', 'link', 'transmission', 'material']) + kwargs = cls._parse(node, path) + + extra_xml_node = ET.Element('extra') + for child in node: + if child.tag not in valid_tags: + extra_xml_node.append(child) + + data = ET.tostring(extra_xml_node) + kwargs['other_xml'] = data + return URDF(**kwargs) + + def _to_xml(self, parent, path): + node = self._unparse(path) + if self.other_xml: + extra_tree = ET.fromstring(self.other_xml) + for child in extra_tree: + node.append(child) + return node + + +def rpy_to_matrix(coords): + """Convert roll-pitch-yaw coordinates to a 3x3 homogenous rotation matrix. + + The roll-pitch-yaw axes in a typical URDF are defined as a + rotation of ``r`` radians around the x-axis followed by a rotation of + ``p`` radians around the y-axis followed by a rotation of ``y`` radians + around the z-axis. These are the Z1-Y2-X3 Tait-Bryan angles. See + Wikipedia_ for more information. + + .. _Wikipedia: https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix + + Parameters + ---------- + coords : (3,) float + The roll-pitch-yaw coordinates in order (x-rot, y-rot, z-rot). + + Returns + ------- + R : (3,3) float + The corresponding homogenous 3x3 rotation matrix. + """ + coords = np.asanyarray(coords, dtype=np.float64) + c3, c2, c1 = np.cos(coords) + s3, s2, s1 = np.sin(coords) + + return np.array([ + [c1 * c2, (c1 * s2 * s3) - (c3 * s1), (s1 * s3) + (c1 * c3 * s2)], + [c2 * s1, (c1 * c3) + (s1 * s2 * s3), (c3 * s1 * s2) - (c1 * s3)], + [-s2, c2 * s3, c2 * c3] + ], dtype=np.float64) + + +def matrix_to_rpy(R, solution=1): + """Convert a 3x3 transform matrix to roll-pitch-yaw coordinates. + + The roll-pitchRyaw axes in a typical URDF are defined as a + rotation of ``r`` radians around the x-axis followed by a rotation of + ``p`` radians around the y-axis followed by a rotation of ``y`` radians + around the z-axis. These are the Z1-Y2-X3 Tait-Bryan angles. See + Wikipedia_ for more information. + + .. _Wikipedia: https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix + + There are typically two possible roll-pitch-yaw coordinates that could have + created a given rotation matrix. Specify ``solution=1`` for the first one + and ``solution=2`` for the second one. + + Parameters + ---------- + R : (3,3) float + A 3x3 homogenous rotation matrix. + solution : int + Either 1 or 2, indicating which solution to return. 
+ + Returns + ------- + coords : (3,) float + The roll-pitch-yaw coordinates in order (x-rot, y-rot, z-rot). + """ + R = np.asanyarray(R, dtype=np.float64) + r = 0.0 + p = 0.0 + y = 0.0 + + if np.abs(R[2,0]) >= 1.0 - 1e-12: + y = 0.0 + if R[2,0] < 0: + p = np.pi / 2 + r = np.arctan2(R[0,1], R[0,2]) + else: + p = -np.pi / 2 + r = np.arctan2(-R[0,1], -R[0,2]) + else: + if solution == 1: + p = -np.arcsin(R[2,0]) + else: + p = np.pi + np.arcsin(R[2,0]) + r = np.arctan2(R[2,1] / np.cos(p), R[2,2] / np.cos(p)) + y = np.arctan2(R[1,0] / np.cos(p), R[0,0] / np.cos(p)) + + return np.array([r, p, y], dtype=np.float64) + + +def matrix_to_xyz_rpy(matrix): + """Convert a 4x4 homogenous matrix to xyzrpy coordinates. + + Parameters + ---------- + matrix : (4,4) float + The homogenous transform matrix. + + Returns + ------- + xyz_rpy : (6,) float + The xyz_rpy vector. + """ + xyz = matrix[:3,3] + rpy = matrix_to_rpy(matrix[:3,:3]) + return np.hstack((xyz, rpy)) + + +def xyz_rpy_to_matrix(xyz_rpy): + """Convert xyz_rpy coordinates to a 4x4 homogenous matrix. + + Parameters + ---------- + xyz_rpy : (6,) float + The xyz_rpy vector. + + Returns + ------- + matrix : (4,4) float + The homogenous transform matrix. + """ + matrix = np.eye(4, dtype=np.float64) + matrix[:3,3] = xyz_rpy[:3] + matrix[:3,:3] = rpy_to_matrix(xyz_rpy[3:]) + return matrix + + +def parse_origin(node): + """Find the ``origin`` subelement of an XML node and convert it + into a 4x4 homogenous transformation matrix. + + Parameters + ---------- + node : :class`lxml.etree.Element` + An XML node which (optionally) has a child node with the ``origin`` + tag. + + Returns + ------- + matrix : (4,4) float + The 4x4 homogneous transform matrix that corresponds to this node's + ``origin`` child. Defaults to the identity matrix if no ``origin`` + child was found. + """ + matrix = np.eye(4, dtype=np.float64) + origin_node = node.find('origin') + if origin_node is not None: + if 'xyz' in origin_node.attrib: + matrix[:3,3] = np.fromstring(origin_node.attrib['xyz'], sep=' ') + if 'rpy' in origin_node.attrib: + rpy = np.fromstring(origin_node.attrib['rpy'], sep=' ') + matrix[:3,:3] = rpy_to_matrix(rpy) + return matrix + + +def unparse_origin(matrix): + """Turn a 4x4 homogenous matrix into an ``origin`` XML node. + + Parameters + ---------- + matrix : (4,4) float + The 4x4 homogneous transform matrix to convert into an ``origin`` + XML node. + + Returns + ------- + node : :class`lxml.etree.Element` + An XML node whose tag is ``origin``. The node has two attributes: + + - ``xyz`` - A string with three space-delimited floats representing + the translation of the origin. + - ``rpy`` - A string with three space-delimited floats representing + the rotation of the origin. + """ + node = ET.Element('origin') + node.attrib['xyz'] = '{} {} {}'.format(*matrix[:3,3]) + node.attrib['rpy'] = '{} {} {}'.format(*matrix_to_rpy(matrix[:3,:3])) + return node + + +def get_filename(base_path, file_path, makedirs=False): + """Formats a file path correctly for URDF loading. + + Parameters + ---------- + base_path : str + The base path to the URDF's folder. + file_path : str + The path to the file. + makedirs : bool, optional + If ``True``, the directories leading to the file will be created + if needed. + + Returns + ------- + resolved : str + The resolved filepath -- just the normal ``file_path`` if it was an + absolute path, otherwise that path joined to ``base_path``. 
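+
+    Examples
+    --------
+    For instance, on a POSIX system (paths are illustrative):
+
+    >>> get_filename('/data/robot', 'meshes/base.obj')
+    '/data/robot/meshes/base.obj'
+    >>> get_filename('/data/robot', '/tmp/base.obj')
+    '/tmp/base.obj'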
+ """ + fn = file_path + if not os.path.isabs(file_path): + fn = os.path.join(base_path, file_path) + if makedirs: + d, _ = os.path.split(fn) + if not os.path.exists(d): + os.makedirs(d) + return fn + + +def load_meshes(filename): + """Loads triangular meshes from a file. + + Parameters + ---------- + filename : str + Path to the mesh file. + + Returns + ------- + meshes : list of :class:`~trimesh.base.Trimesh` + The meshes loaded from the file. + """ + meshes = trimesh.load(filename) + + # If we got a scene, dump the meshes + if isinstance(meshes, trimesh.Scene): + meshes = list(meshes.dump()) + meshes = [g for g in meshes if isinstance(g, trimesh.Trimesh)] + + if isinstance(meshes, (list, tuple, set)): + meshes = list(meshes) + if len(meshes) == 0: + raise ValueError('At least one mesh must be pmeshesent in file') + for r in meshes: + if not isinstance(r, trimesh.Trimesh): + raise TypeError('Could not load meshes from file') + elif isinstance(meshes, trimesh.Trimesh): + meshes = [meshes] + else: + raise ValueError('Unable to load mesh from file') + + return meshes + + +def configure_origin(value): + """Convert a value into a 4x4 transform matrix. + + Parameters + ---------- + value : None, (6,) float, or (4,4) float + The value to turn into the matrix. + If (6,), interpreted as xyzrpy coordinates. + + Returns + ------- + matrix : (4,4) float or None + The created matrix. + """ + if value is None: + value = np.eye(4, dtype=np.float64) + elif isinstance(value, (list, tuple, np.ndarray)): + value = np.asanyarray(value, dtype=np.float64) + if value.shape == (6,): + value = xyz_rpy_to_matrix(value) + elif value.shape != (4,4): + raise ValueError('Origin must be specified as a 4x4 ' + 'homogenous transformation matrix') + else: + raise TypeError('Invalid type for origin, expect 4x4 matrix') + return value + diff --git a/setup.py b/setup.py index 9d7f8d331..47ad59117 100644 --- a/setup.py +++ b/setup.py @@ -47,6 +47,7 @@ "rtree>=1.2.0", "graphviz>=0.20", "matplotlib>=3.0.0", + "lxml", ], extras_require={ "dev": [ From 1b90faf263d6b306bbab3cfb33232793e4486110 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:03:20 -0800 Subject: [PATCH 18/61] update asset conversion utils for generalized asset imports --- omnigibson/utils/asset_conversion_utils.py | 514 +++++++++++++++++++-- 1 file changed, 471 insertions(+), 43 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 05f29abb8..2660f4410 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -23,6 +23,7 @@ from omnigibson.objects import DatasetObject from omnigibson.scenes import Scene from omnigibson.utils.ui_utils import create_module_logger +from omnigibson.utils.urdfpy_utils import URDF from omnigibson.utils.usd_utils import create_primitive_mesh # Create module logger @@ -959,13 +960,14 @@ def _process_glass_link(prim): ) -def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_channels=False): +def import_obj_metadata(usd_path, obj_category, obj_model, dataset_root, import_render_channels=False): """ Imports metadata for a given object model from the dataset. This metadata consist of information that is NOT included in the URDF file and instead included in the various JSON files shipped in iGibson and OmniGibson datasets. Args: + usd_path (str): Path to USD file obj_category (str): The category of the object. obj_model (str): The model name of the object. 
dataset_root (str): The root directory of the dataset. @@ -979,7 +981,6 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha """ # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" - usd_path = f"{model_root_path}/usd/{obj_model}.usd" log.debug("Loading", usd_path, "for metadata import.") # Load model @@ -1071,11 +1072,18 @@ def import_obj_metadata(obj_category, obj_model, dataset_root, import_render_cha if "glass" in link_tags: _process_glass_link(prim.GetChild(link)) + # Rename model to be named + old_prim_path = prim.GetPrimPath().pathString + new_prim_path = "/".join(old_prim_path.split("/")[:-1]) + f"/{obj_model}" + lazy.omni.kit.commands.execute("MovePrim", path_from=old_prim_path, path_to=new_prim_path) + + prim = stage.GetDefaultPrim() + # Save stage stage.Save() - # Delete stage reference and clear the sim stage variable, opening the dummy stage along the way - del stage + # Return the root prim + return prim def _recursively_replace_list_of_dict(dic): @@ -1145,7 +1153,7 @@ def _recursively_replace_list_of_dict(dic): return dic -def _create_urdf_import_config(): +def _create_urdf_import_config(use_convex_decomposition=False): """ Creates and configures a URDF import configuration. @@ -1155,6 +1163,10 @@ def _create_urdf_import_config(): density, drive strength, position drive damping, self-collision, up vector, default prim creation, and physics scene creation. + Args: + use_convex_decomposition (bool): Whether to have omniverse use internal convex decomposition + on any collision meshes + Returns: import_config: The configured URDF import configuration object. """ @@ -1165,7 +1177,7 @@ def _create_urdf_import_config(): ) # Hacky way to get class for default drive type, options are JOINT_DRIVE_{NONE / POSITION / VELOCITY} import_config.set_merge_fixed_joints(False) - import_config.set_convex_decomp(True) + import_config.set_convex_decomp(use_convex_decomposition) import_config.set_fix_base(False) import_config.set_import_inertia_tensor(False) import_config.set_distance_scale(1.0) @@ -1180,24 +1192,28 @@ def _create_urdf_import_config(): return import_config -def import_obj_urdf(obj_category, obj_model, dataset_root): +def import_obj_urdf(urdf_path, obj_category, obj_model, dataset_root=gm.EXTERNAL_DATASET_PATH, use_omni_convex_decomp=False, use_usda=False): """ Imports an object from a URDF file into the current stage. Args: + urdf_path (str): Path to URDF file to import obj_category (str): The category of the object. obj_model (str): The model name of the object. dataset_root (str): The root directory of the dataset. 
+ use_omni_convex_decomp (bool): Whether to use omniverse's built-in convex decomposer for collision meshes + use_usda (bool): If set, will write files to .usda files instead of .usd + (bigger memory footprint, but human-readable) Returns: - None + str: Absolute path to the imported USD file """ # Preprocess input URDF to account for metalinks - urdf_path = _add_metalinks_to_urdf(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) + urdf_path = _add_metalinks_to_urdf(urdf_path=urdf_path, obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) # Import URDF - cfg = _create_urdf_import_config() + cfg = _create_urdf_import_config(use_convex_decomposition=use_omni_convex_decomp) # Check if filepath exists - usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd" + usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.{'usda' if use_usda else 'usd'}" if _SPLIT_COLLISION_MESHES: log.debug(f"Converting collision meshes from {obj_category}, {obj_model}...") urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split") @@ -1211,6 +1227,8 @@ def import_obj_urdf(obj_category, obj_model, dataset_root): ) log.debug(f"Imported {obj_category}, {obj_model}") + return usd_path + def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): """ @@ -1426,7 +1444,7 @@ def _save_xmltree_as_urdf(root_element, name, dirpath, unique_urdf=False): return fpath -def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): +def _add_metalinks_to_urdf(urdf_path, obj_category, obj_model, dataset_root): """ Adds meta links to a URDF file based on metadata. @@ -1434,6 +1452,7 @@ def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): saves an updated version of the URDF file with these meta links. Args: + urdf_path (str): Path to URDF obj_category (str): The category of the object. obj_model (str): The model name of the object. dataset_root (str): The root directory of the dataset. @@ -1443,7 +1462,6 @@ def _add_metalinks_to_urdf(obj_category, obj_model, dataset_root): """ # Check if filepath exists model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}" - urdf_path = f"{model_root_path}/{obj_model}.urdf" # Load urdf tree = ET.parse(urdf_path) @@ -1720,50 +1738,310 @@ def _get_joint_info(joint_element): return child, pos, quat, fixed_jnt -def generate_collision_meshes(trimesh_mesh, hull_count=32, discard_not_volume=True): +def make_mesh_positive(mesh_fpath, scale, output_suffix="mirror"): + assert "." 
not in mesh_fpath + for sc, letter in zip(scale, "xyz"): + if sc < 0: + output_suffix += f"_{letter}" + for filetype in [".obj", ".stl", ".dae"]: + fpath = f"{mesh_fpath}{filetype}" + out_fpath = f"{mesh_fpath}_{output_suffix}{filetype}" + kwargs = dict() + if filetype == ".dae": + kwargs["force"] = "mesh" + if os.path.exists(fpath): + try: + tm = trimesh.load(fpath, **kwargs) + tm.apply_scale(scale) + tm.export(out_fpath) + if filetype == ".obj": + # Update header lines + lines = [] + with open(fpath, "r") as f: + for line in f.readlines(): + if line.startswith("v "): + break + lines.append(line) + start = False + with open(out_fpath, "r") as f: + for line in f.readlines(): + if line.startswith("v "): + start = True + if start: + lines.append(line) + with open(out_fpath, "w+") as f: + f.writelines(lines) + except KeyError: + # Degenerate mesh, so immediately return + return None + return output_suffix + +def make_asset_positive(urdf_fpath, output_suffix="mirror"): + assert urdf_fpath.endswith(".urdf") + out_lines = [] + + with open(urdf_fpath, "r") as f: + for line in f.readlines(): + # print(line) + out_line = line + if " 0.90): + hulls = [trimesh_mesh.convex_hull] - # Get the vertices and faces - coacd_mesh = coacd.Mesh(trimesh_mesh.vertices, trimesh_mesh.faces) + elif method == "coacd": + try: + import coacd + except ImportError: + raise ImportError("Please install the `coacd` package to use this function.") - # Run CoACD with the hull count - result = coacd.run_coacd( - coacd_mesh, - max_convex_hull=hull_count, - ) + # Get the vertices and faces + coacd_mesh = coacd.Mesh(trimesh_mesh.vertices, trimesh_mesh.faces) - # Convert the returned vertices and faces to trimesh meshes - # and assert that they are volumes (and if not, discard them if required) - hulls = [] - for vs, fs in result: - hull = trimesh.Trimesh(vertices=vs, faces=fs, process=False) - if discard_not_volume and not hull.is_volume: - continue - hulls.append(hull) + # Run CoACD with the hull count + result = coacd.run_coacd( + coacd_mesh, + max_convex_hull=hull_count, + ) + + # Convert the returned vertices and faces to trimesh meshes + # and assert that they are volumes (and if not, discard them if required) + hulls = [] + coacd_vol = 0.0 + for vs, fs in result: + hull = trimesh.Trimesh(vertices=vs, faces=fs, process=False) + if discard_not_volume and not hull.is_volume: + continue + hulls.append(hull) + coacd_vol += hull.convex_hull.volume + + # Assert that we got _some_ collision meshes + assert len(hulls) > 0, "No collision meshes generated!" + + # Compare coacd's generation compared to the original mesh's convex hull + # If the difference is small (<10% volume difference), simply keep the convex hull + vol_ratio = coacd_vol / trimesh_mesh.convex_hull.volume + if 0.95 < vol_ratio < 1.05: + print("MINIMAL CHANGE -- USING CONVEX HULL INSTEAD") + # from IPython import embed; embed() + hulls = [trimesh_mesh.convex_hull] - # Assert that we got _some_ collision meshes - assert len(hulls) > 0, "No collision meshes generated!" 
+ elif method == "convex": + hulls = [trimesh_mesh.convex_hull] + + else: + raise ValueError(f"Invalid collision mesh generation method specified: {method}") return hulls -def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): +def get_collision_approximation_for_urdf( + urdf_path, + collision_method="coacd", + hull_count=32, + coacd_links=None, + convex_links=None, + no_decompose_links=None, + visual_only_links=None, + ignore_links=None, +): + """ + Computes collision approximation for all collision meshes (which are assumed to be non-convex) in + the given URDF. + + NOTE: This is an in-place operation! It will overwrite @urdf_path + + Args: + urdf_path (str): Absolute path to the URDF to decompose + collision_method (str): Default collision method to use. Valid options are: {"coacd", "convex"} + hull_count (int): Maximum number of convex hulls to decompose individual visual meshes into. + Only relevant if @collision_method is "coacd" + coacd_links (None or list of str): If specified, links that should use CoACD to decompose collision meshes + convex_links (None or list of str): If specified, links that should use convex hull to decompose collision meshes + no_decompose_links (None or list of str): If specified, links that should not have any special collision + decomposition applied. This will only use the convex hull + visual_only_links (None or list of str): If specified, link names corresponding to links that should have + no collision associated with them (so any pre-existing collisions will be removed!) + ignore_links (None or list of str): If specified, link names corresponding to links that should be skipped + during collision generation process + """ + # Load URDF + tree = ET.parse(urdf_path) + root = tree.getroot() + + # Next, iterate over each visual mesh and define collision meshes for them + coacd_links = set() if coacd_links is None else set(coacd_links) + convex_links = set() if convex_links is None else set(convex_links) + no_decompose_links = set() if no_decompose_links is None else set(no_decompose_links) + visual_only_links = set() if visual_only_links is None else set(visual_only_links) + ignore_links = set() if ignore_links is None else set(ignore_links) + col_mesh_folder = pathlib.Path(os.path.dirname(urdf_path)) / "meshes" / "collision" + col_mesh_folder.mkdir(exist_ok=True, parents=True) + for link in root.findall("link"): + link_name = link.attrib["name"] + old_cols = link.findall("collision") + # Completely skip this link if this a link to explicitly skip or we have no collision tags + if link_name in ignore_links or len(old_cols) == 0: + continue + + print(f"Generating collision approximation for link {link_name}...") + generated_new_col = False + idx = 0 + if link_name not in visual_only_links: + for vis in link.findall(f"visual"): + # Get origin + origin = vis.find("origin") + # Check all geometries + geoms = vis.findall("geometry/*") + # We should only have a single geom, so assert here + assert len(geoms) == 1 + # Check whether we actually need to generate a collision approximation + # No need if the geom type is not a mesh (i.e.: it's a primitive -- so we assume if a collision is already + # specified, it's that same primitive) + geom = geoms[0] + if geom.tag != "mesh": + continue + mesh_path = os.path.join(os.path.dirname(urdf_path), geom.attrib["filename"]) + tm = trimesh.load(mesh_path, force="mesh", process=False) + + if link_name in coacd_links: + method = "coacd" + elif link_name in convex_links: + method = "convex" + elif link_name 
in no_decompose_links: + # Output will just be ignored, so skip + continue + else: + method = collision_method + + collision_meshes = generate_collision_meshes( + trimesh_mesh=tm, + method=method, + hull_count=hull_count, + ) + # Save and merge precomputed collision mesh + collision_filenames_and_scales = [] + for i, collision_mesh in enumerate(collision_meshes): + processed_collision_mesh = collision_mesh.copy() + processed_collision_mesh._cache.cache["vertex_normals"] = processed_collision_mesh.vertex_normals + collision_filename = f"{link_name}-col-{idx}.obj" + + # OmniGibson requires unit-bbox collision meshes, so here we do that scaling + bounding_box = processed_collision_mesh.bounding_box.extents + assert all(x > 0 for x in bounding_box), f"Bounding box extents are not all positive: {bounding_box}" + collision_scale = 1.0 / bounding_box + collision_scale_matrix = th.eye(4) + collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale)) + processed_collision_mesh.apply_transform(collision_scale_matrix.numpy()) + processed_collision_mesh.export(col_mesh_folder / collision_filename, file_type="obj") + collision_filenames_and_scales.append((collision_filename, 1 / collision_scale)) + + idx += 1 + + for collision_filename, collision_scale in collision_filenames_and_scales: + collision_xml = ET.SubElement(link, "collision") + collision_xml.attrib = {"name": collision_filename.replace(".obj", "")} + # Add origin info if defined + if origin is not None: + collision_xml.append(deepcopy(origin)) + collision_geometry_xml = ET.SubElement(collision_xml, "geometry") + collision_mesh_xml = ET.SubElement(collision_geometry_xml, "mesh") + collision_mesh_xml.attrib = { + "filename": str(col_mesh_folder / collision_filename), + "scale": " ".join([str(item) for item in collision_scale]), + } + + if link_name not in no_decompose_links: + generated_new_col = True + + # If we generated a new set of collision meshes, remove the old ones + if generated_new_col or link_name in visual_only_links: + for col in old_cols: + link.remove(col) + + # Save the URDF file + _save_xmltree_as_urdf( + root_element=root, + name=os.path.splitext(os.path.basename(urdf_path))[0], + dirpath=os.path.dirname(urdf_path), + unique_urdf=False, + ) + + +def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False): + # Create a directory for the object + obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl + if not overwrite: + assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" 
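+    # Create the object directory (exist_ok=True so that runs with overwrite enabled can reuse an existing directory)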
+ obj_dir.mkdir(parents=True, exist_ok=True) + + # Copy over all relevant meshes to new obj directory + old_urdf_dir = pathlib.Path(os.path.dirname(urdf_path)) + + # Load urdf + tree = ET.parse(urdf_path) + root = tree.getroot() + + # Find all mesh paths, and replace them with new obj directory + new_dirs = set() + for mesh_type in ["visual", "collision"]: + for mesh_element in root.findall(f"link/{mesh_type}/geometry/mesh"): + mesh_root_dir = mesh_element.attrib["filename"].split("/")[0] + new_dirs.add(mesh_root_dir) + for new_dir in new_dirs: + shutil.copytree(old_urdf_dir / new_dir, obj_dir / new_dir, dirs_exist_ok=overwrite) + + # Export this URDF + return _save_xmltree_as_urdf( + root_element=root, + name=mdl, + dirpath=obj_dir, + unique_urdf=False, + ) + + +def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False): # Create a directory for the object - obj_dir = pathlib.Path(gm.USER_ASSETS_PATH) / "objects" / category / mdl - assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" - obj_dir.mkdir(parents=True) + obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl + if not overwrite: + assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" + obj_dir.mkdir(parents=True, exist_ok=True) obj_name = "-".join([category, mdl]) @@ -1926,8 +2204,49 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): with open(obj_dir / f"{mdl}.urdf", "wb") as f: tree.write(f, xml_declaration=True) - base_link_offset = visual_mesh.bounding_box.centroid - bbox_size = visual_mesh.bounding_box.extents + +def record_obj_metadata_from_urdf(urdf_path, obj_dir, joint_setting="zero", overwrite=False): + """ + Records object metadata and writes it to misc/metadata.json within the object directory. + + Args: + urdf_path (str): Path to object URDF + obj_dir (str): Absolute path to the object's root directory + joint_setting (str): Setting for joints when calculating canonical metadata. Valid options + are {"low", "zero", "high"} (i.e.: lower joint limit, all 0 values, or upper joint limit) + overwrite (bool): Whether to overwrite any pre-existing data + """ + # Load the URDF file into urdfpy + robot = URDF.load(urdf_path) + + # Do FK with everything at desired configuration + if joint_setting == "zero": + val = lambda jnt: 0.0 + elif joint_setting == "low": + val = lambda jnt: jnt.limit.lower + elif joint_setting == "high": + val = lambda jnt: jnt.limit.upper + else: + raise ValueError(f"Got invalid joint_setting: {joint_setting}! 
Valid options are ['low', 'zero', 'high']") + joint_cfg = { + joint.name: val(joint) + for joint in robot.joints + if joint.joint_type in ("prismatic", "revolute") + } + vfk = robot.visual_trimesh_fk(cfg=joint_cfg) + + scene = trimesh.Scene() + for mesh, transform in vfk.items(): + scene.add_geometry(geometry=mesh, transform=transform) + + # Calculate relevant metadata + + # Base link offset is pos offset from robot root link -> overall AABB center + # Since robot is placed at origin, this is simply the AABB centroid + base_link_offset = scene.bounding_box.centroid + + # BBox size is simply the extent of the overall AABB + bbox_size = scene.bounding_box.extents # Save metadata json out_metadata = { @@ -1938,7 +2257,116 @@ def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl): "bbox_size": bbox_size.tolist(), "orientations": [], } - misc_dir = obj_dir / "misc" - misc_dir.mkdir() + misc_dir = pathlib.Path(obj_dir) / "misc" + misc_dir.mkdir(exist_ok=overwrite) with open(misc_dir / "metadata.json", "w") as f: json.dump(out_metadata, f) + + +def import_og_asset_from_urdf( + category, + model, + urdf_path=None, + collision_method="coacd", + coacd_links=None, + convex_links=None, + no_decompose_links=None, + visual_only_links=None, + dataset_root=gm.EXTERNAL_DATASET_PATH, + hull_count=32, + overwrite=False, + use_usda=False, +): + """ + Imports an asset from URDF format into OmniGibson-compatible USD format. This will write the new USD + (and copy the URDF if it does not already exist within @dataset_root) to @dataset_root + + Args: + category (str): Category to assign to imported asset + model (str): Model name to assign to imported asset + urdf_path (None or str): If specified, external URDF that should be copied into the dataset first before + converting into USD format. Otherwise, assumes that the urdf file already exists within @dataset_root dir + collision_method (None or str): If specified, collision decomposition method to use to generate + OmniGibson-compatible collision meshes. Valid options are {"coacd", "convex"} + coacd_links (None or list of str): If specified, links that should use CoACD to decompose collision meshes + convex_links (None or list of str): If specified, links that should use convex hull to decompose collision meshes + no_decompose_links (None or list of str): If specified, links that should not have any special collision + decomposition applied. This will only use the convex hull + visual_only_links (None or list of str): If specified, links that should have no colliders associated with it + dataset_root (str): Dataset root directory to use for writing imported USD file. Default is external dataset + path set from the global macros + hull_count (int): Maximum number of convex hulls to decompose individual visual meshes into. 
+            Only relevant if @collision_method is "coacd"
+        overwrite (bool): If set, will overwrite any pre-existing files
+        use_usda (bool): If set, will write files to .usda files instead of .usd
+            (bigger memory footprint, but human-readable)
+
+    Returns:
+        2-tuple:
+            - str: Absolute path to generated USD file
+            - Usd.Prim: Generated root USD prim (currently on active stage)
+    """
+    # If an external URDF is specified, copy it into the dataset first
+    if urdf_path is not None:
+        print(f"Copying URDF to dataset root {dataset_root}...")
+        urdf_path = copy_urdf_to_dataset(
+            urdf_path=urdf_path,
+            category=category,
+            mdl=model,
+            dataset_root=dataset_root,
+            overwrite=overwrite,
+        )
+    else:
+        # Verify that the object exists at the expected location
+        # This is <dataset_root>/objects/<category>/<model>/<model>.urdf
+        urdf_path = os.path.join(dataset_root, "objects", category, model, f"{model}.urdf")
+        assert os.path.exists(urdf_path), f"Expected urdf at dataset location {urdf_path}, but none was found!"
+
+    # Make sure all scaling is positive
+    urdf_path = make_asset_positive(urdf_fpath=urdf_path)
+
+    # Update collisions if requested
+    if collision_method is not None:
+        print("Generating collision approximation for URDF...")
+        get_collision_approximation_for_urdf(
+            urdf_path=urdf_path,
+            collision_method=collision_method,
+            hull_count=hull_count,
+            coacd_links=coacd_links,
+            convex_links=convex_links,
+            no_decompose_links=no_decompose_links,
+            visual_only_links=visual_only_links,
+        )
+
+    # Generate metadata
+    print("Recording object metadata from URDF...")
+    record_obj_metadata_from_urdf(
+        urdf_path=urdf_path,
+        obj_dir=os.path.dirname(urdf_path),
+        joint_setting="zero",
+        overwrite=overwrite,
+    )
+
+    # Convert to USD
+    print("Converting obj URDF to USD...")
+    og.launch()
+    assert len(og.sim.scenes) == 0
+    usd_path = import_obj_urdf(
+        urdf_path=urdf_path,
+        obj_category=category,
+        obj_model=model,
+        dataset_root=dataset_root,
+        use_omni_convex_decomp=False,  # We already pre-decomposed the collision meshes, so don't use omni convex decomp
+        use_usda=use_usda,
+    )
+
+    prim = import_obj_metadata(
+        usd_path=usd_path,
+        obj_category=category,
+        obj_model=model,
+        dataset_root=dataset_root,
+        import_render_channels=False,  # TODO: Make this True once we find a systematic / robust way to import materials of different source formats
+    )
+    print(f"\nConversion complete! Object has been successfully imported into OmniGibson-compatible USD, located at:\n\n{usd_path}\n")
+
+    return usd_path, prim

From b2bfcae6fc678bc1f4f4aaf86571f96bf71ff825 Mon Sep 17 00:00:00 2001
From: cremebrule <84cremebrule@gmail.com>
Date: Mon, 25 Nov 2024 15:04:32 -0800
Subject: [PATCH 19/61] remove old asset import script

---
 omnigibson/import_asset.py | 130 -------------------------------------
 1 file changed, 130 deletions(-)
 delete mode 100644 omnigibson/import_asset.py

diff --git a/omnigibson/import_asset.py b/omnigibson/import_asset.py
deleted file mode 100644
index 0a32d3ec7..000000000
--- a/omnigibson/import_asset.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""
-Helper script to download OmniGibson dataset and assets.
-""" - -import math -import pathlib -from typing import List, Literal, Optional, Union - -import click -import trimesh - -import omnigibson as og -from omnigibson.macros import gm -from omnigibson.utils.asset_conversion_utils import ( - generate_collision_meshes, - generate_urdf_for_obj, - import_obj_metadata, - import_obj_urdf, -) - - -@click.command() -@click.argument("visual_mesh_path", type=click.Path(exists=True, dir_okay=False)) -@click.argument("category", type=click.STRING) -@click.argument("model", type=click.STRING) -@click.option( - "--collision-mesh-path", - type=click.Path(exists=True, dir_okay=False), - default=None, - help="Path to the collision mesh file. If not provided, a collision mesh will be generated using the provided collision generation method.", -) -@click.option( - "--collision-method", - type=click.Choice(["coacd", "convex"]), - default="coacd", - help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull.", -) -@click.option("--scale", type=float, default=1.0, help="Scale factor to apply to the mesh.") -@click.option("--up-axis", type=click.Choice(["z", "y"]), default="z", help="Up axis for the mesh.") -@click.option("--headless", is_flag=True, help="Run the script in headless mode.") -@click.option("--confirm-bbox", default=True, help="Whether to confirm the scale factor.") -def import_asset( - visual_mesh_path: str, - category: str, - model: str, - collision_mesh_path: Optional[Union[str, pathlib.Path, List[Union[str, pathlib.Path]]]], - collision_method: Literal["coacd", "convex"], - scale: float, - up_axis: Literal["z", "y"], - headless: bool, - confirm_bbox: bool, -): - assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." 
- - # Load the mesh - visual_mesh: trimesh.Trimesh = trimesh.load(visual_mesh_path, force="mesh", process=False) - - # Either load, or generate, the collision mesh - if collision_mesh_path is not None: - # If it's a single path, we import it and try to split - if isinstance(collision_mesh_path, (str, pathlib.Path)): - collision_meshes = trimesh.load(collision_mesh_path, force="mesh", process=False).split() - else: - # Otherwise, we assume it's a list of paths and we load each one - collision_meshes = [ - trimesh.load(collision_file, force="mesh", process=False) for collision_file in collision_mesh_path - ] - else: - if collision_method == "coacd": - collision_meshes = generate_collision_meshes(visual_mesh) - elif collision_method == "convex": - collision_meshes = [visual_mesh.convex_hull] - else: - raise ValueError(f"Unsupported collision generation option: {collision_method}") - - # If the up axis is y, we need to rotate the meshes - if up_axis == "y": - rotation_matrix = trimesh.transformations.rotation_matrix(math.pi / 2, [1, 0, 0]) - visual_mesh.apply_transform(rotation_matrix) - for collision_mesh in collision_meshes: - collision_mesh.apply_transform(rotation_matrix) - - # If the scale is nonzero, we apply it to the meshes - if scale != 1.0: - scale_transform = trimesh.transformations.scale_matrix(scale) - visual_mesh.apply_transform(scale_transform) - for collision_mesh in collision_meshes: - collision_mesh.apply_transform(scale_transform) - - # Check the bounding box size and complain if it's larger than 3 meters - bbox_size = visual_mesh.bounding_box.extents - click.echo(f"Visual mesh bounding box size: {bbox_size}") - - if confirm_bbox: - if any(size > 3.0 for size in bbox_size): - click.echo( - f"Warning: The bounding box sounds a bit large. Are you sure you don't need to scale? " - f"We just wanted to confirm this is intentional. You can skip this check by passing --no-confirm-bbox." - ) - click.confirm("Do you want to continue?", abort=True) - - # Generate the URDF - click.echo(f"Generating URDF for {category}/{model}...") - generate_urdf_for_obj(visual_mesh, collision_meshes, category, model) - click.echo("URDF generation complete!") - - # Convert to USD - click.echo("Converting to USD...") - og.launch() - import_obj_urdf( - obj_category=category, - obj_model=model, - dataset_root=gm.USER_ASSETS_PATH, - ) - import_obj_metadata( - obj_category=category, - obj_model=model, - dataset_root=gm.USER_ASSETS_PATH, - import_render_channels=True, - ) - click.echo("Conversion complete!") - - if not headless: - click.echo("The asset has been successfully imported. You can view it and make changes and save if you'd like.") - while True: - og.sim.render() - - -if __name__ == "__main__": - import_asset() From 54df6543632fe2cd6422ec0763527d8c1630d7d0 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:04:57 -0800 Subject: [PATCH 20/61] update utils to support usda imports --- omnigibson/utils/usd_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/omnigibson/utils/usd_utils.py b/omnigibson/utils/usd_utils.py index 07421c0e6..ee3cfb6bc 100644 --- a/omnigibson/utils/usd_utils.py +++ b/omnigibson/utils/usd_utils.py @@ -1638,8 +1638,8 @@ def add_asset_to_stage(asset_path, prim_path): Usd.Prim: Loaded prim as a USD prim """ # Make sure this is actually a supported asset type - assert asset_path[-4:].lower() in {".usd", ".obj"}, f"Cannot load a non-USD or non-OBJ file as a USD prim!" 
- asset_type = asset_path[-3:] + asset_type = asset_path.split(".")[-1] + assert asset_type in {"usd", "usda", "obj"}, f"Cannot load a non-USD or non-OBJ file as a USD prim!" # Make sure the path exists assert os.path.exists(asset_path), f"Cannot load {asset_type.upper()} file {asset_path} because it does not exist!" From 1f8ed23c597b1d301be1c20c0122f1334edcd5d9 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:05:29 -0800 Subject: [PATCH 21/61] standardize object imports from external datasets --- omnigibson/macros.py | 4 ++-- omnigibson/objects/dataset_object.py | 24 ++++++++++++++++-------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/omnigibson/macros.py b/omnigibson/macros.py index 07635049f..b2dfed47f 100644 --- a/omnigibson/macros.py +++ b/omnigibson/macros.py @@ -56,11 +56,11 @@ def determine_gm_path(default_path, env_var_name): # Path (either relative to OmniGibson/omnigibson directory or global absolute path) for data -# Assets correspond to non-objects / scenes (e.g.: robots), and dataset incliudes objects + scene +# Assets correspond to non-objects / scenes (e.g.: robots), and dataset includes objects + scene # can override assets_path and dataset_path from environment variable gm.ASSET_PATH = determine_gm_path(os.path.join("data", "assets"), "OMNIGIBSON_ASSET_PATH") gm.DATASET_PATH = determine_gm_path(os.path.join("data", "og_dataset"), "OMNIGIBSON_DATASET_PATH") -gm.USER_ASSETS_PATH = determine_gm_path(os.path.join("data", "user_assets"), "OMNIGIBSON_USER_ASSETS_PATH") +gm.EXTERNAL_DATASET_PATH = determine_gm_path(os.path.join("data", "external_dataset"), "OMNIGIBSON_EXTERNAL_DATASET_PATH") gm.KEY_PATH = determine_gm_path(os.path.join("data", "omnigibson.key"), "OMNIGIBSON_KEY_PATH") # Which GPU to use -- None will result in omni automatically using an appropriate GPU. Otherwise, set with either diff --git a/omnigibson/objects/dataset_object.py b/omnigibson/objects/dataset_object.py index bee8111d5..a2058af2b 100644 --- a/omnigibson/objects/dataset_object.py +++ b/omnigibson/objects/dataset_object.py @@ -1,6 +1,7 @@ import math import os import random +from enum import IntEnum import torch as th @@ -24,6 +25,11 @@ m.MIN_OBJ_MASS = 0.4 +class DatasetType(IntEnum): + BEHAVIOR = 0 + EXTERNAL = 1 + + class DatasetObject(USDObject): """ DatasetObjects are instantiated from a USD file. It is an object that is assumed to come from an iG-supported @@ -37,6 +43,7 @@ def __init__( relative_prim_path=None, category="object", model=None, + dataset_type=DatasetType.BEHAVIOR, scale=None, visible=True, fixed_base=False, @@ -49,7 +56,6 @@ def __init__( include_default_states=True, bounding_box=None, in_rooms=None, - user_added=False, **kwargs, ): """ @@ -63,6 +69,9 @@ def __init__( {og_dataset_path}/objects/{category}/{model}/usd/{model}.usd Otherwise, will randomly sample a model given @category + dataset_type (DatasetType): Dataset to search for this object. Default is BEHAVIOR, corresponding to the + proprietary (encrypted) BEHAVIOR-1K dataset (gm.DATASET_PATH). Possible values are {BEHAVIOR, EXTERNAL}. + If EXTERNAL, assumes asset is found at gm.EXTERNAL_DATASET_PATH and additionally not encrypted. scale (None or float or 3-array): if specified, sets either the uniform (float) or x,y,z (3-array) scale for this object. A single number corresponds to uniform scaling along the x,y,z axes, whereas a 3-array specifies per-axis scaling. @@ -85,8 +94,6 @@ def __init__( -- not both! 
in_rooms (None or str or list): If specified, sets the room(s) that this object should belong to. Either a list of room type(s) or a single room type - user_added (bool): Whether this object was added by the user or not. If True, then this object will be found - in the user_assets dir instead of og_dataset, and will not be encrypted. kwargs (dict): Additional keyword arguments that are used for other super() calls from subclasses, allowing for flexible compositions of various object subclasses (e.g.: Robot is USDObject + ControllableObject). """ @@ -102,7 +109,7 @@ def __init__( # Add info to load config load_config = dict() if load_config is None else load_config load_config["bounding_box"] = bounding_box - load_config["user_added"] = user_added + load_config["dataset_type"] = dataset_type # All DatasetObjects should have xform properties pre-loaded load_config["xform_props_pre_loaded"] = True @@ -123,13 +130,13 @@ def __init__( ) self._model = model - usd_path = self.get_usd_path(category=category, model=model, user_added=user_added) + usd_path = self.get_usd_path(category=category, model=model, dataset_type=dataset_type) # Run super init super().__init__( relative_prim_path=relative_prim_path, usd_path=usd_path, - encrypted=not user_added, + encrypted=dataset_type == DatasetType.BEHAVIOR, name=name, category=category, scale=scale, @@ -146,7 +153,7 @@ def __init__( ) @classmethod - def get_usd_path(cls, category, model, user_added=False): + def get_usd_path(cls, category, model, dataset_type=DatasetType.BEHAVIOR): """ Grabs the USD path for a DatasetObject corresponding to @category and @model. @@ -155,11 +162,12 @@ def get_usd_path(cls, category, model, user_added=False): Args: category (str): Category for the object model (str): Specific model ID of the object + dataset_type (DatasetType): Dataset type, used to infer dataset directory to search for @category and @model Returns: str: Absolute filepath to the corresponding USD asset file """ - dataset_path = gm.USER_ASSETS_PATH if user_added else gm.DATASET_PATH + dataset_path = gm.DATASET_PATH if dataset_type == DatasetType.BEHAVIOR else gm.EXTERNAL_DATASET_PATH return os.path.join(dataset_path, "objects", category, model, "usd", f"{model}.usd") def sample_orientation(self): From fa06d235af6fd07dd15c689416b42db4f075c747 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:06:16 -0800 Subject: [PATCH 22/61] add updated import custom object example script --- .../examples/objects/import_custom_object.py | 132 ++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 omnigibson/examples/objects/import_custom_object.py diff --git a/omnigibson/examples/objects/import_custom_object.py b/omnigibson/examples/objects/import_custom_object.py new file mode 100644 index 000000000..db5dbf94c --- /dev/null +++ b/omnigibson/examples/objects/import_custom_object.py @@ -0,0 +1,132 @@ +""" +Helper script to download OmniGibson dataset and assets. +""" +import math +from typing import Literal + +import click +import trimesh + +import omnigibson as og +from omnigibson.utils.python_utils import assert_valid_key +from omnigibson.utils.asset_conversion_utils import ( + generate_collision_meshes, + generate_urdf_for_obj, + import_og_asset_from_urdf, +) + + +@click.command() +@click.option("--asset-path", required=True, type=click.Path(exists=True, dir_okay=False), help="Absolute path to asset file to import. This can be a raw visual mesh (for single-bodied, static objects), e.g. 
.obj, .glb, etc., or a more complex (such as articulated) objects defined in .urdf format.") +@click.option("--category", required=True, type=click.STRING, help="Category name to assign to the imported asset") +@click.option("--model", required=True, type=click.STRING, help="Model name to assign to the imported asset. This MUST be a 6-character long string that exclusively contains letters, and must be unique within the given @category") +@click.option( + "--collision-method", + type=click.Choice(["coacd", "convex", "none"]), + default="coacd", + help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull. 'none' will not generate any explicit mesh", +) +@click.option("--hull-count", type=int, default=32, help="Maximum number of convex hulls to decompose individual visual meshes into. Only relevant if --collision-method=coacd") +@click.option("--scale", type=float, default=1.0, help="Scale factor to apply to the mesh.") +@click.option("--up-axis", type=click.Choice(["z", "y"]), default="z", help="Up axis for the mesh.") +@click.option("--headless", is_flag=True, help="Run the script in headless mode.") +@click.option("--confirm-bbox", default=True, help="Whether to confirm the scale factor.") +@click.option("--overwrite", is_flag=True, help="Overwrite any pre-existing files") +def import_custom_object( + asset_path: str, + category: str, + model: str, + collision_method: Literal["coacd", "convex", "none"], + hull_count: int, + scale: float, + up_axis: Literal["z", "y"], + headless: bool, + confirm_bbox: bool, + overwrite: bool, +): + """ + Imports an externally-defined object asset into an OmniGibson-compatible USD format and saves the imported asset + files to the external dataset directory (gm.EXTERNAL_DATASET_PATH) + """ + assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." + collision_method = None if collision_method == "none" else collision_method + + # Sanity check mesh type + valid_formats = trimesh.available_formats() + mesh_format = asset_path.split(".")[-1] + + # If we're not a URDF, import the mesh directly first + if mesh_format != "urdf": + assert_valid_key(key=mesh_format, valid_keys=valid_formats, name="mesh format") + + # Load the mesh + visual_mesh: trimesh.Trimesh = trimesh.load(asset_path, force="mesh", process=False) + + # Generate collision meshes if requested + collision_meshes = generate_collision_meshes(visual_mesh, method=collision_method, hull_count=hull_count) \ + if collision_method is not None else [] + + # If the up axis is y, we need to rotate the meshes + if up_axis == "y": + rotation_matrix = trimesh.transformations.rotation_matrix(math.pi / 2, [1, 0, 0]) + visual_mesh.apply_transform(rotation_matrix) + for collision_mesh in collision_meshes: + collision_mesh.apply_transform(rotation_matrix) + + # If the scale is nonzero, we apply it to the meshes + if scale != 1.0: + scale_transform = trimesh.transformations.scale_matrix(scale) + visual_mesh.apply_transform(scale_transform) + for collision_mesh in collision_meshes: + collision_mesh.apply_transform(scale_transform) + + # Check the bounding box size and complain if it's larger than 3 meters + bbox_size = visual_mesh.bounding_box.extents + click.echo(f"Visual mesh bounding box size: {bbox_size}") + + if confirm_bbox: + if any(size > 3.0 for size in bbox_size): + click.echo( + f"Warning: The bounding box sounds a bit large. Are you sure you don't need to scale? 
" + f"We just wanted to confirm this is intentional. You can skip this check by passing --no-confirm-bbox." + ) + click.confirm("Do you want to continue?", abort=True) + + elif any(size < 0.01 for size in bbox_size): + click.echo( + f"Warning: The bounding box sounds a bit small. Are you sure you don't need to scale? " + f"We just wanted to confirm this is intentional. You can skip this check by passing --no-confirm-bbox." + ) + click.confirm("Do you want to continue?", abort=True) + + # Generate the URDF + click.echo(f"Generating URDF for {category}/{model}...") + generate_urdf_for_obj(visual_mesh, collision_meshes, category, model, overwrite=overwrite) + click.echo("URDF generation complete!") + + urdf_path = None + collision_method = None + else: + urdf_path = asset_path + collision_method = collision_method + + # Convert to USD + import_og_asset_from_urdf( + category=category, + model=model, + urdf_path=urdf_path, + collision_method=collision_method, + hull_count=hull_count, + overwrite=overwrite, + use_usda=False, + ) + + # Visualize if not headless + if not headless: + click.echo("The asset has been successfully imported. You can view it and make changes and save if you'd like.") + while True: + og.sim.render() + + +if __name__ == "__main__": + import_custom_object() From 970a9ea2f865ac6b87b8bff946d937fdba950d61 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 25 Nov 2024 15:06:51 -0800 Subject: [PATCH 23/61] add import custom robot script (WIP) --- .../examples/robots/import_custom_robot.py | 693 ++++++++++++++++++ 1 file changed, 693 insertions(+) create mode 100644 omnigibson/examples/robots/import_custom_robot.py diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py new file mode 100644 index 000000000..bab70909b --- /dev/null +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -0,0 +1,693 @@ +""" +Helper script to download OmniGibson dataset and assets. 
+""" +from pathlib import Path +from copy import deepcopy + +import yaml +yaml.Dumper.ignore_aliases = lambda *args : True +from addict import Dict + +import click +import torch as th + +import omnigibson as og +import omnigibson.lazy as lazy +from omnigibson.macros import gm +import omnigibson.utils.transform_utils as T +from omnigibson.utils.python_utils import assert_valid_key +from omnigibson.utils.usd_utils import create_primitive_mesh, create_joint +from omnigibson.utils.asset_conversion_utils import ( + import_og_asset_from_urdf, + _add_xform_properties, +) + +# Make sure flatcache is NOT used so we write directly to USD +gm.ENABLE_FLATCACHE = False + + +_DOCSTRING = """ +Imports an externally-defined robot URDF asset into an OmniGibson-compatible USD format and saves the imported asset +files to the external dataset directory (gm.EXTERNAL_DATASET_PATH) + +Note that @config is expected to follow the following format (fetch config shown as an example): + +\b +urdf_path: XXX # (str) Absolute path to robot URDF to import +name: XXX # (str) Name to assign to robot +collision_method: # (str) [coacd, convex, or null] collision decomposition method +hull_count: # (int) max hull count to use during decomposition, only relevant for coacd +headless: # (bool) if set, run without GUI +overwrite: # (bool) if set, overwrite any existing files +visual_only_links: # (list of str) links that will have any associated collision meshes removed + - laser_link + - estop_link +sphere_links: # (list of str) links that should have sphere collision approximation (eg: wheels) + - l_wheel_link + - r_wheel_link +camera_links: # (list of dict) information for adding cameras to robot + - link: eyes # (str) link name to add camera. Must exist if @parent_link is null, else will be + # added as a child of the parent + parent_link: null # (str) optional parent link to use if adding new link + offset: # (dict) local pos,ori offset values. if @parent_link is specified, defines offset + # between @parent_link and @link specified in @parent_link's frame. + # Otherwise, specifies offset of generated prim relative to @link's frame + position: [0, 0, 0] # (3-tuple) (x,y,z) offset + orientation: [0, 0, 0, 1.0] # (4-tuple) (x,y,z,w) offset + - link: wrist + parent_link: null + offset: + position: [0, 0, 0] + orientation: [0, 0, 0, 1.0] +lidar_links: # (list of dict) information for adding cameras to robot + - link: laser_link # same format as @camera_links + parent_link: null + offset: + position: [0, 0, 0] + orientation: [0, 0, 0, 1.0] +eef_vis_links: # (list of dict) information for adding cameras to robot + - link: gripper_vis_link # same format as @camera_links + parent_link: gripper_link + offset: + position: [0, 0, 0.1] + orientation: [0, 0, 0, 1.0] + +""" + + +def create_rigid_prim(stage, link_prim_path): + """ + Creates a new rigid link prim nested under @root_prim + + Args: + stage (Usd.Stage): Current active omniverse stage + link_prim_path (str): Prim path at which link will be created. Should not already exist on the stage + + Returns: + Usd.Prim: Newly created rigid prim + """ + # Make sure link prim does NOT already exist (this should be a new link) + link_prim_exists = lazy.omni.isaac.core.utils.prims.is_prim_path_valid(link_prim_path) + assert not link_prim_exists, \ + f"Cannot create new link because there already exists a link at prim path {link_prim_path}!" 
+ + # Manually create a new prim (specified offset) + link_prim = lazy.pxr.UsdGeom.Xform.Define(stage, link_prim_path).GetPrim() + _add_xform_properties(prim=link_prim) + + # Add rigid prim API to new link prim + lazy.pxr.UsdPhysics.RigidBodyAPI.Apply(link_prim) + lazy.pxr.PhysxSchema.PhysxRigidBodyAPI.Apply(link_prim) + + return link_prim + + +def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, pos_offset=None, ori_offset=None): + """ + Adds sensor to robot. This is an in-place operation on @root_prim + + Args: + stage (Usd.Stage): Current active omniverse stage + root_prim (Usd.Prim): Root prim of the current robot, assumed to be on the current stage + sensor_type (str): Sensor to create. Valid options are: {Camera, Lidar, VisualSphere} + link_name (str): Link to attach the created sensor prim to. If this link does not already exist in the robot's + current set of links, a new one will be created as a child of @parent_link_name's link + parent_link_name (None or str): If specified, parent link from which to create a new child link @link_name. If + set, @link_name should NOT be a link already found on the robot! + pos_offset (None or 3-tuple): If specified, (x,y,z) local translation offset to apply. + If @parent_link_name is specified, defines offset of @link_name wrt @parent_link_name + If only @link_name is specified, defines offset of the sensor prim wrt @link_name + ori_offset (None or 3-tuple): If specified, (x,y,z,w) quaternion rotation offset to apply. + If @parent_link_name is specified, defines offset of @link_name wrt @parent_link_name + If only @link_name is specified, defines offset of the sensor prim wrt @link_name + """ + # Make sure pos and ori offsets are defined + if pos_offset is None or pos_offset == {}: # May be {} from empty addict key + pos_offset = (0, 0, 0) + if ori_offset is None or ori_offset == {}: # May be {} from empty addict key + ori_offset = (0, 0, 0, 1) + + pos_offset = th.tensor(pos_offset, dtype=th.float) + ori_offset = th.tensor(ori_offset, dtype=th.float) + + # Sanity check link / parent link combos + root_prim_path = root_prim.GetPrimPath().pathString + if parent_link_name is None or parent_link_name == {}: # May be {} from empty addict key + parent_link_prim = None + else: + parent_path = f"{root_prim_path}/{parent_link_name}" + assert lazy.omni.isaac.core.utils.prims.is_prim_path_valid(parent_path), f"Could not find parent link within robot with name {parent_link_name}!" + parent_link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(parent_path) + + # If parent link is defined, link prim should NOT exist (this should be a new link) + link_prim_path = f"{root_prim_path}/{link_name}" + link_prim_exists = lazy.omni.isaac.core.utils.prims.is_prim_path_valid(link_prim_path) + if parent_link_prim is not None: + assert not link_prim_exists, f"Since parent link is defined, link_name {link_name} must be a link that is NOT pre-existing within the robot's set of links!" 
+ # Manually create a new prim (specified offset) + link_prim = create_rigid_prim( + stage=stage, + link_prim_path=link_prim_path, + ) + link_prim_xform = lazy.omni.isaac.core.prims.xform_prim.XFormPrim(prim_path=link_prim_path) + + # Create fixed joint to connect the two together + create_joint( + prim_path=f"{parent_path}/{parent_link_name}_{link_name}_joint", + joint_type="FixedJoint", + body0=parent_path, + body1=link_prim_path, + joint_frame_in_parent_frame_pos=pos_offset, + joint_frame_in_parent_frame_quat=ori_offset, + ) + + # Set child prim to the appropriate local pose -- this should be the parent local pose transformed by + # the additional offset + parent_prim_xform = lazy.omni.isaac.core.prims.xform_prim.XFormPrim(prim_path=parent_path) + parent_pos, parent_quat = parent_prim_xform.get_local_pose() + parent_quat = parent_quat[[1, 2, 3, 0]] + parent_pose = T.pose2mat((th.tensor(parent_pos), th.tensor(parent_quat))) + offset_pose = T.pose2mat((pos_offset, ori_offset)) + child_pose = parent_pose @ offset_pose + link_pos, link_quat = T.mat2pose(child_pose) + link_prim_xform.set_local_pose(link_pos, link_quat[[3, 0, 1, 2]]) + + else: + # Otherwise, link prim MUST exist + assert link_prim_exists, f"Since no parent link is defined, link_name {link_name} must be a link that IS pre-existing within the robot's set of links!" + link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(link_prim_path) + + # Define functions to generate the desired sensor prim + if sensor_type == "Camera": + create_sensor_prim = lambda parent_prim_path: lazy.pxr.UsdGeom.Camera.Define(stage, f"{parent_prim_path}/Camera").GetPrim() + elif sensor_type == "Lidar": + create_sensor_prim = lambda parent_prim_path: lazy.omni.kit.commands.execute( + "RangeSensorCreateLidar", + path="/Lidar", + parent=parent_prim_path, + min_range=0.4, + max_range=100.0, + draw_points=False, + draw_lines=False, + horizontal_fov=360.0, + vertical_fov=30.0, + horizontal_resolution=0.4, + vertical_resolution=4.0, + rotation_rate=0.0, + high_lod=False, + yaw_offset=0.0, + enable_semantics=False + )[1].GetPrim() + elif sensor_type == "VisualSphere": + create_sensor_prim = lambda parent_prim_path: create_primitive_mesh( + prim_path=f"{parent_prim_path}/VisualSphere", + primitive_type="Sphere", + extents=0.01, + stage=stage, + ).GetPrim() + else: + raise ValueError(f"Got unknown sensor type: {sensor_type}!") + + # Create the new prim as a child of the link prim + sensor_prim = create_sensor_prim(parent_prim_path=link_prim_path) + _add_xform_properties(sensor_prim) + + # If sensor prim is a camera, set some default values + if sensor_type == "Camera": + sensor_prim.GetAttribute("focalLength").Set(17.0) + sensor_prim.GetAttribute("clippingRange").Set(lazy.pxr.Gf.Vec2f(0.001, 1000000.0)) + # Refresh visibility + lazy.pxr.UsdGeom.Imageable(sensor_prim).MakeInvisible() + og.sim.render() + lazy.pxr.UsdGeom.Imageable(sensor_prim).MakeVisible() + + # If we didn't have a parent prim defined, we need to add the offset directly to this sensor + if parent_link_prim is None: + sensor_prim.GetAttribute("xformOp:translate").Set(lazy.pxr.Gf.Vec3d(*pos_offset.tolist())) + sensor_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatd(*ori_offset[[3, 0, 1, 2]].tolist())) # expects (w,x,y,z) + + +def _find_prim_with_condition(condition, root_prim): + """ + Recursively searches children of @root_prim to find first instance of prim satisfying @condition + + Args: + condition (function): Condition to check. 
Should satisfy function signature: + + def condition(prim: Usd.Prim) -> bool + + which returns True if the condition is met, else False + + root_prim (Usd.Prim): Root prim to search + + Returns: + None or Usd.Prim: If found, first prim whose prim name includes @name + """ + if condition(root_prim): + return root_prim + + for child in root_prim.GetChildren(): + found_prim = _find_prim_with_condition(condition=condition, root_prim=child) + if found_prim is not None: + return found_prim + + +def _find_prims_with_condition(condition, root_prim): + """ + Recursively searches children of @root_prim to find all instances of prim satisfying @condition + + Args: + condition (function): Condition to check. Should satisfy function signature: + + def condition(prim: Usd.Prim) -> bool + + which returns True if the condition is met, else False + + root_prim (Usd.Prim): Root prim to search + + Returns: + None or Usd.Prim: If found, first prim whose prim name includes @name + """ + found_prims = [] + if condition(root_prim): + found_prims.append(root_prim) + + for child in root_prim.GetChildren(): + found_prims += _find_prims_with_condition(condition=condition, root_prim=child) + + return found_prims + + +def find_prim_with_name(name, root_prim): + """ + Recursively searches children of @root_prim to find first instance of prim including string @name + + Args: + name (str): Name of the prim to search + root_prim (Usd.Prim): Root prim to search + + Returns: + None or Usd.Prim: If found, first prim whose prim name includes @name + """ + return _find_prim_with_condition(condition=lambda prim: name in prim.GetName(), root_prim=root_prim) + + +def find_articulation_root_prim(root_prim): + """ + Recursively searches children of @root_prim to find the articulation root + + Args: + root_prim (Usd.Prim): Root prim to search + + Returns: + None or Usd.Prim: If found, articulation root prim + """ + return _find_prim_with_condition(condition=lambda prim: prim.HasAPI(lazy.pxr.UsdPhysics.ArticulationRootAPI), root_prim=root_prim) + + +def find_all_prim_children_with_type(prim_type, root_prim): + """ + Recursively searches children of @root_prim to find all instances of prim that satisfy type @prim_type + + Args: + prim_type (str): Type of the prim to search + root_prim (Usd.Prim): Root prim to search + + Returns: + list of Usd.Prim: All found prims whose prim type includes @prim_type + """ + found_prims = [] + for child in root_prim.GetChildren(): + if prim_type in child.GetTypeName(): + found_prims.append(child) + found_prims += find_all_prim_children_with_type(prim_type=prim_type, root_prim=child) + + return found_prims + + +def make_joint_fixed(stage, root_prim, joint_name): + """ + Converts a revolute / prismatic joint @joint_name into a fixed joint + + NOTE: This is an in-place operation! + + Args: + stage (Usd.Stage): Current active omniverse stage + root_prim (Usd.Prim): Root prim of the current robot, assumed to be on the current stage + joint_name (str): Joint to convert to be fixed + """ + joint_prim = find_prim_with_name(name=joint_name, root_prim=root_prim) + assert joint_prim is not None, f"Could not find joint prim with name {joint_name}!" + + # Remove its Joint APIs and add Fixed Joint API + joint_type = joint_prim.GetTypeName() + if joint_type != "PhysicsFixedJoint": + assert joint_type in {"PhysicsRevoluteJoint", "PhysicsPrismaticJoint"}, \ + f"Got invalid joint type: {joint_type}. Only PhysicsRevoluteJoint and PhysicsPrismaticJoint are supported!" 
+ + lazy.omni.kit.commands.execute("RemovePhysicsComponentCommand", usd_prim=joint_prim, component=joint_type) + lazy.pxr.UsdPhysics.FixedJoint.Define(stage, joint_prim.GetPrimPath().pathString) + + +def set_link_collision_approximation(stage, root_prim, link_name, approximation_type): + """ + Sets all collision geoms under @link_name to be @approximation type + Args: + approximation_type (str): approximation used for collision. + Can be one of: {"none", "convexHull", "convexDecomposition", "meshSimplification", "sdf", + "boundingSphere", "boundingCube"} + If None, the approximation will use the underlying triangle mesh. + """ + # Sanity check approximation type + assert_valid_key( + key=approximation_type, + valid_keys={ + "none", + "convexHull", + "convexDecomposition", + "meshSimplification", + "sdf", + "boundingSphere", + "boundingCube", + }, + name="collision approximation type", + ) + + # Find the link prim first + link_prim = find_prim_with_name(name=link_name, root_prim=root_prim) + assert link_prim is not None, f"Could not find link prim with name {link_name}!" + + # Iterate through all children that are mesh prims + mesh_prims = find_all_prim_children_with_type(prim_type="Mesh", root_prim=link_prim) + + # For each mesh prim, check if it is collision -- if so, update the approximation type appropriately + for mesh_prim in mesh_prims: + if not mesh_prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI): + # This is a visual mesh, so skip + continue + mesh_collision_api = lazy.pxr.UsdPhysics.MeshCollisionAPI(mesh_prim) + + # Make sure to add the appropriate API if we're setting certain values + if approximation_type == "convexHull" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI.Apply(mesh_prim) + elif approximation_type == "convexDecomposition" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI.Apply(mesh_prim) + elif approximation_type == "meshSimplification" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI.Apply(mesh_prim) + elif approximation_type == "sdf" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI): + lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI.Apply(mesh_prim) + elif approximation_type == "none" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI): + lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI.Apply(mesh_prim) + + if approximation_type == "convexHull": + pch_api = lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI(mesh_prim) + # Also make sure the maximum vertex count is 60 (max number compatible with GPU) + # https://docs.omniverse.nvidia.com/app_create/prod_extensions/ext_physics/rigid-bodies.html#collision-settings + if pch_api.GetHullVertexLimitAttr().Get() is None: + pch_api.CreateHullVertexLimitAttr() + pch_api.GetHullVertexLimitAttr().Set(60) + + mesh_collision_api.GetApproximationAttr().Set(approximation_type) + + +def create_curobo_cfgs(robot_prim, curobo_cfg, root_link, save_dir, is_holonomic=False): + """ + Creates a set of curobo configs based on @robot_prim and @curobo_cfg + + Args: + robot_prim (Usd.Prim): Top-level prim defining the robot in the current USD stage + curobo_cfg (Dict): Dictionary of relevant curobo information + root_link (str): Name of the robot's root link, BEFORE any holonomic joints are applied 
+ save_dir (str): Path to the directory to save generated curobo files + is_holonomic (bool): Whether the robot has a holonomic base applied or not + """ + robot_name = robot_prim.GetName() + ee_links = list(curobo_cfg.eef_to_gripper_info.keys()) + + def get_joint_upper_limit(root_prim, joint_name): + joint_prim = find_prim_with_name(name=joint_name, root_prim=root_prim) + assert joint_prim is not None, f"Could not find joint prim with name {joint_name}!" + return joint_prim.GetAttribute("physics:upperLimit").Get() + + all_links = _find_prims_with_condition( + condition=lambda prim: prim.HasAPI(lazy.pxr.UsdPhysics.RigidBodyAPI), + root_prim=robot_prim, + ) + + all_collision_links = [link for link in all_links if _find_prim_with_condition( + condition=lambda prim: prim.GetPrimTypeInfo().GetTypeName() == "Mesh" and prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI), + root_prim=link, + ) is not None] + all_collision_link_names = [link.GetName() for link in all_collision_links] + + def is_articulated_joint(prim): + prim_type = prim.GetPrimTypeInfo().GetTypeName().lower() + return "joint" in prim_type and "fixed" not in prim_type + + joint_prims = _find_prims_with_condition( + condition=is_articulated_joint, + root_prim=robot_prim, + ) + all_joint_names = [joint_prim.GetName() for joint_prim in joint_prims] + retract_cfg = curobo_cfg.default_qpos + lock_joints = dict(curobo_cfg.lock_joints) + if is_holonomic: + # Move the final six joints to the beginning, since the holonomic joints are added at the end + all_joint_names = list(reversed(all_joint_names[-6:])) + all_joint_names[:-6] + retract_cfg = [0] * 6 + retract_cfg + lock_joints["base_footprint_z_joint"] = 0.0 + lock_joints["base_footprint_rx_joint"] = 0.0 + lock_joints["base_footprint_ry_joint"] = 0.0 + + joint_to_default_q = {jnt_name: q for jnt_name, q in zip(all_joint_names, retract_cfg)} + + default_generated_cfg = { + "robot_cfg": { + "usd_robot_root": f"/{robot_prim.GetName()}", + "usd_flip_joints": {}, + "usd_flip_joint_limits": [], + "base_link": "base_footprint_x" if is_holonomic else root_link, + "ee_link": ee_links[0], + "link_names": ee_links[1:], + "lock_joints": lock_joints, + "extra_links": {}, + "collision_link_names": all_collision_link_names, + "collision_spheres": dict(curobo_cfg.collision_spheres), + "collision_sphere_buffer": 0.002, + "extra_collision_spheres": {}, + "self_collision_ignore": dict(curobo_cfg.self_collision_ignore), + "self_collision_buffer": {root_link: 0.02}, + "use_global_cumul": True, + "mesh_link_names": all_collision_link_names, + "external_asset_path": None, + "cspace": all_joint_names, + "retract_config": retract_cfg, + "null_space_weight": [1] * len(all_joint_names), + "cspace_distance_weight": [1] * len(all_joint_names), + "max_jerk": 500.0, + "max_acceleration": 15.0, + } + } + + for eef_link_name, gripper_info in curobo_cfg.eef_to_gripper_info.items(): + attached_obj_link_name = f"attached_object_{eef_link_name}" + for jnt_name in gripper_info["joints"]: + default_generated_cfg["robot_cfg"]["lock_joints"][jnt_name] = get_joint_upper_limit(robot_prim, jnt_name) + default_generated_cfg["robot_cfg"]["extra_links"][attached_obj_link_name] = { + "parent_link_name": eef_link_name, + "link_name": attached_obj_link_name, + "fixed_transform": [0, 0, 0, 1, 0, 0, 0], # (x,y,z,w,x,y,z) + "joint_type": "FIXED", + "joint_name": f"{attached_obj_link_name}_joint", + } + default_generated_cfg["robot_cfg"]["collision_link_names"].append(attached_obj_link_name) + 
default_generated_cfg["robot_cfg"]["extra_collision_spheres"][attached_obj_link_name] = 32 + for link_name in gripper_info["links"]: + if link_name not in default_generated_cfg["robot_cfg"]["self_collision_ignore"]: + default_generated_cfg["robot_cfg"]["self_collision_ignore"][link_name] = [] + default_generated_cfg["robot_cfg"]["self_collision_ignore"][link_name].append(attached_obj_link_name) + default_generated_cfg["robot_cfg"]["mesh_link_names"].append(attached_obj_link_name) + + # Save generated file + Path(save_dir).mkdir(parents=True, exist_ok=True) + save_fpath = f"{save_dir}/{robot_name}_description_curobo_default.yaml" + with open(save_fpath, "w+") as f: yaml.dump(default_generated_cfg, f, default_flow_style=False) + + # Permute the default config to have additional base only / arm only configs + # Only relevant if robot is holonomic + if is_holonomic: + # Create base only config + base_only_cfg = deepcopy(default_generated_cfg) + base_only_cfg["robot_cfg"]["ee_link"] = root_link + base_only_cfg["robot_cfg"]["link_names"] = [] + for jnt_name, default_q in joint_to_default_q.items(): + if jnt_name not in base_only_cfg["robot_cfg"]["lock_joints"] and "base_footprint" not in jnt_name: + # Lock this joint + base_only_cfg["robot_cfg"]["lock_joints"][jnt_name] = default_q + save_base_fpath = f"{save_dir}/{robot_name}_description_curobo_base.yaml" + with open(save_base_fpath, "w+") as f: + yaml.dump(base_only_cfg, f) + + # Create arm only config + arm_only_cfg = deepcopy(default_generated_cfg) + for jnt_name in {"base_footprint_x_joint", "base_footprint_y_joint", "base_footprint_rz_joint"}: + arm_only_cfg["robot_cfg"]["lock_joints"][jnt_name] = 0.0 + save_arm_fpath = f"{save_dir}/{robot_name}_description_curobo_arm.yaml" + with open(save_arm_fpath, "w+") as f: + yaml.dump(arm_only_cfg, f) + + +@click.command(help=_DOCSTRING) +@click.option("--config", required=True, type=click.Path(exists=True, dir_okay=False), help="Absolute path to robot URDF file to import") +def import_custom_robot(config): + # Load config + with open(config, "r") as f: + cfg = Dict(yaml.load(f, yaml.Loader)) + + # Convert URDF -> USD + usd_path, prim = import_og_asset_from_urdf( + category="robot", + model=cfg.name, + urdf_path=cfg.urdf_path, + collision_method=cfg.collision.decompose_method, + coacd_links=cfg.collision.coacd_links, + convex_links=cfg.collision.convex_links, + no_decompose_links=cfg.collision.no_decompose_links, + visual_only_links=cfg.collision.no_collision_links, + hull_count=cfg.collision.hull_count, + overwrite=cfg.overwrite, + use_usda=True, + ) + + # Get current stage + stage = lazy.omni.isaac.core.utils.stage.get_current_stage() + + # Add cameras, lidars, and visual spheres + for eef_vis_info in cfg.eef_vis_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="VisualSphere", + link_name=eef_vis_info.link, + parent_link_name=eef_vis_info.parent_link, + pos_offset=eef_vis_info.offset.position, + ori_offset=eef_vis_info.offset.orientation, + ) + for camera_info in cfg.camera_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="Camera", + link_name=camera_info.link, + parent_link_name=camera_info.parent_link, + pos_offset=camera_info.offset.position, + ori_offset=camera_info.offset.orientation, + ) + for lidar_info in cfg.lidar_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="Lidar", + link_name=lidar_info.link, + parent_link_name=lidar_info.parent_link, + pos_offset=lidar_info.offset.position, + ori_offset=lidar_info.offset.orientation, + ) + + 
# Make wheels sphere approximations if requested + if cfg.base_motion.use_sphere_wheels: + for wheel_link in cfg.base_motion.wheel_links: + set_link_collision_approximation( + stage=stage, + root_prim=prim, + link_name=wheel_link, + approximation_type="boundingSphere", + ) + + # Get reference to articulation root link + articulation_root_prim = find_articulation_root_prim(root_prim=prim) + assert articulation_root_prim is not None, "Could not find any valid articulation root prim!" + root_prim_name = articulation_root_prim.GetName() + + # Add holonomic base if requested + if cfg.base_motion.use_holonomic_joints: + # Convert all wheel joints into fixed joints + for wheel_joint in cfg.base_motion.wheel_joints: + make_joint_fixed( + stage=stage, + root_prim=prim, + joint_name=wheel_joint, + ) + + # Remove the articulation root from the original root link + articulation_root_prim.RemoveAPI(lazy.pxr.UsdPhysics.ArticulationRootAPI) + + # Add 6DOF virtual joints ("base_footprint_") + # Create in backwards order so that the child always exists + child_prim = articulation_root_prim + robot_prim_path = prim.GetPrimPath().pathString + for prefix, joint_type, drive_type in zip(("r", ""), ("Revolute", "Prismatic"), ("angular", "linear")): + for axis in ("z", "y", "x"): + joint_suffix = f"{prefix}{axis}" + parent_name = f"base_footprint_{joint_suffix}" + # Create new link + parent_prim_path = f"{robot_prim_path}/{parent_name}" + parent_prim = create_rigid_prim( + stage=stage, + link_prim_path=parent_prim_path, + ) + + # Create new joint + joint_prim_path = f"{parent_prim_path}/{parent_name}_joint" + joint = create_joint( + prim_path=joint_prim_path, + joint_type=f"{joint_type}Joint", + body0=parent_prim_path, + body1=child_prim.GetPrimPath().pathString, + ) + joint.GetAttribute("physics:axis").Set(axis.upper()) + + # Add JointState API, and also Drive API only if the joint is in {x,y,rz} + lazy.pxr.PhysxSchema.JointStateAPI.Apply(joint, drive_type) + if joint_suffix in {"x", "y", "rz"}: + lazy.pxr.UsdPhysics.DriveAPI.Apply(joint, drive_type) + + # Update child + child_prim = parent_prim + + # Re-add the articulation root API to the new virtual footprint link + lazy.pxr.UsdPhysics.ArticulationRootAPI.Apply(parent_prim) + + # Save stage + stage.Save() + + # Import auxiliary files necessary for CuRobo motion planning + if bool(cfg.curobo): + create_curobo_cfgs( + robot_prim=prim, + root_link=root_prim_name, + curobo_cfg=cfg.curobo, + save_dir="/".join(usd_path.split("/")[:-2]), + is_holonomic=cfg.base_motion.use_holonomic_joints, + ) + + # Visualize if not headless + if not cfg.headless: + click.echo("The asset has been successfully imported. 
You can view it and make changes and save if you'd like.") + while True: + og.sim.render() + + +# TODO: Create configs and import all robots +# TODO: Make manual updates to the new robots so that they match the original robots +# TODO: Add function to merge specified fixed links and / or remove extraneous ones +# TODO: Import textures properly + +if __name__ == "__main__": + import_custom_robot() From 2a09f014f514d0534327ff2010be5e8f817f185d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 23:52:46 +0000 Subject: [PATCH 24/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../examples/objects/import_custom_object.py | 31 +- .../examples/robots/import_custom_robot.py | 213 ++-- omnigibson/macros.py | 4 +- omnigibson/utils/asset_conversion_utils.py | 60 +- omnigibson/utils/urdfpy_utils.py | 1011 ++++++++--------- 5 files changed, 632 insertions(+), 687 deletions(-) diff --git a/omnigibson/examples/objects/import_custom_object.py b/omnigibson/examples/objects/import_custom_object.py index db5dbf94c..0624d6c05 100644 --- a/omnigibson/examples/objects/import_custom_object.py +++ b/omnigibson/examples/objects/import_custom_object.py @@ -1,6 +1,7 @@ """ Helper script to download OmniGibson dataset and assets. """ + import math from typing import Literal @@ -8,25 +9,40 @@ import trimesh import omnigibson as og -from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.asset_conversion_utils import ( generate_collision_meshes, generate_urdf_for_obj, import_og_asset_from_urdf, ) +from omnigibson.utils.python_utils import assert_valid_key @click.command() -@click.option("--asset-path", required=True, type=click.Path(exists=True, dir_okay=False), help="Absolute path to asset file to import. This can be a raw visual mesh (for single-bodied, static objects), e.g. .obj, .glb, etc., or a more complex (such as articulated) objects defined in .urdf format.") +@click.option( + "--asset-path", + required=True, + type=click.Path(exists=True, dir_okay=False), + help="Absolute path to asset file to import. This can be a raw visual mesh (for single-bodied, static objects), e.g. .obj, .glb, etc., or a more complex (such as articulated) objects defined in .urdf format.", +) @click.option("--category", required=True, type=click.STRING, help="Category name to assign to the imported asset") -@click.option("--model", required=True, type=click.STRING, help="Model name to assign to the imported asset. This MUST be a 6-character long string that exclusively contains letters, and must be unique within the given @category") +@click.option( + "--model", + required=True, + type=click.STRING, + help="Model name to assign to the imported asset. This MUST be a 6-character long string that exclusively contains letters, and must be unique within the given @category", +) @click.option( "--collision-method", type=click.Choice(["coacd", "convex", "none"]), default="coacd", help="Method to generate the collision mesh. 'coacd' generates a set of convex decompositions, while 'convex' generates a single convex hull. 'none' will not generate any explicit mesh", ) -@click.option("--hull-count", type=int, default=32, help="Maximum number of convex hulls to decompose individual visual meshes into. Only relevant if --collision-method=coacd") +@click.option( + "--hull-count", + type=int, + default=32, + help="Maximum number of convex hulls to decompose individual visual meshes into. 
Only relevant if --collision-method=coacd", +) @click.option("--scale", type=float, default=1.0, help="Scale factor to apply to the mesh.") @click.option("--up-axis", type=click.Choice(["z", "y"]), default="z", help="Up axis for the mesh.") @click.option("--headless", is_flag=True, help="Run the script in headless mode.") @@ -63,8 +79,11 @@ def import_custom_object( visual_mesh: trimesh.Trimesh = trimesh.load(asset_path, force="mesh", process=False) # Generate collision meshes if requested - collision_meshes = generate_collision_meshes(visual_mesh, method=collision_method, hull_count=hull_count) \ - if collision_method is not None else [] + collision_meshes = ( + generate_collision_meshes(visual_mesh, method=collision_method, hull_count=hull_count) + if collision_method is not None + else [] + ) # If the up axis is y, we need to rotate the meshes if up_axis == "y": diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index bab70909b..39ec8dcc0 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -1,26 +1,24 @@ """ Helper script to download OmniGibson dataset and assets. """ -from pathlib import Path + from copy import deepcopy +from pathlib import Path import yaml -yaml.Dumper.ignore_aliases = lambda *args : True -from addict import Dict +yaml.Dumper.ignore_aliases = lambda *args: True import click import torch as th +from addict import Dict import omnigibson as og import omnigibson.lazy as lazy -from omnigibson.macros import gm import omnigibson.utils.transform_utils as T +from omnigibson.macros import gm +from omnigibson.utils.asset_conversion_utils import _add_xform_properties, import_og_asset_from_urdf from omnigibson.utils.python_utils import assert_valid_key -from omnigibson.utils.usd_utils import create_primitive_mesh, create_joint -from omnigibson.utils.asset_conversion_utils import ( - import_og_asset_from_urdf, - _add_xform_properties, -) +from omnigibson.utils.usd_utils import create_joint, create_primitive_mesh # Make sure flatcache is NOT used so we write directly to USD gm.ENABLE_FLATCACHE = False @@ -88,8 +86,9 @@ def create_rigid_prim(stage, link_prim_path): """ # Make sure link prim does NOT already exist (this should be a new link) link_prim_exists = lazy.omni.isaac.core.utils.prims.is_prim_path_valid(link_prim_path) - assert not link_prim_exists, \ - f"Cannot create new link because there already exists a link at prim path {link_prim_path}!" + assert ( + not link_prim_exists + ), f"Cannot create new link because there already exists a link at prim path {link_prim_path}!" # Manually create a new prim (specified offset) link_prim = lazy.pxr.UsdGeom.Xform.Define(stage, link_prim_path).GetPrim() @@ -105,12 +104,12 @@ def create_rigid_prim(stage, link_prim_path): def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, pos_offset=None, ori_offset=None): """ Adds sensor to robot. This is an in-place operation on @root_prim - + Args: stage (Usd.Stage): Current active omniverse stage root_prim (Usd.Prim): Root prim of the current robot, assumed to be on the current stage sensor_type (str): Sensor to create. Valid options are: {Camera, Lidar, VisualSphere} - link_name (str): Link to attach the created sensor prim to. If this link does not already exist in the robot's + link_name (str): Link to attach the created sensor prim to. 
If this link does not already exist in the robot's current set of links, a new one will be created as a child of @parent_link_name's link parent_link_name (None or str): If specified, parent link from which to create a new child link @link_name. If set, @link_name should NOT be a link already found on the robot! @@ -122,9 +121,9 @@ def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, If only @link_name is specified, defines offset of the sensor prim wrt @link_name """ # Make sure pos and ori offsets are defined - if pos_offset is None or pos_offset == {}: # May be {} from empty addict key + if pos_offset is None or pos_offset == {}: # May be {} from empty addict key pos_offset = (0, 0, 0) - if ori_offset is None or ori_offset == {}: # May be {} from empty addict key + if ori_offset is None or ori_offset == {}: # May be {} from empty addict key ori_offset = (0, 0, 0, 1) pos_offset = th.tensor(pos_offset, dtype=th.float) @@ -132,18 +131,22 @@ def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, # Sanity check link / parent link combos root_prim_path = root_prim.GetPrimPath().pathString - if parent_link_name is None or parent_link_name == {}: # May be {} from empty addict key + if parent_link_name is None or parent_link_name == {}: # May be {} from empty addict key parent_link_prim = None else: parent_path = f"{root_prim_path}/{parent_link_name}" - assert lazy.omni.isaac.core.utils.prims.is_prim_path_valid(parent_path), f"Could not find parent link within robot with name {parent_link_name}!" + assert lazy.omni.isaac.core.utils.prims.is_prim_path_valid( + parent_path + ), f"Could not find parent link within robot with name {parent_link_name}!" parent_link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(parent_path) # If parent link is defined, link prim should NOT exist (this should be a new link) link_prim_path = f"{root_prim_path}/{link_name}" link_prim_exists = lazy.omni.isaac.core.utils.prims.is_prim_path_valid(link_prim_path) if parent_link_prim is not None: - assert not link_prim_exists, f"Since parent link is defined, link_name {link_name} must be a link that is NOT pre-existing within the robot's set of links!" + assert ( + not link_prim_exists + ), f"Since parent link is defined, link_name {link_name} must be a link that is NOT pre-existing within the robot's set of links!" # Manually create a new prim (specified offset) link_prim = create_rigid_prim( stage=stage, @@ -174,12 +177,16 @@ def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, else: # Otherwise, link prim MUST exist - assert link_prim_exists, f"Since no parent link is defined, link_name {link_name} must be a link that IS pre-existing within the robot's set of links!" + assert ( + link_prim_exists + ), f"Since no parent link is defined, link_name {link_name} must be a link that IS pre-existing within the robot's set of links!" 
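# A minimal usage sketch for add_sensor(), covering the two modes described in
# the docstring above. `stage` and `robot_prim` are assumed to come from the
# import flow in import_custom_robot() below, and all link names and offsets
# are hypothetical placeholders:
#
#   # Mode 1: parent_link_name given -> link_name must be a NEW link; the offset
#   # defines the transform between the parent link and the newly created link.
#   add_sensor(
#       stage=stage, root_prim=robot_prim, sensor_type="Camera",
#       link_name="head_camera_link", parent_link_name="head_link",
#       pos_offset=(0.05, 0.0, 0.02), ori_offset=(0.0, 0.0, 0.0, 1.0),  # (x,y,z,w)
#   )
#
#   # Mode 2: parent_link_name omitted -> link_name must already exist on the
#   # robot; the offset is written directly onto the created sensor prim.
#   add_sensor(
#       stage=stage, root_prim=robot_prim, sensor_type="Lidar",
#       link_name="base_link",
#       pos_offset=(0.0, 0.0, 0.30), ori_offset=(0.0, 0.0, 0.0, 1.0),
#   )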
link_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(link_prim_path) # Define functions to generate the desired sensor prim if sensor_type == "Camera": - create_sensor_prim = lambda parent_prim_path: lazy.pxr.UsdGeom.Camera.Define(stage, f"{parent_prim_path}/Camera").GetPrim() + create_sensor_prim = lambda parent_prim_path: lazy.pxr.UsdGeom.Camera.Define( + stage, f"{parent_prim_path}/Camera" + ).GetPrim() elif sensor_type == "Lidar": create_sensor_prim = lambda parent_prim_path: lazy.omni.kit.commands.execute( "RangeSensorCreateLidar", @@ -196,7 +203,7 @@ def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, rotation_rate=0.0, high_lod=False, yaw_offset=0.0, - enable_semantics=False + enable_semantics=False, )[1].GetPrim() elif sensor_type == "VisualSphere": create_sensor_prim = lambda parent_prim_path: create_primitive_mesh( @@ -224,7 +231,9 @@ def add_sensor(stage, root_prim, sensor_type, link_name, parent_link_name=None, # If we didn't have a parent prim defined, we need to add the offset directly to this sensor if parent_link_prim is None: sensor_prim.GetAttribute("xformOp:translate").Set(lazy.pxr.Gf.Vec3d(*pos_offset.tolist())) - sensor_prim.GetAttribute("xformOp:orient").Set(lazy.pxr.Gf.Quatd(*ori_offset[[3, 0, 1, 2]].tolist())) # expects (w,x,y,z) + sensor_prim.GetAttribute("xformOp:orient").Set( + lazy.pxr.Gf.Quatd(*ori_offset[[3, 0, 1, 2]].tolist()) + ) # expects (w,x,y,z) def _find_prim_with_condition(condition, root_prim): @@ -302,7 +311,9 @@ def find_articulation_root_prim(root_prim): Returns: None or Usd.Prim: If found, articulation root prim """ - return _find_prim_with_condition(condition=lambda prim: prim.HasAPI(lazy.pxr.UsdPhysics.ArticulationRootAPI), root_prim=root_prim) + return _find_prim_with_condition( + condition=lambda prim: prim.HasAPI(lazy.pxr.UsdPhysics.ArticulationRootAPI), root_prim=root_prim + ) def find_all_prim_children_with_type(prim_type, root_prim): @@ -342,78 +353,80 @@ def make_joint_fixed(stage, root_prim, joint_name): # Remove its Joint APIs and add Fixed Joint API joint_type = joint_prim.GetTypeName() if joint_type != "PhysicsFixedJoint": - assert joint_type in {"PhysicsRevoluteJoint", "PhysicsPrismaticJoint"}, \ - f"Got invalid joint type: {joint_type}. Only PhysicsRevoluteJoint and PhysicsPrismaticJoint are supported!" + assert joint_type in { + "PhysicsRevoluteJoint", + "PhysicsPrismaticJoint", + }, f"Got invalid joint type: {joint_type}. Only PhysicsRevoluteJoint and PhysicsPrismaticJoint are supported!" lazy.omni.kit.commands.execute("RemovePhysicsComponentCommand", usd_prim=joint_prim, component=joint_type) lazy.pxr.UsdPhysics.FixedJoint.Define(stage, joint_prim.GetPrimPath().pathString) def set_link_collision_approximation(stage, root_prim, link_name, approximation_type): - """ - Sets all collision geoms under @link_name to be @approximation type - Args: - approximation_type (str): approximation used for collision. - Can be one of: {"none", "convexHull", "convexDecomposition", "meshSimplification", "sdf", - "boundingSphere", "boundingCube"} - If None, the approximation will use the underlying triangle mesh. 
- """ - # Sanity check approximation type - assert_valid_key( - key=approximation_type, - valid_keys={ - "none", - "convexHull", - "convexDecomposition", - "meshSimplification", - "sdf", - "boundingSphere", - "boundingCube", - }, - name="collision approximation type", - ) + """ + Sets all collision geoms under @link_name to be @approximation type + Args: + approximation_type (str): approximation used for collision. + Can be one of: {"none", "convexHull", "convexDecomposition", "meshSimplification", "sdf", + "boundingSphere", "boundingCube"} + If None, the approximation will use the underlying triangle mesh. + """ + # Sanity check approximation type + assert_valid_key( + key=approximation_type, + valid_keys={ + "none", + "convexHull", + "convexDecomposition", + "meshSimplification", + "sdf", + "boundingSphere", + "boundingCube", + }, + name="collision approximation type", + ) - # Find the link prim first - link_prim = find_prim_with_name(name=link_name, root_prim=root_prim) - assert link_prim is not None, f"Could not find link prim with name {link_name}!" - - # Iterate through all children that are mesh prims - mesh_prims = find_all_prim_children_with_type(prim_type="Mesh", root_prim=link_prim) - - # For each mesh prim, check if it is collision -- if so, update the approximation type appropriately - for mesh_prim in mesh_prims: - if not mesh_prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI): - # This is a visual mesh, so skip - continue - mesh_collision_api = lazy.pxr.UsdPhysics.MeshCollisionAPI(mesh_prim) - - # Make sure to add the appropriate API if we're setting certain values - if approximation_type == "convexHull" and not mesh_prim.HasAPI( - lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI - ): - lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI.Apply(mesh_prim) - elif approximation_type == "convexDecomposition" and not mesh_prim.HasAPI( - lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI - ): - lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI.Apply(mesh_prim) - elif approximation_type == "meshSimplification" and not mesh_prim.HasAPI( - lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI - ): - lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI.Apply(mesh_prim) - elif approximation_type == "sdf" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI): - lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI.Apply(mesh_prim) - elif approximation_type == "none" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI): - lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI.Apply(mesh_prim) - - if approximation_type == "convexHull": - pch_api = lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI(mesh_prim) - # Also make sure the maximum vertex count is 60 (max number compatible with GPU) - # https://docs.omniverse.nvidia.com/app_create/prod_extensions/ext_physics/rigid-bodies.html#collision-settings - if pch_api.GetHullVertexLimitAttr().Get() is None: - pch_api.CreateHullVertexLimitAttr() - pch_api.GetHullVertexLimitAttr().Set(60) - - mesh_collision_api.GetApproximationAttr().Set(approximation_type) + # Find the link prim first + link_prim = find_prim_with_name(name=link_name, root_prim=root_prim) + assert link_prim is not None, f"Could not find link prim with name {link_name}!" 
+ + # Iterate through all children that are mesh prims + mesh_prims = find_all_prim_children_with_type(prim_type="Mesh", root_prim=link_prim) + + # For each mesh prim, check if it is collision -- if so, update the approximation type appropriately + for mesh_prim in mesh_prims: + if not mesh_prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI): + # This is a visual mesh, so skip + continue + mesh_collision_api = lazy.pxr.UsdPhysics.MeshCollisionAPI(mesh_prim) + + # Make sure to add the appropriate API if we're setting certain values + if approximation_type == "convexHull" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI.Apply(mesh_prim) + elif approximation_type == "convexDecomposition" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxConvexDecompositionCollisionAPI.Apply(mesh_prim) + elif approximation_type == "meshSimplification" and not mesh_prim.HasAPI( + lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI + ): + lazy.pxr.PhysxSchema.PhysxTriangleMeshSimplificationCollisionAPI.Apply(mesh_prim) + elif approximation_type == "sdf" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI): + lazy.pxr.PhysxSchema.PhysxSDFMeshCollisionAPI.Apply(mesh_prim) + elif approximation_type == "none" and not mesh_prim.HasAPI(lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI): + lazy.pxr.PhysxSchema.PhysxTriangleMeshCollisionAPI.Apply(mesh_prim) + + if approximation_type == "convexHull": + pch_api = lazy.pxr.PhysxSchema.PhysxConvexHullCollisionAPI(mesh_prim) + # Also make sure the maximum vertex count is 60 (max number compatible with GPU) + # https://docs.omniverse.nvidia.com/app_create/prod_extensions/ext_physics/rigid-bodies.html#collision-settings + if pch_api.GetHullVertexLimitAttr().Get() is None: + pch_api.CreateHullVertexLimitAttr() + pch_api.GetHullVertexLimitAttr().Set(60) + + mesh_collision_api.GetApproximationAttr().Set(approximation_type) def create_curobo_cfgs(robot_prim, curobo_cfg, root_link, save_dir, is_holonomic=False): @@ -440,10 +453,16 @@ def get_joint_upper_limit(root_prim, joint_name): root_prim=robot_prim, ) - all_collision_links = [link for link in all_links if _find_prim_with_condition( - condition=lambda prim: prim.GetPrimTypeInfo().GetTypeName() == "Mesh" and prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI), + all_collision_links = [ + link + for link in all_links + if _find_prim_with_condition( + condition=lambda prim: prim.GetPrimTypeInfo().GetTypeName() == "Mesh" + and prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI), root_prim=link, - ) is not None] + ) + is not None + ] all_collision_link_names = [link.GetName() for link in all_collision_links] def is_articulated_joint(prim): @@ -502,7 +521,7 @@ def is_articulated_joint(prim): default_generated_cfg["robot_cfg"]["extra_links"][attached_obj_link_name] = { "parent_link_name": eef_link_name, "link_name": attached_obj_link_name, - "fixed_transform": [0, 0, 0, 1, 0, 0, 0], # (x,y,z,w,x,y,z) + "fixed_transform": [0, 0, 0, 1, 0, 0, 0], # (x,y,z,w,x,y,z) "joint_type": "FIXED", "joint_name": f"{attached_obj_link_name}_joint", } @@ -517,7 +536,8 @@ def is_articulated_joint(prim): # Save generated file Path(save_dir).mkdir(parents=True, exist_ok=True) save_fpath = f"{save_dir}/{robot_name}_description_curobo_default.yaml" - with open(save_fpath, "w+") as f: yaml.dump(default_generated_cfg, f, default_flow_style=False) + with open(save_fpath, "w+") as f: 
+ yaml.dump(default_generated_cfg, f, default_flow_style=False) # Permute the default config to have additional base only / arm only configs # Only relevant if robot is holonomic @@ -544,7 +564,12 @@ def is_articulated_joint(prim): @click.command(help=_DOCSTRING) -@click.option("--config", required=True, type=click.Path(exists=True, dir_okay=False), help="Absolute path to robot URDF file to import") +@click.option( + "--config", + required=True, + type=click.Path(exists=True, dir_okay=False), + help="Absolute path to robot URDF file to import", +) def import_custom_robot(config): # Load config with open(config, "r") as f: diff --git a/omnigibson/macros.py b/omnigibson/macros.py index b2dfed47f..de90df920 100644 --- a/omnigibson/macros.py +++ b/omnigibson/macros.py @@ -60,7 +60,9 @@ def determine_gm_path(default_path, env_var_name): # can override assets_path and dataset_path from environment variable gm.ASSET_PATH = determine_gm_path(os.path.join("data", "assets"), "OMNIGIBSON_ASSET_PATH") gm.DATASET_PATH = determine_gm_path(os.path.join("data", "og_dataset"), "OMNIGIBSON_DATASET_PATH") -gm.EXTERNAL_DATASET_PATH = determine_gm_path(os.path.join("data", "external_dataset"), "OMNIGIBSON_EXTERNAL_DATASET_PATH") +gm.EXTERNAL_DATASET_PATH = determine_gm_path( + os.path.join("data", "external_dataset"), "OMNIGIBSON_EXTERNAL_DATASET_PATH" +) gm.KEY_PATH = determine_gm_path(os.path.join("data", "omnigibson.key"), "OMNIGIBSON_KEY_PATH") # Which GPU to use -- None will result in omni automatically using an appropriate GPU. Otherwise, set with either diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 2660f4410..1f306bf54 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1192,7 +1192,14 @@ def _create_urdf_import_config(use_convex_decomposition=False): return import_config -def import_obj_urdf(urdf_path, obj_category, obj_model, dataset_root=gm.EXTERNAL_DATASET_PATH, use_omni_convex_decomp=False, use_usda=False): +def import_obj_urdf( + urdf_path, + obj_category, + obj_model, + dataset_root=gm.EXTERNAL_DATASET_PATH, + use_omni_convex_decomp=False, + use_usda=False, +): """ Imports an object from a URDF file into the current stage. 
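# A minimal call sketch for import_obj_urdf() as reformatted above. It assumes
# an already-initialized OmniGibson stage (as inside the import scripts in this
# patch series); the URDF path, category, and model name are hypothetical
# placeholders, not real assets.
from omnigibson.utils.asset_conversion_utils import import_obj_urdf

usd_path = import_obj_urdf(
    urdf_path="/absolute/path/to/mug.urdf",  # placeholder path
    obj_category="mug",                      # placeholder category
    obj_model="aaaaaa",                      # placeholder six-letter model name
    use_omni_convex_decomp=False,            # collision meshes assumed pre-decomposed
    use_usda=True,                           # write .usda (human-readable USD)
)
# Per the function's docstring, the return value is the absolute path to the imported USD.
print(usd_path)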
@@ -1209,7 +1216,9 @@ def import_obj_urdf(urdf_path, obj_category, obj_model, dataset_root=gm.EXTERNAL str: Absolute path to the imported USD file """ # Preprocess input URDF to account for metalinks - urdf_path = _add_metalinks_to_urdf(urdf_path=urdf_path, obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) + urdf_path = _add_metalinks_to_urdf( + urdf_path=urdf_path, obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root + ) # Import URDF cfg = _create_urdf_import_config(use_convex_decomposition=use_omni_convex_decomp) # Check if filepath exists @@ -1776,6 +1785,7 @@ def make_mesh_positive(mesh_fpath, scale, output_suffix="mirror"): return None return output_suffix + def make_asset_positive(urdf_fpath, output_suffix="mirror"): assert urdf_fpath.endswith(".urdf") out_lines = [] @@ -1793,7 +1803,9 @@ def make_asset_positive(urdf_fpath, output_suffix="mirror"): base_fpath = f"{os.path.dirname(urdf_fpath)}/" mesh_abs_fpath = f"{base_fpath}{mesh_rel_fpath}" filetype = mesh_abs_fpath.split(".")[-1] - mesh_output_suffix = make_mesh_positive(mesh_abs_fpath.split(".")[0], scale.cpu().numpy(), output_suffix) + mesh_output_suffix = make_mesh_positive( + mesh_abs_fpath.split(".")[0], scale.cpu().numpy(), output_suffix + ) new_mesh_abs_fpath = mesh_abs_fpath.replace(f".{filetype}", f"_{mesh_output_suffix}.{filetype}") new_mesh_rel_fpath = new_mesh_abs_fpath.split(base_fpath)[1] out_line = line.replace(mesh_rel_fpath, new_mesh_rel_fpath).replace(scale_str, "1 1 1") @@ -1822,7 +1834,9 @@ def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, disca List[trimesh.Trimesh]: The collision meshes. """ # If the mesh is convex or the mesh is a proper volume and similar to its convex hull, simply return that directly - if trimesh_mesh.is_convex or (trimesh_mesh.is_volume and (trimesh_mesh.volume / trimesh_mesh.convex_hull.volume) > 0.90): + if trimesh_mesh.is_convex or ( + trimesh_mesh.is_volume and (trimesh_mesh.volume / trimesh_mesh.convex_hull.volume) > 0.90 + ): hulls = [trimesh_mesh.convex_hull] elif method == "coacd": @@ -1872,14 +1886,14 @@ def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, disca def get_collision_approximation_for_urdf( - urdf_path, - collision_method="coacd", - hull_count=32, - coacd_links=None, - convex_links=None, - no_decompose_links=None, - visual_only_links=None, - ignore_links=None, + urdf_path, + collision_method="coacd", + hull_count=32, + coacd_links=None, + convex_links=None, + no_decompose_links=None, + visual_only_links=None, + ignore_links=None, ): """ Computes collision approximation for all collision meshes (which are assumed to be non-convex) in @@ -1964,7 +1978,9 @@ def get_collision_approximation_for_urdf( # OmniGibson requires unit-bbox collision meshes, so here we do that scaling bounding_box = processed_collision_mesh.bounding_box.extents - assert all(x > 0 for x in bounding_box), f"Bounding box extents are not all positive: {bounding_box}" + assert all( + x > 0 for x in bounding_box + ), f"Bounding box extents are not all positive: {bounding_box}" collision_scale = 1.0 / bounding_box collision_scale_matrix = th.eye(4) collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale)) @@ -2036,7 +2052,9 @@ def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATA ) -def generate_urdf_for_obj(visual_mesh, collision_meshes, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False): +def generate_urdf_for_obj( + visual_mesh, collision_meshes, 
category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False +): # Create a directory for the object obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl if not overwrite: @@ -2228,11 +2246,7 @@ def record_obj_metadata_from_urdf(urdf_path, obj_dir, joint_setting="zero", over val = lambda jnt: jnt.limit.upper else: raise ValueError(f"Got invalid joint_setting: {joint_setting}! Valid options are ['low', 'zero', 'high']") - joint_cfg = { - joint.name: val(joint) - for joint in robot.joints - if joint.joint_type in ("prismatic", "revolute") - } + joint_cfg = {joint.name: val(joint) for joint in robot.joints if joint.joint_type in ("prismatic", "revolute")} vfk = robot.visual_trimesh_fk(cfg=joint_cfg) scene = trimesh.Scene() @@ -2356,7 +2370,7 @@ def import_og_asset_from_urdf( obj_category=category, obj_model=model, dataset_root=dataset_root, - use_omni_convex_decomp=False, # We already pre-decomposed the values, so don' use omni convex decomp + use_omni_convex_decomp=False, # We already pre-decomposed the values, so don' use omni convex decomp use_usda=use_usda, ) @@ -2365,8 +2379,10 @@ def import_og_asset_from_urdf( obj_category=category, obj_model=model, dataset_root=dataset_root, - import_render_channels=False, # TODO: Make this True once we find a systematic / robust way to import materials of different source formats + import_render_channels=False, # TODO: Make this True once we find a systematic / robust way to import materials of different source formats + ) + print( + f"\nConversion complete! Object has been successfully imported into OmniGibson-compatible USD, located at:\n\n{usd_path}\n" ) - print(f"\nConversion complete! Object has been successfully imported into OmniGibson-compatible USD, located at:\n\n{usd_path}\n") return usd_path, prim diff --git a/omnigibson/utils/urdfpy_utils.py b/omnigibson/utils/urdfpy_utils.py index e9769c6ad..272521925 100644 --- a/omnigibson/utils/urdfpy_utils.py +++ b/omnigibson/utils/urdfpy_utils.py @@ -1,14 +1,14 @@ -from collections import OrderedDict import copy import os import time +from collections import OrderedDict -from lxml import etree as ET import networkx as nx import numpy as np import PIL -import trimesh import six +import trimesh +from lxml import etree as ET class URDFType(object): @@ -33,9 +33,10 @@ class URDFType(object): - ``_TAG`` - This is a string that represents the XML tag for the node containing this type of object. """ - _ATTRIBS = {} # Map from attrib name to (type, required) + + _ATTRIBS = {} # Map from attrib name to (type, required) _ELEMENTS = {} # Map from element name to (type, required, multiple) - _TAG = '' # XML tag for this element + _TAG = "" # XML tag for this element def __init__(self): pass @@ -57,7 +58,7 @@ def _parse_attrib(cls, val_type, val): The parsed attribute. 
""" if val_type == np.ndarray: - val = np.fromstring(val, sep=' ') + val = np.fromstring(val, sep=" ") else: val = val_type(val) return val @@ -86,8 +87,7 @@ def _parse_simple_attribs(cls, node): v = cls._parse_attrib(t, node.attrib[a]) except Exception: raise ValueError( - 'Missing required attribute {} when parsing an object ' - 'of type {}'.format(a, cls.__name__) + "Missing required attribute {} when parsing an object " "of type {}".format(a, cls.__name__) ) else: v = None @@ -126,10 +126,8 @@ def _parse_simple_elements(cls, node, path): vs = node.findall(t._TAG) if len(vs) == 0 and r: raise ValueError( - 'Missing required subelement(s) of type {} when ' - 'parsing an object of type {}'.format( - t.__name__, cls.__name__ - ) + "Missing required subelement(s) of type {} when " + "parsing an object of type {}".format(t.__name__, cls.__name__) ) v = [t._from_xml(n, path) for n in vs] kwargs[a] = v @@ -293,10 +291,8 @@ class Box(URDFType): The length, width, and height of the box in meters. """ - _ATTRIBS = { - 'size': (np.ndarray, True) - } - _TAG = 'box' + _ATTRIBS = {"size": (np.ndarray, True)} + _TAG = "box" def __init__(self, size): self.size = size @@ -304,8 +300,7 @@ def __init__(self, size): @property def size(self): - """(3,) float : The length, width, and height of the box in meters. - """ + """(3,) float : The length, width, and height of the box in meters.""" return self._size @size.setter @@ -322,7 +317,7 @@ def meshes(self): self._meshes = [trimesh.creation.box(extents=self.size)] return self._meshes - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -355,10 +350,10 @@ class Cylinder(URDFType): """ _ATTRIBS = { - 'radius': (float, True), - 'length': (float, True), + "radius": (float, True), + "length": (float, True), } - _TAG = 'cylinder' + _TAG = "cylinder" def __init__(self, radius, length): self.radius = radius @@ -367,8 +362,7 @@ def __init__(self, radius, length): @property def radius(self): - """float : The radius of the cylinder in meters. - """ + """float : The radius of the cylinder in meters.""" return self._radius @radius.setter @@ -378,8 +372,7 @@ def radius(self, value): @property def length(self): - """float : The length of the cylinder in meters. - """ + """float : The length of the cylinder in meters.""" return self._length @length.setter @@ -393,12 +386,10 @@ def meshes(self): that represent this object. """ if len(self._meshes) == 0: - self._meshes = [trimesh.creation.cylinder( - radius=self.radius, height=self.length - )] + self._meshes = [trimesh.creation.cylinder(radius=self.radius, height=self.length)] return self._meshes - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -415,7 +406,7 @@ def copy(self, prefix='', scale=None): scale = 1.0 if isinstance(scale, (list, np.ndarray)): if scale[0] != scale[1]: - raise ValueError('Cannot rescale cylinder geometry with asymmetry in x/y') + raise ValueError("Cannot rescale cylinder geometry with asymmetry in x/y") c = Cylinder( radius=self.radius * scale[0], length=self.length * scale[2], @@ -436,10 +427,11 @@ class Sphere(URDFType): radius : float The radius of the sphere in meters. 
""" + _ATTRIBS = { - 'radius': (float, True), + "radius": (float, True), } - _TAG = 'sphere' + _TAG = "sphere" def __init__(self, radius): self.radius = radius @@ -447,8 +439,7 @@ def __init__(self, radius): @property def radius(self): - """float : The radius of the sphere in meters. - """ + """float : The radius of the sphere in meters.""" return self._radius @radius.setter @@ -465,7 +456,7 @@ def meshes(self): self._meshes = [trimesh.creation.icosphere(radius=self.radius)] return self._meshes - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -482,7 +473,7 @@ def copy(self, prefix='', scale=None): scale = 1.0 if isinstance(scale, (list, np.ndarray)): if scale[0] != scale[1] or scale[0] != scale[2]: - raise ValueError('Spheres do not support non-uniform scaling!') + raise ValueError("Spheres do not support non-uniform scaling!") scale = scale[0] s = Sphere( radius=self.radius * scale, @@ -507,11 +498,9 @@ class Mesh(URDFType): might be composed of separate trimesh objects. If not specified, the mesh is loaded from the file using trimesh. """ - _ATTRIBS = { - 'filename': (str, True), - 'scale': (np.ndarray, False) - } - _TAG = 'mesh' + + _ATTRIBS = {"filename": (str, True), "scale": (np.ndarray, False)} + _TAG = "mesh" def __init__(self, filename, scale=None, meshes=None): if meshes is None: @@ -522,8 +511,7 @@ def __init__(self, filename, scale=None, meshes=None): @property def filename(self): - """str : The path to the mesh file for this object. - """ + """str : The path to the mesh file for this object.""" return self._filename @filename.setter @@ -532,8 +520,7 @@ def filename(self, value): @property def scale(self): - """(3,) float : A scaling for the mesh along its local XYZ axes. 
- """ + """(3,) float : A scaling for the mesh along its local XYZ axes.""" return self._scale @scale.setter @@ -556,15 +543,14 @@ def meshes(self, value): elif isinstance(value, (list, tuple, set, np.ndarray)): value = list(value) if len(value) == 0: - raise ValueError('Mesh must have at least one trimesh.Trimesh') + raise ValueError("Mesh must have at least one trimesh.Trimesh") for m in value: if not isinstance(m, trimesh.Trimesh): - raise TypeError('Mesh requires a trimesh.Trimesh or a ' - 'list of them') + raise TypeError("Mesh requires a trimesh.Trimesh or a " "list of them") elif isinstance(value, trimesh.Trimesh): value = [value] else: - raise TypeError('Mesh requires a trimesh.Trimesh') + raise TypeError("Mesh requires a trimesh.Trimesh") self._meshes = value @classmethod @@ -573,7 +559,7 @@ def _from_xml(cls, node, path): # Load the mesh, combining collision geometry meshes but keeping # visual ones separate to preserve colors and textures - fn = get_filename(path, kwargs['filename']) + fn = get_filename(path, kwargs["filename"]) combine = node.getparent().getparent().tag == Collision._TAG meshes = load_meshes(fn) if combine: @@ -581,7 +567,7 @@ def _from_xml(cls, node, path): for m in meshes: m.visual = trimesh.visual.ColorVisuals(mesh=m) meshes = [meshes[0] + meshes[1:]] - kwargs['meshes'] = meshes + kwargs["meshes"] = meshes return Mesh(**kwargs) @@ -593,7 +579,7 @@ def _to_xml(self, parent, path): meshes = self.meshes if len(meshes) == 1: meshes = meshes[0] - elif os.path.splitext(fn)[1] == '.glb': + elif os.path.splitext(fn)[1] == ".glb": meshes = trimesh.scene.Scene(geometry=meshes) trimesh.exchange.export.export_mesh(meshes, fn) @@ -601,7 +587,7 @@ def _to_xml(self, parent, path): node = self._unparse(path) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -618,17 +604,17 @@ def copy(self, prefix='', scale=None): if scale is not None: sm = np.eye(4) if isinstance(scale, (list, np.ndarray)): - sm[:3,:3] = np.diag(scale) + sm[:3, :3] = np.diag(scale) else: - sm[:3,:3] = np.diag(np.repeat(scale, 3)) + sm[:3, :3] = np.diag(np.repeat(scale, 3)) for i, m in enumerate(meshes): meshes[i] = m.apply_transform(sm) base, fn = os.path.split(self.filename) - fn = '{}{}'.format(prefix, self.filename) + fn = "{}{}".format(prefix, self.filename) m = Mesh( filename=os.path.join(base, fn), scale=(self.scale.copy() if self.scale is not None else None), - meshes=meshes + meshes=meshes, ) return m @@ -652,17 +638,16 @@ class Geometry(URDFType): """ _ELEMENTS = { - 'box': (Box, False, False), - 'cylinder': (Cylinder, False, False), - 'sphere': (Sphere, False, False), - 'mesh': (Mesh, False, False), + "box": (Box, False, False), + "cylinder": (Cylinder, False, False), + "sphere": (Sphere, False, False), + "mesh": (Mesh, False, False), } - _TAG = 'geometry' + _TAG = "geometry" def __init__(self, box=None, cylinder=None, sphere=None, mesh=None): - if (box is None and cylinder is None and - sphere is None and mesh is None): - raise ValueError('At least one geometry element must be set') + if box is None and cylinder is None and sphere is None and mesh is None: + raise ValueError("At least one geometry element must be set") self.box = box self.cylinder = cylinder self.sphere = sphere @@ -670,50 +655,46 @@ def __init__(self, box=None, cylinder=None, sphere=None, mesh=None): @property def box(self): - """:class:`.Box` : Box geometry. 
- """ + """:class:`.Box` : Box geometry.""" return self._box @box.setter def box(self, value): if value is not None and not isinstance(value, Box): - raise TypeError('Expected Box type') + raise TypeError("Expected Box type") self._box = value @property def cylinder(self): - """:class:`.Cylinder` : Cylinder geometry. - """ + """:class:`.Cylinder` : Cylinder geometry.""" return self._cylinder @cylinder.setter def cylinder(self, value): if value is not None and not isinstance(value, Cylinder): - raise TypeError('Expected Cylinder type') + raise TypeError("Expected Cylinder type") self._cylinder = value @property def sphere(self): - """:class:`.Sphere` : Spherical geometry. - """ + """:class:`.Sphere` : Spherical geometry.""" return self._sphere @sphere.setter def sphere(self, value): if value is not None and not isinstance(value, Sphere): - raise TypeError('Expected Sphere type') + raise TypeError("Expected Sphere type") self._sphere = value @property def mesh(self): - """:class:`.Mesh` : Mesh geometry. - """ + """:class:`.Mesh` : Mesh geometry.""" return self._mesh @mesh.setter def mesh(self, value): if value is not None and not isinstance(value, Mesh): - raise TypeError('Expected Mesh type') + raise TypeError("Expected Mesh type") self._mesh = value @property @@ -738,7 +719,7 @@ def meshes(self): """ return self.geometry.meshes - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -773,10 +754,8 @@ class Texture(URDFType): If not specified, it is loaded automatically from the filename. """ - _ATTRIBS = { - 'filename': (str, True) - } - _TAG = 'texture' + _ATTRIBS = {"filename": (str, True)} + _TAG = "texture" def __init__(self, filename, image=None): if image is None: @@ -786,8 +765,7 @@ def __init__(self, filename, image=None): @property def filename(self): - """str : Path to the image for this texture. - """ + """str : Path to the image for this texture.""" return self._filename @filename.setter @@ -796,8 +774,7 @@ def filename(self, value): @property def image(self): - """:class:`PIL.Image.Image` : The image for this texture. - """ + """:class:`PIL.Image.Image` : The image for this texture.""" return self._image @image.setter @@ -807,8 +784,7 @@ def image(self, value): if isinstance(value, np.ndarray): value = PIL.Image.fromarray(value) elif not isinstance(value, PIL.Image.Image): - raise ValueError('Texture only supports numpy arrays ' - 'or PIL images') + raise ValueError("Texture only supports numpy arrays " "or PIL images") self._image = value @classmethod @@ -816,8 +792,8 @@ def _from_xml(cls, node, path): kwargs = cls._parse(node, path) # Load image - fn = get_filename(path, kwargs['filename']) - kwargs['image'] = PIL.Image.open(fn) + fn = get_filename(path, kwargs["filename"]) + kwargs["image"] = PIL.Image.open(fn) return Texture(**kwargs) @@ -828,7 +804,7 @@ def _to_xml(self, parent, path): return self._unparse(path) - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -841,10 +817,7 @@ def copy(self, prefix='', scale=None): :class:`.Texture` A deep copy. """ - v = Texture( - filename=self.filename, - image=self.image.copy() - ) + v = Texture(filename=self.filename, image=self.image.copy()) return v @@ -860,13 +833,12 @@ class Material(URDFType): texture : :class:`.Texture`, optional A texture for the material. 
""" - _ATTRIBS = { - 'name': (str, True) - } + + _ATTRIBS = {"name": (str, True)} _ELEMENTS = { - 'texture': (Texture, False, False), + "texture": (Texture, False, False), } - _TAG = 'material' + _TAG = "material" def __init__(self, name, color=None, texture=None): self.name = name @@ -875,8 +847,7 @@ def __init__(self, name, color=None, texture=None): @property def name(self): - """str : The name of the material. - """ + """str : The name of the material.""" return self._name @name.setter @@ -885,8 +856,7 @@ def name(self, value): @property def color(self): - """(4,) float : The RGBA color of the material, in the range [0,1]. - """ + """(4,) float : The RGBA color of the material, in the range [0,1].""" return self._color @color.setter @@ -895,13 +865,12 @@ def color(self, value): value = np.asanyarray(value).astype(np.float64) value = np.clip(value, 0.0, 1.0) if value.shape != (4,): - raise ValueError('Color must be a (4,) float') + raise ValueError("Color must be a (4,) float") self._color = value @property def texture(self): - """:class:`.Texture` : The texture for the material. - """ + """:class:`.Texture` : The texture for the material.""" return self._texture @texture.setter @@ -911,8 +880,7 @@ def texture(self, value): image = PIL.Image.open(value) value = Texture(filename=value, image=image) elif not isinstance(value, Texture): - raise ValueError('Invalid type for texture -- expect path to ' - 'image or Texture') + raise ValueError("Invalid type for texture -- expect path to " "image or Texture") self._texture = value @classmethod @@ -920,10 +888,10 @@ def _from_xml(cls, node, path): kwargs = cls._parse(node, path) # Extract the color -- it's weirdly an attribute of a subelement - color = node.find('color') + color = node.find("color") if color is not None: - color = np.fromstring(color.attrib['rgba'], sep=' ', dtype=np.float64) - kwargs['color'] = color + color = np.fromstring(color.attrib["rgba"], sep=" ", dtype=np.float64) + kwargs["color"] = color return Material(**kwargs) @@ -931,20 +899,20 @@ def _to_xml(self, parent, path): # Simplify materials by collecting them at the top level. # For top-level elements, save the full material specification - if parent.tag == 'robot': + if parent.tag == "robot": node = self._unparse(path) if self.color is not None: - color = ET.Element('color') - color.attrib['rgba'] = np.array2string(self.color)[1:-1] + color = ET.Element("color") + color.attrib["rgba"] = np.array2string(self.color)[1:-1] node.append(color) # For non-top-level elements just save the material with a name else: - node = ET.Element('material') - node.attrib['name'] = self.name + node = ET.Element("material") + node.attrib["name"] = self.name return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the material with the prefix applied to all names. Parameters @@ -957,11 +925,7 @@ def copy(self, prefix='', scale=None): :class:`.Material` A deep copy of the material. """ - return Material( - name='{}{}'.format(prefix, self.name), - color=self.color, - texture=self.texture - ) + return Material(name="{}{}".format(prefix, self.name), color=self.color, texture=self.texture) class Collision(URDFType): @@ -978,13 +942,11 @@ class Collision(URDFType): Defaults to identity. 
""" - _ATTRIBS = { - 'name': (str, False) - } + _ATTRIBS = {"name": (str, False)} _ELEMENTS = { - 'geometry': (Geometry, True, False), + "geometry": (Geometry, True, False), } - _TAG = 'collision' + _TAG = "collision" def __init__(self, name, origin, geometry): self.geometry = geometry @@ -993,20 +955,18 @@ def __init__(self, name, origin, geometry): @property def geometry(self): - """:class:`.Geometry` : The geometry of this element. - """ + """:class:`.Geometry` : The geometry of this element.""" return self._geometry @geometry.setter def geometry(self, value): if not isinstance(value, Geometry): - raise TypeError('Must set geometry with Geometry object') + raise TypeError("Must set geometry with Geometry object") self._geometry = value @property def name(self): - """str : The name of this collision element. - """ + """str : The name of this collision element.""" return self._name @name.setter @@ -1017,8 +977,7 @@ def name(self, value): @property def origin(self): - """(4,4) float : The pose of this element relative to the link frame. - """ + """(4,4) float : The pose of this element relative to the link frame.""" return self._origin @origin.setter @@ -1028,7 +987,7 @@ def origin(self, value): @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - kwargs['origin'] = parse_origin(node) + kwargs["origin"] = parse_origin(node) return Collision(**kwargs) def _to_xml(self, parent, path): @@ -1036,7 +995,7 @@ def _to_xml(self, parent, path): node.append(unparse_origin(self.origin)) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1049,13 +1008,13 @@ def copy(self, prefix='', scale=None): :class:`.Visual` A deep copy of the visual. """ - origin=self.origin.copy() + origin = self.origin.copy() if scale is not None: if not isinstance(scale, (list, np.ndarray)): scale = np.repeat(scale, 3) - origin[:3,3] *= scale + origin[:3, 3] *= scale return Collision( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), origin=origin, geometry=self.geometry.copy(prefix=prefix, scale=scale), ) @@ -1076,14 +1035,13 @@ class Visual(URDFType): material : :class:`.Material`, optional The material of the element. """ - _ATTRIBS = { - 'name': (str, False) - } + + _ATTRIBS = {"name": (str, False)} _ELEMENTS = { - 'geometry': (Geometry, True, False), - 'material': (Material, False, False), + "geometry": (Geometry, True, False), + "material": (Material, False, False), } - _TAG = 'visual' + _TAG = "visual" def __init__(self, geometry, name=None, origin=None, material=None): self.geometry = geometry @@ -1093,20 +1051,18 @@ def __init__(self, geometry, name=None, origin=None, material=None): @property def geometry(self): - """:class:`.Geometry` : The geometry of this element. - """ + """:class:`.Geometry` : The geometry of this element.""" return self._geometry @geometry.setter def geometry(self, value): if not isinstance(value, Geometry): - raise TypeError('Must set geometry with Geometry object') + raise TypeError("Must set geometry with Geometry object") self._geometry = value @property def name(self): - """str : The name of this visual element. - """ + """str : The name of this visual element.""" return self._name @name.setter @@ -1117,8 +1073,7 @@ def name(self, value): @property def origin(self): - """(4,4) float : The pose of this element relative to the link frame. 
- """ + """(4,4) float : The pose of this element relative to the link frame.""" return self._origin @origin.setter @@ -1127,21 +1082,20 @@ def origin(self, value): @property def material(self): - """:class:`.Material` : The material for this element. - """ + """:class:`.Material` : The material for this element.""" return self._material @material.setter def material(self, value): if value is not None: if not isinstance(value, Material): - raise TypeError('Must set material with Material object') + raise TypeError("Must set material with Material object") self._material = value @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - kwargs['origin'] = parse_origin(node) + kwargs["origin"] = parse_origin(node) return Visual(**kwargs) def _to_xml(self, parent, path): @@ -1149,7 +1103,7 @@ def _to_xml(self, parent, path): node.append(unparse_origin(self.origin)) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1162,14 +1116,14 @@ def copy(self, prefix='', scale=None): :class:`.Visual` A deep copy of the visual. """ - origin=self.origin.copy() + origin = self.origin.copy() if scale is not None: if not isinstance(scale, (list, np.ndarray)): scale = np.repeat(scale, 3) - origin[:3,3] *= scale + origin[:3, 3] *= scale return Visual( geometry=self.geometry.copy(prefix=prefix, scale=scale), - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), origin=origin, material=(self.material.copy(prefix=prefix) if self.material else None), ) @@ -1188,7 +1142,8 @@ class Inertial(URDFType): The pose of the inertials relative to the link frame. Defaults to identity if not specified. """ - _TAG = 'inertial' + + _TAG = "inertial" def __init__(self, mass, inertia, origin=None): self.mass = mass @@ -1197,8 +1152,7 @@ def __init__(self, mass, inertia, origin=None): @property def mass(self): - """float : The mass of the link in kilograms. - """ + """float : The mass of the link in kilograms.""" return self._mass @mass.setter @@ -1207,21 +1161,19 @@ def mass(self, value): @property def inertia(self): - """(3,3) float : The 3x3 symmetric rotational inertia matrix. - """ + """(3,3) float : The 3x3 symmetric rotational inertia matrix.""" return self._inertia @inertia.setter def inertia(self, value): value = np.asanyarray(value).astype(np.float64) if not np.allclose(value, value.T): - raise ValueError('Inertia must be a symmetric matrix') + raise ValueError("Inertia must be a symmetric matrix") self._inertia = value @property def origin(self): - """(4,4) float : The pose of the inertials relative to the link frame. 
- """ + """(4,4) float : The pose of the inertials relative to the link frame.""" return self._origin @origin.setter @@ -1231,38 +1183,34 @@ def origin(self, value): @classmethod def _from_xml(cls, node, path): origin = parse_origin(node) - mass = float(node.find('mass').attrib['value']) - n = node.find('inertia') - xx = float(n.attrib['ixx']) - xy = float(n.attrib['ixy']) - xz = float(n.attrib['ixz']) - yy = float(n.attrib['iyy']) - yz = float(n.attrib['iyz']) - zz = float(n.attrib['izz']) - inertia = np.array([ - [xx, xy, xz], - [xy, yy, yz], - [xz, yz, zz] - ], dtype=np.float64) + mass = float(node.find("mass").attrib["value"]) + n = node.find("inertia") + xx = float(n.attrib["ixx"]) + xy = float(n.attrib["ixy"]) + xz = float(n.attrib["ixz"]) + yy = float(n.attrib["iyy"]) + yz = float(n.attrib["iyz"]) + zz = float(n.attrib["izz"]) + inertia = np.array([[xx, xy, xz], [xy, yy, yz], [xz, yz, zz]], dtype=np.float64) return Inertial(mass=mass, inertia=inertia, origin=origin) def _to_xml(self, parent, path): - node = ET.Element('inertial') + node = ET.Element("inertial") node.append(unparse_origin(self.origin)) - mass = ET.Element('mass') - mass.attrib['value'] = str(self.mass) + mass = ET.Element("mass") + mass.attrib["value"] = str(self.mass) node.append(mass) - inertia = ET.Element('inertia') - inertia.attrib['ixx'] = str(self.inertia[0,0]) - inertia.attrib['ixy'] = str(self.inertia[0,1]) - inertia.attrib['ixz'] = str(self.inertia[0,2]) - inertia.attrib['iyy'] = str(self.inertia[1,1]) - inertia.attrib['iyz'] = str(self.inertia[1,2]) - inertia.attrib['izz'] = str(self.inertia[2,2]) + inertia = ET.Element("inertia") + inertia.attrib["ixx"] = str(self.inertia[0, 0]) + inertia.attrib["ixy"] = str(self.inertia[0, 1]) + inertia.attrib["ixz"] = str(self.inertia[0, 2]) + inertia.attrib["iyy"] = str(self.inertia[1, 1]) + inertia.attrib["iyz"] = str(self.inertia[1, 2]) + inertia.attrib["izz"] = str(self.inertia[2, 2]) node.append(inertia) return node - def copy(self, prefix='', mass=None, origin=None, inertia=None): + def copy(self, prefix="", mass=None, origin=None, inertia=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1305,11 +1253,9 @@ class JointCalibration(URDFType): When the joint moves in a positive direction, this position will trigger a falling edge. """ - _ATTRIBS = { - 'rising': (float, False), - 'falling': (float, False) - } - _TAG = 'calibration' + + _ATTRIBS = {"rising": (float, False), "falling": (float, False)} + _TAG = "calibration" def __init__(self, rising=None, falling=None): self.rising = rising @@ -1317,8 +1263,7 @@ def __init__(self, rising=None, falling=None): @property def rising(self): - """float : description. - """ + """float : description.""" return self._rising @rising.setter @@ -1329,8 +1274,7 @@ def rising(self, value): @property def falling(self): - """float : description. - """ + """float : description.""" return self._falling @falling.setter @@ -1339,7 +1283,7 @@ def falling(self, value): value = float(value) self._falling = value - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1370,11 +1314,12 @@ class JointDynamics(URDFType): The static friction value of the joint (N for prismatic joints, Nm for revolute). 
""" + _ATTRIBS = { - 'damping': (float, False), - 'friction': (float, False), + "damping": (float, False), + "friction": (float, False), } - _TAG = 'dynamics' + _TAG = "dynamics" def __init__(self, damping, friction): self.damping = damping @@ -1382,8 +1327,7 @@ def __init__(self, damping, friction): @property def damping(self): - """float : The damping value of the joint. - """ + """float : The damping value of the joint.""" return self._damping @damping.setter @@ -1394,8 +1338,7 @@ def damping(self, value): @property def friction(self): - """float : The static friction value of the joint. - """ + """float : The static friction value of the joint.""" return self._friction @friction.setter @@ -1404,7 +1347,7 @@ def friction(self, value): value = float(value) self._friction = value - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1440,12 +1383,12 @@ class JointLimit(URDFType): """ _ATTRIBS = { - 'effort': (float, False), - 'velocity': (float, False), - 'lower': (float, False), - 'upper': (float, False), + "effort": (float, False), + "velocity": (float, False), + "lower": (float, False), + "upper": (float, False), } - _TAG = 'limit' + _TAG = "limit" def __init__(self, effort=None, velocity=None, lower=None, upper=None): self.effort = effort @@ -1455,8 +1398,7 @@ def __init__(self, effort=None, velocity=None, lower=None, upper=None): @property def effort(self): - """float : The maximum joint effort. - """ + """float : The maximum joint effort.""" return self._effort @effort.setter @@ -1465,8 +1407,7 @@ def effort(self, value): @property def velocity(self): - """float : The maximum joint velocity. - """ + """float : The maximum joint velocity.""" return self._velocity @velocity.setter @@ -1475,8 +1416,7 @@ def velocity(self, value): @property def lower(self): - """float : The lower joint limit. - """ + """float : The lower joint limit.""" return self._lower @lower.setter @@ -1487,8 +1427,7 @@ def lower(self, value): @property def upper(self): - """float : The upper joint limit. - """ + """float : The upper joint limit.""" return self._upper @upper.setter @@ -1497,7 +1436,7 @@ def upper(self, value): value = float(value) self._upper = value - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1534,12 +1473,13 @@ class JointMimic(URDFType): offset : float, optional The joint configuration offset. Defaults to 0.0. """ + _ATTRIBS = { - 'joint': (str, True), - 'multiplier': (float, False), - 'offset': (float, False), + "joint": (str, True), + "multiplier": (float, False), + "offset": (float, False), } - _TAG = 'mimic' + _TAG = "mimic" def __init__(self, joint, multiplier=None, offset=None): self.joint = joint @@ -1548,8 +1488,7 @@ def __init__(self, joint, multiplier=None, offset=None): @property def joint(self): - """float : The name of the joint to mimic. - """ + """float : The name of the joint to mimic.""" return self._joint @joint.setter @@ -1558,8 +1497,7 @@ def joint(self, value): @property def multiplier(self): - """float : The multiplier for the joint configuration. 
- """ + """float : The multiplier for the joint configuration.""" return self._multiplier @multiplier.setter @@ -1572,8 +1510,7 @@ def multiplier(self, value): @property def offset(self): - """float : The offset for the joint configuration - """ + """float : The offset for the joint configuration""" return self._offset @offset.setter @@ -1584,7 +1521,7 @@ def offset(self, value): value = 0.0 self._offset = value - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the joint mimic with the prefix applied to all names. Parameters @@ -1597,11 +1534,7 @@ def copy(self, prefix='', scale=None): :class:`.JointMimic` A deep copy of the joint mimic. """ - return JointMimic( - joint='{}{}'.format(prefix, self.joint), - multiplier=self.multiplier, - offset=self.offset - ) + return JointMimic(joint="{}{}".format(prefix, self.joint), multiplier=self.multiplier, offset=self.offset) class SafetyController(URDFType): @@ -1622,16 +1555,16 @@ class SafetyController(URDFType): The upper joint boundary where the safety controller kicks in. Defaults to 0.0. """ + _ATTRIBS = { - 'k_velocity': (float, True), - 'k_position': (float, False), - 'soft_lower_limit': (float, False), - 'soft_upper_limit': (float, False), + "k_velocity": (float, True), + "k_position": (float, False), + "soft_lower_limit": (float, False), + "soft_upper_limit": (float, False), } - _TAG = 'safety_controller' + _TAG = "safety_controller" - def __init__(self, k_velocity, k_position=None, soft_lower_limit=None, - soft_upper_limit=None): + def __init__(self, k_velocity, k_position=None, soft_lower_limit=None, soft_upper_limit=None): self.k_velocity = k_velocity self.k_position = k_position self.soft_lower_limit = soft_lower_limit @@ -1639,8 +1572,7 @@ def __init__(self, k_velocity, k_position=None, soft_lower_limit=None, @property def soft_lower_limit(self): - """float : The soft lower limit where the safety controller kicks in. - """ + """float : The soft lower limit where the safety controller kicks in.""" return self._soft_lower_limit @soft_lower_limit.setter @@ -1653,8 +1585,7 @@ def soft_lower_limit(self, value): @property def soft_upper_limit(self): - """float : The soft upper limit where the safety controller kicks in. - """ + """float : The soft upper limit where the safety controller kicks in.""" return self._soft_upper_limit @soft_upper_limit.setter @@ -1667,8 +1598,7 @@ def soft_upper_limit(self, value): @property def k_position(self): - """float : A relation between the position and velocity limits. - """ + """float : A relation between the position and velocity limits.""" return self._k_position @k_position.setter @@ -1681,15 +1611,14 @@ def k_position(self, value): @property def k_velocity(self): - """float : A relation between the effort and velocity limits. - """ + """float : A relation between the effort and velocity limits.""" return self._k_velocity @k_velocity.setter def k_velocity(self, value): self._k_velocity = float(value) - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1728,21 +1657,20 @@ class Actuator(URDFType): hardwareInterfaces : list of str, optional The supported hardware interfaces to the actuator. 
""" + _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } - _TAG = 'actuator' + _TAG = "actuator" - def __init__(self, name, mechanicalReduction=None, - hardwareInterfaces=None): + def __init__(self, name, mechanicalReduction=None, hardwareInterfaces=None): self.name = name self.mechanicalReduction = mechanicalReduction self.hardwareInterfaces = hardwareInterfaces @property def name(self): - """str : The name of this actuator. - """ + """str : The name of this actuator.""" return self._name @name.setter @@ -1751,8 +1679,7 @@ def name(self, value): @property def mechanicalReduction(self): - """str : A specifier for the type of mechanical reduction. - """ + """str : A specifier for the type of mechanical reduction.""" return self._mechanicalReduction @mechanicalReduction.setter @@ -1763,8 +1690,7 @@ def mechanicalReduction(self, value): @property def hardwareInterfaces(self): - """list of str : The supported hardware interfaces. - """ + """list of str : The supported hardware interfaces.""" return self._hardwareInterfaces @hardwareInterfaces.setter @@ -1780,30 +1706,30 @@ def hardwareInterfaces(self, value): @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - mr = node.find('mechanicalReduction') + mr = node.find("mechanicalReduction") if mr is not None: mr = float(mr.text) - kwargs['mechanicalReduction'] = mr - hi = node.findall('hardwareInterface') + kwargs["mechanicalReduction"] = mr + hi = node.findall("hardwareInterface") if len(hi) > 0: hi = [h.text for h in hi] - kwargs['hardwareInterfaces'] = hi + kwargs["hardwareInterfaces"] = hi return Actuator(**kwargs) def _to_xml(self, parent, path): node = self._unparse(path) if self.mechanicalReduction is not None: - mr = ET.Element('mechanicalReduction') + mr = ET.Element("mechanicalReduction") mr.text = str(self.mechanicalReduction) node.append(mr) if len(self.hardwareInterfaces) > 0: for hi in self.hardwareInterfaces: - h = ET.Element('hardwareInterface') + h = ET.Element("hardwareInterface") h.text = hi node.append(h) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the visual with the prefix applied to all names. Parameters @@ -1817,7 +1743,7 @@ def copy(self, prefix='', scale=None): A deep copy of the visual. """ return Actuator( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), mechanicalReduction=self.mechanicalReduction, hardwareInterfaces=self.hardwareInterfaces.copy(), ) @@ -1833,10 +1759,11 @@ class TransmissionJoint(URDFType): hardwareInterfaces : list of str, optional The supported hardware interfaces to the actuator. """ + _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } - _TAG = 'joint' + _TAG = "joint" def __init__(self, name, hardwareInterfaces): self.name = name @@ -1844,8 +1771,7 @@ def __init__(self, name, hardwareInterfaces): @property def name(self): - """str : The name of this transmission joint. - """ + """str : The name of this transmission joint.""" return self._name @name.setter @@ -1854,8 +1780,7 @@ def name(self, value): @property def hardwareInterfaces(self): - """list of str : The supported hardware interfaces. 
- """ + """list of str : The supported hardware interfaces.""" return self._hardwareInterfaces @hardwareInterfaces.setter @@ -1871,22 +1796,22 @@ def hardwareInterfaces(self, value): @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - hi = node.findall('hardwareInterface') + hi = node.findall("hardwareInterface") if len(hi) > 0: hi = [h.text for h in hi] - kwargs['hardwareInterfaces'] = hi + kwargs["hardwareInterfaces"] = hi return TransmissionJoint(**kwargs) def _to_xml(self, parent, path): node = self._unparse(path) if len(self.hardwareInterfaces) > 0: for hi in self.hardwareInterfaces: - h = ET.Element('hardwareInterface') + h = ET.Element("hardwareInterface") h.text = hi node.append(h) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -1900,7 +1825,7 @@ def copy(self, prefix='', scale=None): A deep copy. """ return TransmissionJoint( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), hardwareInterfaces=self.hardwareInterfaces.copy(), ) @@ -1925,14 +1850,15 @@ class Transmission(URDFType): actuators : list of :class:`.Actuator` The actuators connected to this transmission. """ + _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } _ELEMENTS = { - 'joints': (TransmissionJoint, True, True), - 'actuators': (Actuator, True, True), + "joints": (TransmissionJoint, True, True), + "actuators": (Actuator, True, True), } - _TAG = 'transmission' + _TAG = "transmission" def __init__(self, name, trans_type, joints=None, actuators=None): self.name = name @@ -1942,8 +1868,7 @@ def __init__(self, name, trans_type, joints=None, actuators=None): @property def name(self): - """str : The name of this transmission. - """ + """str : The name of this transmission.""" return self._name @name.setter @@ -1952,8 +1877,7 @@ def name(self, value): @property def trans_type(self): - """str : The type of this transmission. - """ + """str : The type of this transmission.""" return self._trans_type @trans_type.setter @@ -1975,15 +1899,12 @@ def joints(self, value): value = list(value) for v in value: if not isinstance(v, TransmissionJoint): - raise TypeError( - 'Joints expects a list of TransmissionJoint' - ) + raise TypeError("Joints expects a list of TransmissionJoint") self._joints = value @property def actuators(self): - """:class:`.Actuator` : The actuators the transmission is connected to. - """ + """:class:`.Actuator` : The actuators the transmission is connected to.""" return self._actuators @actuators.setter @@ -1994,25 +1915,23 @@ def actuators(self, value): value = list(value) for v in value: if not isinstance(v, Actuator): - raise TypeError( - 'Actuators expects a list of Actuator' - ) + raise TypeError("Actuators expects a list of Actuator") self._actuators = value @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - kwargs['trans_type'] = node.find('type').text + kwargs["trans_type"] = node.find("type").text return Transmission(**kwargs) def _to_xml(self, parent, path): node = self._unparse(path) - ttype = ET.Element('type') + ttype = ET.Element("type") ttype.text = self.trans_type node.append(ttype) return node - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy with the prefix applied to all names. Parameters @@ -2026,7 +1945,7 @@ def copy(self, prefix='', scale=None): A deep copy. 
""" return Transmission( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), trans_type=self.trans_type, joints=[j.copy(prefix) for j in self.joints], actuators=[a.copy(prefix) for a in self.actuators], @@ -2077,23 +1996,34 @@ class Joint(URDFType): mimic : :class:`JointMimic`, optional Joint mimicry information. """ - TYPES = ['fixed', 'prismatic', 'revolute', - 'continuous', 'floating', 'planar'] + + TYPES = ["fixed", "prismatic", "revolute", "continuous", "floating", "planar"] _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } _ELEMENTS = { - 'dynamics': (JointDynamics, False, False), - 'limit': (JointLimit, False, False), - 'mimic': (JointMimic, False, False), - 'safety_controller': (SafetyController, False, False), - 'calibration': (JointCalibration, False, False), + "dynamics": (JointDynamics, False, False), + "limit": (JointLimit, False, False), + "mimic": (JointMimic, False, False), + "safety_controller": (SafetyController, False, False), + "calibration": (JointCalibration, False, False), } - _TAG = 'joint' - - def __init__(self, name, joint_type, parent, child, axis=None, origin=None, - limit=None, dynamics=None, safety_controller=None, - calibration=None, mimic=None): + _TAG = "joint" + + def __init__( + self, + name, + joint_type, + parent, + child, + axis=None, + origin=None, + limit=None, + dynamics=None, + safety_controller=None, + calibration=None, + mimic=None, + ): self.name = name self.parent = parent self.child = child @@ -2108,8 +2038,7 @@ def __init__(self, name, joint_type, parent, child, axis=None, origin=None, @property def name(self): - """str : Name for this joint. - """ + """str : Name for this joint.""" return self._name @name.setter @@ -2118,21 +2047,19 @@ def name(self, value): @property def joint_type(self): - """str : The type of this joint. - """ + """str : The type of this joint.""" return self._joint_type @joint_type.setter def joint_type(self, value): value = str(value) if value not in Joint.TYPES: - raise ValueError('Unsupported joint type {}'.format(value)) + raise ValueError("Unsupported joint type {}".format(value)) self._joint_type = value @property def parent(self): - """str : The name of the parent link. - """ + """str : The name of the parent link.""" return self._parent @parent.setter @@ -2141,8 +2068,7 @@ def parent(self, value): @property def child(self): - """str : The name of the child link. - """ + """str : The name of the child link.""" return self._child @child.setter @@ -2151,8 +2077,7 @@ def child(self, value): @property def axis(self): - """(3,) float : The joint axis in the joint frame. - """ + """(3,) float : The joint axis in the joint frame.""" return self._axis @axis.setter @@ -2164,7 +2089,7 @@ def axis(self, value): else: value = np.asanyarray(value, dtype=np.float64) if value.shape != (3,): - raise ValueError('Invalid shape for axis, should be (3,)') + raise ValueError("Invalid shape for axis, should be (3,)") value = value / np.linalg.norm(value) self._axis = value @@ -2181,70 +2106,64 @@ def origin(self, value): @property def limit(self): - """:class:`.JointLimit` : The limits for this joint. 
- """ + """:class:`.JointLimit` : The limits for this joint.""" return self._limit @limit.setter def limit(self, value): if value is None: - if self.joint_type in ['prismatic', 'revolute']: - raise ValueError('Require joint limit for prismatic and ' - 'revolute joints') + if self.joint_type in ["prismatic", "revolute"]: + raise ValueError("Require joint limit for prismatic and " "revolute joints") elif not isinstance(value, JointLimit): - raise TypeError('Expected JointLimit type') + raise TypeError("Expected JointLimit type") self._limit = value @property def dynamics(self): - """:class:`.JointDynamics` : The dynamics for this joint. - """ + """:class:`.JointDynamics` : The dynamics for this joint.""" return self._dynamics @dynamics.setter def dynamics(self, value): if value is not None: if not isinstance(value, JointDynamics): - raise TypeError('Expected JointDynamics type') + raise TypeError("Expected JointDynamics type") self._dynamics = value @property def safety_controller(self): - """:class:`.SafetyController` : The safety controller for this joint. - """ + """:class:`.SafetyController` : The safety controller for this joint.""" return self._safety_controller @safety_controller.setter def safety_controller(self, value): if value is not None: if not isinstance(value, SafetyController): - raise TypeError('Expected SafetyController type') + raise TypeError("Expected SafetyController type") self._safety_controller = value @property def calibration(self): - """:class:`.JointCalibration` : The calibration for this joint. - """ + """:class:`.JointCalibration` : The calibration for this joint.""" return self._calibration @calibration.setter def calibration(self, value): if value is not None: if not isinstance(value, JointCalibration): - raise TypeError('Expected JointCalibration type') + raise TypeError("Expected JointCalibration type") self._calibration = value @property def mimic(self): - """:class:`.JointMimic` : The mimic for this joint. - """ + """:class:`.JointMimic` : The mimic for this joint.""" return self._mimic @mimic.setter def mimic(self, value): if value is not None: if not isinstance(value, JointMimic): - raise TypeError('Expected JointMimic type') + raise TypeError("Expected JointMimic type") self._mimic = value def is_valid(self, cfg): @@ -2260,7 +2179,7 @@ def is_valid(self, cfg): is_valid : bool True if the configuration is valid, and False otherwise. 
""" - if self.joint_type not in ['fixed', 'revolute']: + if self.joint_type not in ["fixed", "revolute"]: return True if self.joint_limit is None: return True @@ -2271,7 +2190,7 @@ def is_valid(self, cfg): lower = self.limit.lower if self.limit.upper is not None: upper = self.limit.upper - return (cfg >= lower and cfg <= upper) + return cfg >= lower and cfg <= upper def get_child_pose(self, cfg=None): """Computes the child pose relative to a parent pose for a given @@ -2300,45 +2219,43 @@ def get_child_pose(self, cfg=None): """ if cfg is None: return self.origin - elif self.joint_type == 'fixed': + elif self.joint_type == "fixed": return self.origin - elif self.joint_type in ['revolute', 'continuous']: + elif self.joint_type in ["revolute", "continuous"]: if cfg is None: cfg = 0.0 else: cfg = float(cfg) R = trimesh.transformations.rotation_matrix(cfg, self.axis) return self.origin.dot(R) - elif self.joint_type == 'prismatic': + elif self.joint_type == "prismatic": if cfg is None: cfg = 0.0 else: cfg = float(cfg) translation = np.eye(4, dtype=np.float64) - translation[:3,3] = self.axis * cfg + translation[:3, 3] = self.axis * cfg return self.origin.dot(translation) - elif self.joint_type == 'planar': + elif self.joint_type == "planar": if cfg is None: cfg = np.zeros(2, dtype=np.float64) else: cfg = np.asanyarray(cfg, dtype=np.float64) if cfg.shape != (2,): - raise ValueError( - '(2,) float configuration required for planar joints' - ) + raise ValueError("(2,) float configuration required for planar joints") translation = np.eye(4, dtype=np.float64) - translation[:3,3] = self.origin[:3,:2].dot(cfg) + translation[:3, 3] = self.origin[:3, :2].dot(cfg) return self.origin.dot(translation) - elif self.joint_type == 'floating': + elif self.joint_type == "floating": if cfg is None: cfg = np.zeros(6, dtype=np.float64) else: cfg = configure_origin(cfg) if cfg is None: - raise ValueError('Invalid configuration for floating joint') + raise ValueError("Invalid configuration for floating joint") return self.origin.dot(cfg) else: - raise ValueError('Invalid configuration') + raise ValueError("Invalid configuration") def get_child_poses(self, cfg, n_cfgs): """Computes the child pose relative to a parent pose for a given set of @@ -2366,52 +2283,52 @@ def get_child_poses(self, cfg, n_cfgs): """ if cfg is None: return np.tile(self.origin, (n_cfgs, 1, 1)) - elif self.joint_type == 'fixed': + elif self.joint_type == "fixed": return np.tile(self.origin, (n_cfgs, 1, 1)) - elif self.joint_type in ['revolute', 'continuous']: + elif self.joint_type in ["revolute", "continuous"]: if cfg is None: cfg = np.zeros(n_cfgs) return np.matmul(self.origin, self._rotation_matrices(cfg, self.axis)) - elif self.joint_type == 'prismatic': + elif self.joint_type == "prismatic": if cfg is None: cfg = np.zeros(n_cfgs) translation = np.tile(np.eye(4), (n_cfgs, 1, 1)) - translation[:,:3,3] = self.axis * cfg[:,np.newaxis] + translation[:, :3, 3] = self.axis * cfg[:, np.newaxis] return np.matmul(self.origin, translation) - elif self.joint_type == 'planar': + elif self.joint_type == "planar": raise NotImplementedError() - elif self.joint_type == 'floating': + elif self.joint_type == "floating": raise NotImplementedError() else: - raise ValueError('Invalid configuration') + raise ValueError("Invalid configuration") @classmethod def _from_xml(cls, node, path): kwargs = cls._parse(node, path) - kwargs['joint_type'] = str(node.attrib['type']) - kwargs['parent'] = node.find('parent').attrib['link'] - kwargs['child'] = 
node.find('child').attrib['link'] - axis = node.find('axis') + kwargs["joint_type"] = str(node.attrib["type"]) + kwargs["parent"] = node.find("parent").attrib["link"] + kwargs["child"] = node.find("child").attrib["link"] + axis = node.find("axis") if axis is not None: - axis = np.fromstring(axis.attrib['xyz'], sep=' ') - kwargs['axis'] = axis - kwargs['origin'] = parse_origin(node) + axis = np.fromstring(axis.attrib["xyz"], sep=" ") + kwargs["axis"] = axis + kwargs["origin"] = parse_origin(node) return Joint(**kwargs) def _to_xml(self, parent, path): node = self._unparse(path) - parent = ET.Element('parent') - parent.attrib['link'] = self.parent + parent = ET.Element("parent") + parent.attrib["link"] = self.parent node.append(parent) - child = ET.Element('child') - child.attrib['link'] = self.child + child = ET.Element("child") + child.attrib["link"] = self.child node.append(child) if self.axis is not None: - axis = ET.Element('axis') - axis.attrib['xyz'] = np.array2string(self.axis)[1:-1] + axis = ET.Element("axis") + axis.attrib["xyz"] = np.array2string(self.axis)[1:-1] node.append(axis) node.append(unparse_origin(self.origin)) - node.attrib['type'] = self.joint_type + node.attrib["type"] = self.joint_type return node def _rotation_matrices(self, angles, axis): @@ -2433,21 +2350,20 @@ def _rotation_matrices(self, angles, axis): sina = np.sin(angles) cosa = np.cos(angles) M = np.tile(np.eye(4), (len(angles), 1, 1)) - M[:,0,0] = cosa - M[:,1,1] = cosa - M[:,2,2] = cosa - M[:,:3,:3] += ( - np.tile(np.outer(axis, axis), (len(angles), 1, 1)) * - (1.0 - cosa)[:, np.newaxis, np.newaxis] + M[:, 0, 0] = cosa + M[:, 1, 1] = cosa + M[:, 2, 2] = cosa + M[:, :3, :3] += np.tile(np.outer(axis, axis), (len(angles), 1, 1)) * (1.0 - cosa)[:, np.newaxis, np.newaxis] + M[:, :3, :3] += ( + np.tile( + np.array([[0.0, -axis[2], axis[1]], [axis[2], 0.0, -axis[0]], [-axis[1], axis[0], 0.0]]), + (len(angles), 1, 1), + ) + * sina[:, np.newaxis, np.newaxis] ) - M[:,:3,:3] += np.tile(np.array([ - [0.0, -axis[2], axis[1]], - [axis[2], 0.0, -axis[0]], - [-axis[1], axis[0], 0.0]] - ), (len(angles), 1, 1)) * sina[:, np.newaxis, np.newaxis] return M - def copy(self, prefix='', scale=None): + def copy(self, prefix="", scale=None): """Create a deep copy of the joint with the prefix applied to all names. 
Parameters @@ -2464,20 +2380,19 @@ def copy(self, prefix='', scale=None): if scale is not None: if not isinstance(scale, (list, np.ndarray)): scale = np.repeat(scale, 3) - origin[:3,3] *= scale + origin[:3, 3] *= scale cpy = Joint( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), joint_type=self.joint_type, - parent='{}{}'.format(prefix, self.parent), - child='{}{}'.format(prefix, self.child), + parent="{}{}".format(prefix, self.parent), + child="{}{}".format(prefix, self.child), axis=self.axis.copy(), origin=origin, limit=(self.limit.copy(prefix, scale) if self.limit else None), - dynamics=(self.dynamics.copy(prefix,scale) if self.dynamics else None), - safety_controller=(self.safety_controller.copy(prefix, scale) if - self.safety_controller else None), + dynamics=(self.dynamics.copy(prefix, scale) if self.dynamics else None), + safety_controller=(self.safety_controller.copy(prefix, scale) if self.safety_controller else None), calibration=(self.calibration.copy(prefix, scale) if self.calibration else None), - mimic=(self.mimic.copy(prefix=prefix, scale=scale) if self.mimic else None) + mimic=(self.mimic.copy(prefix=prefix, scale=scale) if self.mimic else None), ) return cpy @@ -2498,14 +2413,14 @@ class Link(URDFType): """ _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } _ELEMENTS = { - 'inertial': (Inertial, False, False), - 'visuals': (Visual, False, True), - 'collisions': (Collision, False, True), + "inertial": (Inertial, False, False), + "visuals": (Visual, False, True), + "collisions": (Collision, False, True), } - _TAG = 'link' + _TAG = "link" def __init__(self, name, inertial, visuals, collisions): self.name = name @@ -2517,8 +2432,7 @@ def __init__(self, name, inertial, visuals, collisions): @property def name(self): - """str : The name of this link. - """ + """str : The name of this link.""" return self._name @name.setter @@ -2527,14 +2441,13 @@ def name(self, value): @property def inertial(self): - """:class:`.Inertial` : Inertial properties of the link. - """ + """:class:`.Inertial` : Inertial properties of the link.""" return self._inertial @inertial.setter def inertial(self, value): if value is not None and not isinstance(value, Inertial): - raise TypeError('Expected Inertial object') + raise TypeError("Expected Inertial object") # Set default inertial if value is None: value = Inertial(mass=1.0, inertia=np.eye(3)) @@ -2542,8 +2455,7 @@ def inertial(self, value): @property def visuals(self): - """list of :class:`.Visual` : The visual properties of this link. - """ + """list of :class:`.Visual` : The visual properties of this link.""" return self._visuals @visuals.setter @@ -2554,13 +2466,12 @@ def visuals(self, value): value = list(value) for v in value: if not isinstance(v, Visual): - raise ValueError('Expected list of Visual objects') + raise ValueError("Expected list of Visual objects") self._visuals = value @property def collisions(self): - """list of :class:`.Collision` : The collision properties of this link. 
- """ + """list of :class:`.Collision` : The collision properties of this link.""" return self._collisions @collisions.setter @@ -2571,7 +2482,7 @@ def collisions(self, value): value = list(value) for v in value: if not isinstance(v, Collision): - raise ValueError('Expected list of Collision objects') + raise ValueError("Expected list of Collision objects") self._collisions = value @property @@ -2590,16 +2501,16 @@ def collision_mesh(self): if c.geometry.mesh is not None: if c.geometry.mesh.scale is not None: S = np.eye(4) - S[:3,:3] = np.diag(c.geometry.mesh.scale) + S[:3, :3] = np.diag(c.geometry.mesh.scale) pose = pose.dot(S) m.apply_transform(pose) meshes.append(m) if len(meshes) == 0: return None - self._collision_mesh = (meshes[0] + meshes[1:]) + self._collision_mesh = meshes[0] + meshes[1:] return self._collision_mesh - def copy(self, prefix='', scale=None, collision_only=False): + def copy(self, prefix="", scale=None, collision_only=False): """Create a deep copy of the link. Parameters @@ -2619,21 +2530,20 @@ def copy(self, prefix='', scale=None, collision_only=False): sm = np.eye(4) if not isinstance(scale, (list, np.ndarray)): scale = np.repeat(scale, 3) - sm[:3,:3] = np.diag(scale) + sm[:3, :3] = np.diag(scale) cm = self.collision_mesh.copy() cm.density = self.inertial.mass / cm.volume cm.apply_transform(sm) cmm = np.eye(4) - cmm[:3,3] = cm.center_mass - inertial = Inertial(mass=cm.mass, inertia=cm.moment_inertia, - origin=cmm) + cmm[:3, 3] = cm.center_mass + inertial = Inertial(mass=cm.mass, inertia=cm.moment_inertia, origin=cmm) visuals = None if not collision_only: - visuals=[v.copy(prefix=prefix, scale=scale) for v in self.visuals] + visuals = [v.copy(prefix=prefix, scale=scale) for v in self.visuals] cpy = Link( - name='{}{}'.format(prefix, self.name), + name="{}{}".format(prefix, self.name), inertial=inertial, visuals=visuals, collisions=[v.copy(prefix=prefix, scale=scale) for v in self.collisions], @@ -2664,19 +2574,19 @@ class URDF(URDFType): other_xml : str, optional A string containing any extra XML for extensions. 
""" + _ATTRIBS = { - 'name': (str, True), + "name": (str, True), } _ELEMENTS = { - 'links': (Link, True, True), - 'joints': (Joint, False, True), - 'transmissions': (Transmission, False, True), - 'materials': (Material, False, True), + "links": (Link, True, True), + "joints": (Joint, False, True), + "transmissions": (Transmission, False, True), + "materials": (Material, False, True), } - _TAG = 'robot' + _TAG = "robot" - def __init__(self, name, links, joints=None, - transmissions=None, materials=None, other_xml=None): + def __init__(self, name, links, joints=None, transmissions=None, materials=None, other_xml=None): if joints is None: joints = [] if transmissions is None: @@ -2701,25 +2611,22 @@ def __init__(self, name, links, joints=None, for x in self._links: if x.name in self._link_map: - raise ValueError('Two links with name {} found'.format(x.name)) + raise ValueError("Two links with name {} found".format(x.name)) self._link_map[x.name] = x for x in self._joints: if x.name in self._joint_map: - raise ValueError('Two joints with name {} ' - 'found'.format(x.name)) + raise ValueError("Two joints with name {} " "found".format(x.name)) self._joint_map[x.name] = x for x in self._transmissions: if x.name in self._transmission_map: - raise ValueError('Two transmissions with name {} ' - 'found'.format(x.name)) + raise ValueError("Two transmissions with name {} " "found".format(x.name)) self._transmission_map[x.name] = x for x in self._materials: if x.name in self._material_map: - raise ValueError('Two materials with name {} ' - 'found'.format(x.name)) + raise ValueError("Two materials with name {} " "found".format(x.name)) self._material_map[x.name] = x # Synchronize materials between links and top-level set @@ -2746,9 +2653,7 @@ def __init__(self, name, links, joints=None, self._base_link, self._end_links = self._validate_graph() # Cache the paths to the base link - self._paths_to_base = nx.shortest_path( - self._G, target=self._base_link - ) + self._paths_to_base = nx.shortest_path(self._G, target=self._base_link) self._actuated_joints = self._sort_joints(actuated_joints) @@ -2759,8 +2664,7 @@ def __init__(self, name, links, joints=None, @property def name(self): - """str : The name of the URDF. - """ + """str : The name of the URDF.""" return self._name @name.setter @@ -2849,8 +2753,7 @@ def material_map(self): @property def other_xml(self): - """str : Any extra XML that belongs with the URDF. - """ + """str : Any extra XML that belongs with the URDF.""" return self._other_xml @other_xml.setter @@ -2904,7 +2807,7 @@ def cfg_to_vector(self, cfg): vec[i] = cfg[jn] return vec else: - raise ValueError('Invalid configuration: {}'.format(cfg)) + raise ValueError("Invalid configuration: {}".format(cfg)) @property def base_link(self): @@ -2944,8 +2847,7 @@ def joint_limit_cfgs(self): @property def joint_limits(self): - """(n,2) float : A lower and upper limit for each joint. 
- """ + """(n,2) float : A lower and upper limit for each joint.""" limits = [] for joint in self.actuated_joints: limit = [-np.infty, np.infty] @@ -3002,8 +2904,7 @@ def link_fk(self, cfg=None, link=None, links=None, use_names=False): elif isinstance(lnk, Link): link_set.add(lnk) else: - raise TypeError('Got object of type {} in links list' - .format(type(lnk))) + raise TypeError("Got object of type {} in links list".format(type(lnk))) else: link_set = self.links @@ -3017,7 +2918,7 @@ def link_fk(self, cfg=None, link=None, links=None, use_names=False): for i in range(len(path) - 1): child = path[i] parent = path[i + 1] - joint = self._G.get_edge_data(child, parent)['joint'] + joint = self._G.get_edge_data(child, parent)["joint"] cfg = None if joint.mimic is not None: @@ -3087,8 +2988,7 @@ def link_fk_batch(self, cfgs=None, link=None, links=None, use_names=False): elif isinstance(lnk, Link): link_set.add(lnk) else: - raise TypeError('Got object of type {} in links list' - .format(type(lnk))) + raise TypeError("Got object of type {} in links list".format(type(lnk))) else: link_set = self.links @@ -3102,7 +3002,7 @@ def link_fk_batch(self, cfgs=None, link=None, links=None, use_names=False): for i in range(len(path) - 1): child = path[i] parent = path[i + 1] - joint = self._G.get_edge_data(child, parent)['joint'] + joint = self._G.get_edge_data(child, parent)["joint"] cfg_vals = None if joint.mimic is not None: @@ -3223,7 +3123,7 @@ def visual_trimesh_fk(self, cfg=None, links=None): if visual.geometry.mesh is not None: if visual.geometry.mesh.scale is not None: S = np.eye(4, dtype=np.float64) - S[:3,:3] = np.diag(visual.geometry.mesh.scale) + S[:3, :3] = np.diag(visual.geometry.mesh.scale) pose = pose.dot(S) fk[mesh] = pose return fk @@ -3261,7 +3161,7 @@ def visual_trimesh_fk_batch(self, cfgs=None, links=None): if visual.geometry.mesh is not None: if visual.geometry.mesh.scale is not None: S = np.eye(4, dtype=np.float64) - S[:3,:3] = np.diag(visual.geometry.mesh.scale) + S[:3, :3] = np.diag(visual.geometry.mesh.scale) poses = np.matmul(poses, S) fk[mesh] = poses return fk @@ -3444,7 +3344,7 @@ def animate(self, cfg_trajectory=None, loop_time=3.0, use_collision=False): ct = cfg_trajectory traj_len = None # Length of the trajectory in steps - ct_np = {} # Numpyified trajectory + ct_np = {} # Numpyified trajectory # If trajectory not specified, articulate between the limits. 
if ct is None: @@ -3461,18 +3361,17 @@ def animate(self, cfg_trajectory=None, loop_time=3.0, use_collision=False): if traj_len is None: traj_len = len(val) elif traj_len != len(val): - raise ValueError('Trajectories must be same length') + raise ValueError("Trajectories must be same length") ct_np[k] = val elif isinstance(ct, (list, tuple, np.ndarray)): ct = np.asanyarray(ct).astype(np.float64) if ct.ndim == 1: ct = ct.reshape(-1, 1) if ct.ndim != 2 or ct.shape[1] != len(self.actuated_joints): - raise ValueError('Cfg trajectory must have entry for each joint') - ct_np = {j: ct[:,i] for i, j in enumerate(self.actuated_joints)} + raise ValueError("Cfg trajectory must have entry for each joint") + ct_np = {j: ct[:, i] for i, j in enumerate(self.actuated_joints)} else: - raise TypeError('Invalid type for cfg_trajectory: {}' - .format(type(cfg_trajectory))) + raise TypeError("Invalid type for cfg_trajectory: {}".format(type(cfg_trajectory))) # If there isn't a trajectory to render, just show the model and exit if len(ct_np) == 0 or traj_len < 2: @@ -3491,14 +3390,12 @@ def animate(self, cfg_trajectory=None, loop_time=3.0, use_collision=False): # Compute alphas for each time right_inds = np.digitize(times, bins, right=True) right_inds[right_inds == 0] = 1 - alphas = ((bins[right_inds] - times) / - (bins[right_inds] - bins[right_inds - 1])) + alphas = (bins[right_inds] - times) / (bins[right_inds] - bins[right_inds - 1]) # Create the new interpolated trajectory new_ct = {} for k in ct_np: - new_ct[k] = (alphas * ct_np[k][right_inds - 1] + - (1.0 - alphas) * ct_np[k][right_inds]) + new_ct[k] = alphas * ct_np[k][right_inds - 1] + (1.0 - alphas) * ct_np[k][right_inds] # Create the scene if use_collision: @@ -3518,9 +3415,7 @@ def animate(self, cfg_trajectory=None, loop_time=3.0, use_collision=False): blp = self.link_fk(links=[self.base_link])[self.base_link] # Pop the visualizer asynchronously - v = pyrender.Viewer(scene, run_in_thread=True, - use_raymond_lighting=True, - view_center=blp[:3,3]) + v = pyrender.Viewer(scene, run_in_thread=True, use_raymond_lighting=True, view_center=blp[:3, 3]) # Now, run our loop i = 0 @@ -3570,7 +3465,7 @@ def show(self, cfg=None, use_collision=False): scene.add(mesh, pose=pose) pyrender.Viewer(scene, use_raymond_lighting=True) - def copy(self, name=None, prefix='', scale=None, collision_only=False): + def copy(self, name=None, prefix="", scale=None, collision_only=False): """Make a deep copy of the URDF. Parameters @@ -3590,12 +3485,12 @@ def copy(self, name=None, prefix='', scale=None, collision_only=False): The copied URDF. """ return URDF( - name = (name if name else self.name), + name=(name if name else self.name), links=[v.copy(prefix, scale, collision_only) for v in self.links], joints=[v.copy(prefix, scale) for v in self.joints], transmissions=[v.copy(prefix, scale) for v in self.transmissions], materials=[v.copy(prefix, scale) for v in self.materials], - other_xml=self.other_xml + other_xml=self.other_xml, ) def save(self, file_obj): @@ -3621,10 +3516,9 @@ def save(self, file_obj): node = self._to_xml(None, path) tree = ET.ElementTree(node) - tree.write(file_obj, pretty_print=True, - xml_declaration=True, encoding='utf-8') + tree.write(file_obj, pretty_print=True, xml_declaration=True, encoding="utf-8") - def join(self, other, link, origin=None, name=None, prefix=''): + def join(self, other, link, origin=None, name=None, prefix=""): """Join another URDF to this one by rigidly fixturing the two at a link. 
Parameters @@ -3654,12 +3548,12 @@ def join(self, other, link, origin=None, name=None, prefix=''): link_names = set(myself.link_map.keys()) other_link_names = set(other.link_map.keys()) if len(link_names.intersection(other_link_names)) > 0: - raise ValueError('Cannot merge two URDFs with shared link names') + raise ValueError("Cannot merge two URDFs with shared link names") joint_names = set(myself.joint_map.keys()) other_joint_names = set(other.joint_map.keys()) if len(joint_names.intersection(other_joint_names)) > 0: - raise ValueError('Cannot merge two URDFs with shared joint names') + raise ValueError("Cannot merge two URDFs with shared joint names") links = myself.links + other.links joints = myself.joints + other.joints @@ -3670,20 +3564,20 @@ def join(self, other, link, origin=None, name=None, prefix=''): name = self.name # Create joint that links the two rigidly - joints.append(Joint( - name='{}_join_{}{}_joint'.format(self.name, prefix, other.name), - joint_type='fixed', - parent=link if isinstance(link, str) else link.name, - child=other.base_link.name, - origin=origin - )) + joints.append( + Joint( + name="{}_join_{}{}_joint".format(self.name, prefix, other.name), + joint_type="fixed", + parent=link if isinstance(link, str) else link.name, + child=other.base_link.name, + origin=origin, + ) + ) - return URDF(name=name, links=links, joints=joints, transmissions=transmissions, - materials=materials) + return URDF(name=name, links=links, joints=joints, transmissions=transmissions, materials=materials) def _merge_materials(self): - """Merge the top-level material set with the link materials. - """ + """Merge the top-level material set with the link materials.""" for link in self.links: for v in link.visuals: if v.material is None: @@ -3713,12 +3607,11 @@ def load(file_obj): """ if isinstance(file_obj, six.string_types): if os.path.isfile(file_obj): - parser = ET.XMLParser(remove_comments=True, - remove_blank_text=True) + parser = ET.XMLParser(remove_comments=True, remove_blank_text=True) tree = ET.parse(file_obj, parser=parser) path, _ = os.path.split(file_obj) else: - raise ValueError('{} is not a file'.format(file_obj)) + raise ValueError("{} is not a file".format(file_obj)) else: parser = ET.XMLParser(remove_comments=True, remove_blank_text=True) tree = ET.parse(file_obj, parser=parser) @@ -3744,26 +3637,19 @@ def _validate_joints(self): actuated_joints = [] for joint in self.joints: if joint.parent not in self._link_map: - raise ValueError('Joint {} has invalid parent link name {}' - .format(joint.name, joint.parent)) + raise ValueError("Joint {} has invalid parent link name {}".format(joint.name, joint.parent)) if joint.child not in self._link_map: - raise ValueError('Joint {} has invalid child link name {}' - .format(joint.name, joint.child)) + raise ValueError("Joint {} has invalid child link name {}".format(joint.name, joint.child)) if joint.child == joint.parent: - raise ValueError('Joint {} has matching parent and child' - .format(joint.name)) + raise ValueError("Joint {} has matching parent and child".format(joint.name)) if joint.mimic is not None: if joint.mimic.joint not in self._joint_map: raise ValueError( - 'Joint {} has an invalid mimic joint name {}' - .format(joint.name, joint.mimic.joint) + "Joint {} has an invalid mimic joint name {}".format(joint.name, joint.mimic.joint) ) if joint.mimic.joint == joint.name: - raise ValueError( - 'Joint {} set up to mimic itself' - .format(joint.mimic.joint) - ) - elif joint.joint_type != 'fixed': + raise ValueError("Joint {} 
set up to mimic itself".format(joint.mimic.joint)) + elif joint.joint_type != "fixed": actuated_joints.append(joint) # Do a depth-first search @@ -3799,8 +3685,7 @@ def _validate_transmissions(self): for t in self.transmissions: for joint in t.joints: if joint.name not in self._joint_map: - raise ValueError('Transmission {} has invalid joint name ' - '{}'.format(t.name, joint.name)) + raise ValueError("Transmission {} has invalid joint name " "{}".format(t.name, joint.name)) def _validate_graph(self): """Raise an exception if the link-joint structure is invalid. @@ -3827,17 +3712,16 @@ def _validate_graph(self): for n in cc: cluster.append(n.name) link_clusters.append(cluster) - message = ('Links are not all connected. ' - 'Connected components are:') + message = "Links are not all connected. " "Connected components are:" for lc in link_clusters: - message += '\n\t' + message += "\n\t" for n in lc: - message += ' {}'.format(n) + message += " {}".format(n) raise ValueError(message) # Check that link graph is acyclic if not nx.is_directed_acyclic_graph(self._G): - raise ValueError('There are cycles in the link graph') + raise ValueError("There are cycles in the link graph") # Ensure that there is exactly one base link, which has no parent base_link = None @@ -3847,8 +3731,7 @@ def _validate_graph(self): if base_link is None: base_link = n else: - raise ValueError('Links {} and {} are both base links!' - .format(n.name, base_link.name)) + raise ValueError("Links {} and {} are both base links!".format(n.name, base_link.name)) if len(nx.ancestors(self._G, n)) == 0: end_links.append(n) return base_link, end_links @@ -3868,12 +3751,11 @@ def _process_cfg(self, cfg): joint_cfg[joint] = cfg[joint] elif isinstance(cfg, (list, tuple, np.ndarray)): if len(cfg) != len(self.actuated_joints): - raise ValueError('Cfg must have same length as actuated joints ' - 'if specified as a numerical array') + raise ValueError("Cfg must have same length as actuated joints " "if specified as a numerical array") for joint, value in zip(self.actuated_joints, cfg): joint_cfg[joint] = value else: - raise TypeError('Invalid type for config') + raise TypeError("Invalid type for config") return joint_cfg def _process_cfgs(self, cfgs): @@ -3883,7 +3765,7 @@ def _process_cfgs(self, cfgs): This should result in a dict mapping each joint to a list of cfg values, one per joint. 
""" - joint_cfg = {j : [] for j in self.actuated_joints} + joint_cfg = {j: [] for j in self.actuated_joints} n_cfgs = None if isinstance(cfgs, dict): for joint in cfgs: @@ -3907,30 +3789,30 @@ def _process_cfgs(self, cfgs): else: cfgs = np.asanyarray(cfgs, dtype=np.float64) for i, j in enumerate(self.actuated_joints): - joint_cfg[j] = cfgs[:,i] + joint_cfg[j] = cfgs[:, i] else: - raise ValueError('Incorrectly formatted config array') + raise ValueError("Incorrectly formatted config array") for j in joint_cfg: if len(joint_cfg[j]) == 0: joint_cfg[j] = None elif len(joint_cfg[j]) != n_cfgs: - raise ValueError('Inconsistent number of configurations for joints') + raise ValueError("Inconsistent number of configurations for joints") return joint_cfg, n_cfgs @classmethod def _from_xml(cls, node, path): - valid_tags = set(['joint', 'link', 'transmission', 'material']) + valid_tags = set(["joint", "link", "transmission", "material"]) kwargs = cls._parse(node, path) - extra_xml_node = ET.Element('extra') + extra_xml_node = ET.Element("extra") for child in node: if child.tag not in valid_tags: extra_xml_node.append(child) data = ET.tostring(extra_xml_node) - kwargs['other_xml'] = data + kwargs["other_xml"] = data return URDF(**kwargs) def _to_xml(self, parent, path): @@ -3967,11 +3849,14 @@ def rpy_to_matrix(coords): c3, c2, c1 = np.cos(coords) s3, s2, s1 = np.sin(coords) - return np.array([ - [c1 * c2, (c1 * s2 * s3) - (c3 * s1), (s1 * s3) + (c1 * c3 * s2)], - [c2 * s1, (c1 * c3) + (s1 * s2 * s3), (c3 * s1 * s2) - (c1 * s3)], - [-s2, c2 * s3, c2 * c3] - ], dtype=np.float64) + return np.array( + [ + [c1 * c2, (c1 * s2 * s3) - (c3 * s1), (s1 * s3) + (c1 * c3 * s2)], + [c2 * s1, (c1 * c3) + (s1 * s2 * s3), (c3 * s1 * s2) - (c1 * s3)], + [-s2, c2 * s3, c2 * c3], + ], + dtype=np.float64, + ) def matrix_to_rpy(R, solution=1): @@ -4006,21 +3891,21 @@ def matrix_to_rpy(R, solution=1): p = 0.0 y = 0.0 - if np.abs(R[2,0]) >= 1.0 - 1e-12: + if np.abs(R[2, 0]) >= 1.0 - 1e-12: y = 0.0 - if R[2,0] < 0: + if R[2, 0] < 0: p = np.pi / 2 - r = np.arctan2(R[0,1], R[0,2]) + r = np.arctan2(R[0, 1], R[0, 2]) else: p = -np.pi / 2 - r = np.arctan2(-R[0,1], -R[0,2]) + r = np.arctan2(-R[0, 1], -R[0, 2]) else: if solution == 1: - p = -np.arcsin(R[2,0]) + p = -np.arcsin(R[2, 0]) else: - p = np.pi + np.arcsin(R[2,0]) - r = np.arctan2(R[2,1] / np.cos(p), R[2,2] / np.cos(p)) - y = np.arctan2(R[1,0] / np.cos(p), R[0,0] / np.cos(p)) + p = np.pi + np.arcsin(R[2, 0]) + r = np.arctan2(R[2, 1] / np.cos(p), R[2, 2] / np.cos(p)) + y = np.arctan2(R[1, 0] / np.cos(p), R[0, 0] / np.cos(p)) return np.array([r, p, y], dtype=np.float64) @@ -4038,8 +3923,8 @@ def matrix_to_xyz_rpy(matrix): xyz_rpy : (6,) float The xyz_rpy vector. """ - xyz = matrix[:3,3] - rpy = matrix_to_rpy(matrix[:3,:3]) + xyz = matrix[:3, 3] + rpy = matrix_to_rpy(matrix[:3, :3]) return np.hstack((xyz, rpy)) @@ -4057,8 +3942,8 @@ def xyz_rpy_to_matrix(xyz_rpy): The homogenous transform matrix. """ matrix = np.eye(4, dtype=np.float64) - matrix[:3,3] = xyz_rpy[:3] - matrix[:3,:3] = rpy_to_matrix(xyz_rpy[3:]) + matrix[:3, 3] = xyz_rpy[:3] + matrix[:3, :3] = rpy_to_matrix(xyz_rpy[3:]) return matrix @@ -4080,13 +3965,13 @@ def parse_origin(node): child was found. 
""" matrix = np.eye(4, dtype=np.float64) - origin_node = node.find('origin') + origin_node = node.find("origin") if origin_node is not None: - if 'xyz' in origin_node.attrib: - matrix[:3,3] = np.fromstring(origin_node.attrib['xyz'], sep=' ') - if 'rpy' in origin_node.attrib: - rpy = np.fromstring(origin_node.attrib['rpy'], sep=' ') - matrix[:3,:3] = rpy_to_matrix(rpy) + if "xyz" in origin_node.attrib: + matrix[:3, 3] = np.fromstring(origin_node.attrib["xyz"], sep=" ") + if "rpy" in origin_node.attrib: + rpy = np.fromstring(origin_node.attrib["rpy"], sep=" ") + matrix[:3, :3] = rpy_to_matrix(rpy) return matrix @@ -4109,9 +3994,9 @@ def unparse_origin(matrix): - ``rpy`` - A string with three space-delimited floats representing the rotation of the origin. """ - node = ET.Element('origin') - node.attrib['xyz'] = '{} {} {}'.format(*matrix[:3,3]) - node.attrib['rpy'] = '{} {} {}'.format(*matrix_to_rpy(matrix[:3,:3])) + node = ET.Element("origin") + node.attrib["xyz"] = "{} {} {}".format(*matrix[:3, 3]) + node.attrib["rpy"] = "{} {} {}".format(*matrix_to_rpy(matrix[:3, :3])) return node @@ -4167,14 +4052,14 @@ def load_meshes(filename): if isinstance(meshes, (list, tuple, set)): meshes = list(meshes) if len(meshes) == 0: - raise ValueError('At least one mesh must be pmeshesent in file') + raise ValueError("At least one mesh must be pmeshesent in file") for r in meshes: if not isinstance(r, trimesh.Trimesh): - raise TypeError('Could not load meshes from file') + raise TypeError("Could not load meshes from file") elif isinstance(meshes, trimesh.Trimesh): meshes = [meshes] else: - raise ValueError('Unable to load mesh from file') + raise ValueError("Unable to load mesh from file") return meshes @@ -4199,10 +4084,8 @@ def configure_origin(value): value = np.asanyarray(value, dtype=np.float64) if value.shape == (6,): value = xyz_rpy_to_matrix(value) - elif value.shape != (4,4): - raise ValueError('Origin must be specified as a 4x4 ' - 'homogenous transformation matrix') + elif value.shape != (4, 4): + raise ValueError("Origin must be specified as a 4x4 " "homogenous transformation matrix") else: - raise TypeError('Invalid type for origin, expect 4x4 matrix') + raise TypeError("Invalid type for origin, expect 4x4 matrix") return value - From 4e8765fb87388af41c3186a56b8bf9316ca48701 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 26 Nov 2024 16:06:28 -0800 Subject: [PATCH 25/61] update for newest assets --- omnigibson/robots/locomotion_robot.py | 2 +- omnigibson/robots/r1.py | 4 ++-- omnigibson/robots/tiago.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/omnigibson/robots/locomotion_robot.py b/omnigibson/robots/locomotion_robot.py index 95601c175..a723be8e8 100644 --- a/omnigibson/robots/locomotion_robot.py +++ b/omnigibson/robots/locomotion_robot.py @@ -190,7 +190,7 @@ def non_floor_touching_base_links(self): @property def non_floor_touching_base_link_names(self): - raise [self.base_footprint_link_name] + return [self.base_footprint_link_name] @property def floor_touching_base_links(self): diff --git a/omnigibson/robots/r1.py b/omnigibson/robots/r1.py index 6b660d3cb..21e014a38 100644 --- a/omnigibson/robots/r1.py +++ b/omnigibson/robots/r1.py @@ -223,7 +223,7 @@ def arm_joint_names(self): @property def eef_link_names(self): - return {arm: f"{arm}_hand" for arm in self.arm_names} + return {arm: f"{arm}_eef_link" for arm in self.arm_names} @property def finger_link_names(self): @@ -235,7 +235,7 @@ def finger_joint_names(self): @property def 
usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/r1/r1.usd") + return os.path.join(gm.ASSET_PATH, "models/r1/r1_pro.usda") @property def curobo_path(self): diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 43614b8b4..4185c8927 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -417,7 +417,7 @@ def arm_link_names(self): @property def eef_link_names(self): - return {arm: "gripper_{}_grasping_frame".format(arm) for arm in self.arm_names} + return {arm: f"{arm}_eef_link" for arm in self.arm_names} @property def finger_link_names(self): @@ -439,7 +439,7 @@ def usd_path(self): # Default variant return os.path.join( - gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford/tiago_dual_omnidirectional_stanford_33.usd" + gm.ASSET_PATH, "models/tiago/tiago.usda" ) @property From 6c2b3177b66e23dbbf10b015874a05e2d90669d5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 27 Nov 2024 00:06:53 +0000 Subject: [PATCH 26/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/robots/tiago.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 4185c8927..7ec03042c 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -438,9 +438,7 @@ def usd_path(self): ) # Default variant - return os.path.join( - gm.ASSET_PATH, "models/tiago/tiago.usda" - ) + return os.path.join(gm.ASSET_PATH, "models/tiago/tiago.usda") @property def curobo_path(self): From 704c14e1543f7f3e1793f0e19cd8fa86fe96a365 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 3 Dec 2024 15:44:28 -0800 Subject: [PATCH 27/61] add option to merge fixed joints, fix collision spheres logic, simplify generated convex hulls to be physx compatible --- .../examples/robots/import_custom_robot.py | 51 ++++++++-------- omnigibson/utils/asset_conversion_utils.py | 58 +++++++++++++++++-- 2 files changed, 78 insertions(+), 31 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 39ec8dcc0..c61775439 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -6,8 +6,8 @@ from pathlib import Path import yaml +from addict import Dict -yaml.Dumper.ignore_aliases = lambda *args: True import click import torch as th from addict import Dict @@ -429,6 +429,18 @@ def set_link_collision_approximation(stage, root_prim, link_name, approximation_ mesh_collision_api.GetApproximationAttr().Set(approximation_type) +def is_articulated_joint(prim): + prim_type = prim.GetPrimTypeInfo().GetTypeName().lower() + return "joint" in prim_type and "fixed" not in prim_type + + +def find_all_articulated_joints(root_prim): + return _find_prims_with_condition( + condition=is_articulated_joint, + root_prim=root_prim, + ) + + def create_curobo_cfgs(robot_prim, curobo_cfg, root_link, save_dir, is_holonomic=False): """ Creates a set of curobo configs based on @robot_prim and @curobo_cfg @@ -453,29 +465,15 @@ def get_joint_upper_limit(root_prim, joint_name): root_prim=robot_prim, ) - all_collision_links = [ - link - for link in all_links - if _find_prim_with_condition( - condition=lambda prim: prim.GetPrimTypeInfo().GetTypeName() == "Mesh" - and prim.HasAPI(lazy.pxr.UsdPhysics.MeshCollisionAPI), - root_prim=link, - ) - is not None - ] - 
all_collision_link_names = [link.GetName() for link in all_collision_links] - - def is_articulated_joint(prim): - prim_type = prim.GetPrimTypeInfo().GetTypeName().lower() - return "joint" in prim_type and "fixed" not in prim_type + # Generate list of collision link names -- this is simply the list of all link names from the + # collision spheres specification + collision_spheres = curobo_cfg.collision_spheres.to_dict() + all_collision_link_names = list(collision_spheres.keys()) - joint_prims = _find_prims_with_condition( - condition=is_articulated_joint, - root_prim=robot_prim, - ) + joint_prims = find_all_articulated_joints(robot_prim) all_joint_names = [joint_prim.GetName() for joint_prim in joint_prims] retract_cfg = curobo_cfg.default_qpos - lock_joints = dict(curobo_cfg.lock_joints) + lock_joints = curobo_cfg.lock_joints.to_dict() if is_holonomic: # Move the final six joints to the beginning, since the holonomic joints are added at the end all_joint_names = list(reversed(all_joint_names[-6:])) + all_joint_names[:-6] @@ -496,14 +494,14 @@ def is_articulated_joint(prim): "link_names": ee_links[1:], "lock_joints": lock_joints, "extra_links": {}, - "collision_link_names": all_collision_link_names, - "collision_spheres": dict(curobo_cfg.collision_spheres), + "collision_link_names": deepcopy(all_collision_link_names), + "collision_spheres": collision_spheres, "collision_sphere_buffer": 0.002, "extra_collision_spheres": {}, - "self_collision_ignore": dict(curobo_cfg.self_collision_ignore), + "self_collision_ignore": curobo_cfg.self_collision_ignore.to_dict(), "self_collision_buffer": {root_link: 0.02}, "use_global_cumul": True, - "mesh_link_names": all_collision_link_names, + "mesh_link_names": deepcopy(all_collision_link_names), "external_asset_path": None, "cspace": all_joint_names, "retract_config": retract_cfg, @@ -537,7 +535,7 @@ def is_articulated_joint(prim): Path(save_dir).mkdir(parents=True, exist_ok=True) save_fpath = f"{save_dir}/{robot_name}_description_curobo_default.yaml" with open(save_fpath, "w+") as f: - yaml.dump(default_generated_cfg, f, default_flow_style=False) + yaml.dump(default_generated_cfg, f) # Permute the default config to have additional base only / arm only configs # Only relevant if robot is holonomic @@ -585,6 +583,7 @@ def import_custom_robot(config): convex_links=cfg.collision.convex_links, no_decompose_links=cfg.collision.no_decompose_links, visual_only_links=cfg.collision.no_collision_links, + merge_fixed_joints=cfg.merge_fixed_joints, hull_count=cfg.collision.hull_count, overwrite=cfg.overwrite, use_usda=True, diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 1f306bf54..891047e53 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -15,6 +15,7 @@ import torch as th import trimesh +import pymeshlab import omnigibson as og import omnigibson.lazy as lazy @@ -1153,7 +1154,10 @@ def _recursively_replace_list_of_dict(dic): return dic -def _create_urdf_import_config(use_convex_decomposition=False): +def _create_urdf_import_config( + use_convex_decomposition=False, + merge_fixed_joints=False, +): """ Creates and configures a URDF import configuration. 
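As a minimal illustrative sketch of the reworked helper above (not part of the diff; it assumes a live OmniGibson/Isaac Sim session, since the config object comes from Omniverse's URDF importer, and the flag values are placeholders), the new merge_fixed_joints switch would be driven like so:

    from omnigibson.utils.asset_conversion_utils import _create_urdf_import_config

    # Build an importer config with the two knobs exposed by this patch.
    import_config = _create_urdf_import_config(
        use_convex_decomposition=False,  # keep pre-decomposed collision meshes as-is
        merge_fixed_joints=True,         # collapse fixed joints into their parent links on import
    )
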
@@ -1166,6 +1170,7 @@ def _create_urdf_import_config(use_convex_decomposition=False): Args: use_convex_decomposition (bool): Whether to have omniverse use internal convex decomposition on any collision meshes + merge_fixed_joints (bool): Whether to merge fixed joints or not Returns: import_config: The configured URDF import configuration object. @@ -1176,7 +1181,7 @@ def _create_urdf_import_config(use_convex_decomposition=False): import_config.default_drive_type.__class__ ) # Hacky way to get class for default drive type, options are JOINT_DRIVE_{NONE / POSITION / VELOCITY} - import_config.set_merge_fixed_joints(False) + import_config.set_merge_fixed_joints(merge_fixed_joints) import_config.set_convex_decomp(use_convex_decomposition) import_config.set_fix_base(False) import_config.set_import_inertia_tensor(False) @@ -1199,6 +1204,7 @@ def import_obj_urdf( dataset_root=gm.EXTERNAL_DATASET_PATH, use_omni_convex_decomp=False, use_usda=False, + merge_fixed_joints=False, ): """ Imports an object from a URDF file into the current stage. @@ -1211,6 +1217,7 @@ def import_obj_urdf( use_omni_convex_decomp (bool): Whether to use omniverse's built-in convex decomposer for collision meshes use_usda (bool): If set, will write files to .usda files instead of .usd (bigger memory footprint, but human-readable) + merge_fixed_joints (bool): whether to merge fixed joints or not Returns: str: Absolute path to the imported USD file @@ -1220,7 +1227,10 @@ def import_obj_urdf( urdf_path=urdf_path, obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root ) # Import URDF - cfg = _create_urdf_import_config(use_convex_decomposition=use_omni_convex_decomp) + cfg = _create_urdf_import_config( + use_convex_decomposition=use_omni_convex_decomp, + merge_fixed_joints=merge_fixed_joints, + ) # Check if filepath exists usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.{'usda' if use_usda else 'usd'}" if _SPLIT_COLLISION_MESHES: @@ -1819,6 +1829,32 @@ def make_asset_positive(urdf_fpath, output_suffix="mirror"): return out_file +def simplify_convex_hull(tm, max_faces=64): + """ + Simplifies a convex hull mesh by using quadric edge collapse to reduce the number of faces + + Args: + tm (Trimesh): Trimesh mesh to simply. Should be convex hull + max_faces (int): Maximum number of faces to generate + """ + # If number of faces is less than or equal to @max_faces, simply return directly + if len(tm.faces) <= max_faces: + return tm + + # Use pymeshlab to reduce + ms = pymeshlab.MeshSet() + ms.add_mesh(pymeshlab.Mesh(vertex_matrix=tm.vertices, face_matrix=tm.faces, v_normals_matrix=tm.vertex_normals)) + ms.apply_filter('meshing_decimation_quadric_edge_collapse', targetfacenum=max_faces) + vertices_reduced = ms.current_mesh().vertex_matrix() + faces_reduced = ms.current_mesh().face_matrix() + vertex_normals_reduced = ms.current_mesh().vertex_normal_matrix() + return trimesh.Trimesh( + vertices=vertices_reduced, + faces=faces_reduced, + vertex_normals=vertex_normals_reduced, + ).convex_hull + + def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, discard_not_volume=True): """ Generates a set of collision meshes from a trimesh mesh using CoACD. 
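To make the simplify_convex_hull helper added above concrete, here is a small hypothetical exercise of it (not part of the diff; it assumes pymeshlab is installed and uses a dense icosphere purely as a stand-in for a complex collision hull):

    import trimesh

    from omnigibson.utils.asset_conversion_utils import simplify_convex_hull

    # A subdivided icosphere has thousands of faces, so its convex hull comfortably
    # exceeds the default face budget and triggers the pymeshlab decimation branch.
    dense_hull = trimesh.creation.icosphere(subdivisions=4).convex_hull
    simplified = simplify_convex_hull(dense_hull)

    print(f"{len(dense_hull.faces)} faces -> {len(simplified.faces)} faces")
    print("still a closed convex volume:", simplified.is_volume)

Because the helper takes the convex hull of the decimated result again, the output stays a valid convex volume suitable for downstream collision use.
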
@@ -1852,6 +1888,7 @@ def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, disca result = coacd.run_coacd( coacd_mesh, max_convex_hull=hull_count, + max_ch_vertex=60, ) # Convert the returned vertices and faces to trimesh meshes @@ -1882,7 +1919,16 @@ def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, disca else: raise ValueError(f"Invalid collision mesh generation method specified: {method}") - return hulls + # Sanity check all convex hulls + # For whatever reason, some convex hulls are not true volumes, so we take the convex hull again + # See https://github.com/mikedh/trimesh/issues/535 + hulls = [hull.convex_hull if not hull.is_volume else hull for hull in hulls] + + # For each hull, simplify so that the complexity is more likely to be Omniverse-GPU compatible + # See https://docs.omniverse.nvidia.com/extensions/latest/ext_physics/rigid-bodies.html#collision-settings + simplified_hulls = [simplify_convex_hull(hull) for hull in hulls] + + return simplified_hulls def get_collision_approximation_for_urdf( @@ -1963,7 +2009,6 @@ def get_collision_approximation_for_urdf( continue else: method = collision_method - collision_meshes = generate_collision_meshes( trimesh_mesh=tm, method=method, @@ -2286,6 +2331,7 @@ def import_og_asset_from_urdf( convex_links=None, no_decompose_links=None, visual_only_links=None, + merge_fixed_joints=False, dataset_root=gm.EXTERNAL_DATASET_PATH, hull_count=32, overwrite=False, @@ -2307,6 +2353,7 @@ def import_og_asset_from_urdf( no_decompose_links (None or list of str): If specified, links that should not have any special collision decomposition applied. This will only use the convex hull visual_only_links (None or list of str): If specified, links that should have no colliders associated with it + merge_fixed_joints (bool): Whether to merge fixed joints or not dataset_root (str): Dataset root directory to use for writing imported USD file. Default is external dataset path set from the global macros hull_count (int): Maximum number of convex hulls to decompose individual visual meshes into. @@ -2372,6 +2419,7 @@ def import_og_asset_from_urdf( dataset_root=dataset_root, use_omni_convex_decomp=False, # We already pre-decomposed the values, so don' use omni convex decomp use_usda=use_usda, + merge_fixed_joints=merge_fixed_joints, ) prim = import_obj_metadata( From 3e61ed8530817f823cee24edf564f582a77a60d6 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 3 Dec 2024 15:44:42 -0800 Subject: [PATCH 28/61] set friction to 0 by default when importing robots --- omnigibson/examples/robots/import_custom_robot.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index c61775439..07fb8b82e 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -639,6 +639,10 @@ def import_custom_robot(config): assert articulation_root_prim is not None, "Could not find any valid articulation root prim!" 
root_prim_name = articulation_root_prim.GetName() + # Zero out all friction on joints + for joint_prim in find_all_articulated_joints(root_prim=prim): + joint_prim.GetAttribute("physxJoint:jointFriction").Set(0.0) + # Add holonomic base if requested if cfg.base_motion.use_holonomic_joints: # Convert all wheel joints into fixed joints From edceabe32fdfd92248d2097baa287a037a2f50f7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 23:45:18 +0000 Subject: [PATCH 29/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/examples/robots/import_custom_robot.py | 4 +--- omnigibson/utils/asset_conversion_utils.py | 8 ++++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 07fb8b82e..062af0d24 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -5,11 +5,9 @@ from copy import deepcopy from pathlib import Path -import yaml -from addict import Dict - import click import torch as th +import yaml from addict import Dict import omnigibson as og diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 891047e53..36cc0d7df 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -13,9 +13,9 @@ from pathlib import Path from xml.dom import minidom +import pymeshlab import torch as th import trimesh -import pymeshlab import omnigibson as og import omnigibson.lazy as lazy @@ -1155,8 +1155,8 @@ def _recursively_replace_list_of_dict(dic): def _create_urdf_import_config( - use_convex_decomposition=False, - merge_fixed_joints=False, + use_convex_decomposition=False, + merge_fixed_joints=False, ): """ Creates and configures a URDF import configuration. 
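Aside for reviewers (not part of the patch): a sketch of how the merge_fixed_joints flag threaded through import_obj_urdf above is expected to be used from the caller side. The URDF path, category, and model names below are hypothetical placeholders, and the call assumes a running OmniGibson / Isaac Sim instance.

import omnigibson as og

from omnigibson.utils.asset_conversion_utils import import_obj_urdf

og.launch()  # the Omniverse URDF importer needs an active stage

usd_path = import_obj_urdf(
    urdf_path="/abs/path/to/my_robot.urdf",  # hypothetical input URDF
    obj_category="my_category",              # hypothetical category name
    obj_model="my_model",                    # hypothetical model name
    use_omni_convex_decomp=False,            # collision meshes assumed to be pre-decomposed
    merge_fixed_joints=False,                # keep fixed-joint child links as separate rigid bodies
)
print("Imported USD written to:", usd_path)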
@@ -1844,7 +1844,7 @@ def simplify_convex_hull(tm, max_faces=64): # Use pymeshlab to reduce ms = pymeshlab.MeshSet() ms.add_mesh(pymeshlab.Mesh(vertex_matrix=tm.vertices, face_matrix=tm.faces, v_normals_matrix=tm.vertex_normals)) - ms.apply_filter('meshing_decimation_quadric_edge_collapse', targetfacenum=max_faces) + ms.apply_filter("meshing_decimation_quadric_edge_collapse", targetfacenum=max_faces) vertices_reduced = ms.current_mesh().vertex_matrix() faces_reduced = ms.current_mesh().face_matrix() vertex_normals_reduced = ms.current_mesh().vertex_normal_matrix() From 462b16df8790172d6a1b4a62666024d34c37532c Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 5 Dec 2024 17:05:27 -0800 Subject: [PATCH 30/61] enforce max 60 vertex constraint --- omnigibson/utils/asset_conversion_utils.py | 33 ++++++++++++++++++---- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 891047e53..85c7b0e4e 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1829,22 +1829,45 @@ def make_asset_positive(urdf_fpath, output_suffix="mirror"): return out_file -def simplify_convex_hull(tm, max_faces=64): +def find_all_prim_children_with_type(prim_type, root_prim): + """ + Recursively searches children of @root_prim to find all instances of prim that satisfy type @prim_type + + Args: + prim_type (str): Type of the prim to search + root_prim (Usd.Prim): Root prim to search + + Returns: + list of Usd.Prim: All found prims whose prim type includes @prim_type + """ + found_prims = [] + for child in root_prim.GetChildren(): + if prim_type in child.GetTypeName(): + found_prims.append(child) + found_prims += find_all_prim_children_with_type(prim_type=prim_type, root_prim=child) + + return found_prims + + +def simplify_convex_hull(tm, max_vertices=60): """ Simplifies a convex hull mesh by using quadric edge collapse to reduce the number of faces Args: tm (Trimesh): Trimesh mesh to simply. 
Should be convex hull - max_faces (int): Maximum number of faces to generate + max_vertices (int): Maximum number of vertices to generate """ # If number of faces is less than or equal to @max_faces, simply return directly - if len(tm.faces) <= max_faces: + if len(tm.vertices) <= max_vertices: return tm # Use pymeshlab to reduce + max_faces = 64 ms = pymeshlab.MeshSet() ms.add_mesh(pymeshlab.Mesh(vertex_matrix=tm.vertices, face_matrix=tm.faces, v_normals_matrix=tm.vertex_normals)) - ms.apply_filter('meshing_decimation_quadric_edge_collapse', targetfacenum=max_faces) + while len(ms.current_mesh().vertex_matrix()) > max_vertices: + ms.apply_filter('meshing_decimation_quadric_edge_collapse', targetfacenum=max_faces) + max_faces -= 2 vertices_reduced = ms.current_mesh().vertex_matrix() faces_reduced = ms.current_mesh().face_matrix() vertex_normals_reduced = ms.current_mesh().vertex_normal_matrix() @@ -1924,7 +1947,7 @@ def generate_collision_meshes(trimesh_mesh, method="coacd", hull_count=32, disca # See https://github.com/mikedh/trimesh/issues/535 hulls = [hull.convex_hull if not hull.is_volume else hull for hull in hulls] - # For each hull, simplify so that the complexity is more likely to be Omniverse-GPU compatible + # For each hull, simplify so that the complexity is guaranteed to be Omniverse-GPU compatible # See https://docs.omniverse.nvidia.com/extensions/latest/ext_physics/rigid-bodies.html#collision-settings simplified_hulls = [simplify_convex_hull(hull) for hull in hulls] From 25331ca349992fc7145b7e6468087b09ba2eeb89 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 5 Dec 2024 17:05:52 -0800 Subject: [PATCH 31/61] verify that all material paths are relative instead of absolute for portability --- omnigibson/prims/material_prim.py | 8 ++-- omnigibson/utils/asset_conversion_utils.py | 49 +++++++++++++++------- 2 files changed, 39 insertions(+), 18 deletions(-) diff --git a/omnigibson/prims/material_prim.py b/omnigibson/prims/material_prim.py index d92135a1b..439b8943d 100644 --- a/omnigibson/prims/material_prim.py +++ b/omnigibson/prims/material_prim.py @@ -184,12 +184,14 @@ def shader_force_populate(self, render=True): assert self._shader is not None asyncio.run(self._load_mdl_parameters(render=render)) - def shader_update_asset_paths_with_root_path(self, root_path): + def shader_update_asset_paths_with_root_path(self, root_path, relative=False): """ Similar to @shader_update_asset_paths, except in this case, root_path is explicitly provided by the caller. Args: - root_path (str): root to be pre-appended to the original asset paths + root_path (str): root directory from which to update shader paths + relative (bool): If set, all paths will be updated as relative paths with respect to @root_path. 
+ Otherwise, @root_path will be pre-appended to the original asset paths """ for inp_name in self.shader_input_names_by_type("SdfAssetPath"): @@ -203,7 +205,7 @@ def shader_update_asset_paths_with_root_path(self, root_path): if original_path == "": continue - new_path = os.path.join(root_path, original_path) + new_path = f"./{os.path.relpath(original_path, root_path)}" if relative else os.path.join(root_path, original_path) self.set_input(inp_name, new_path) def get_input(self, inp): diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 85c7b0e4e..3603521ec 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -22,6 +22,7 @@ import omnigibson.utils.transform_utils as T from omnigibson.macros import gm from omnigibson.objects import DatasetObject +from omnigibson.prims.material_prim import MaterialPrim from omnigibson.scenes import Scene from omnigibson.utils.ui_utils import create_module_logger from omnigibson.utils.urdfpy_utils import URDF @@ -1073,12 +1074,23 @@ def import_obj_metadata(usd_path, obj_category, obj_model, dataset_root, import_ if "glass" in link_tags: _process_glass_link(prim.GetChild(link)) - # Rename model to be named + # Rename model to be named if not already named that old_prim_path = prim.GetPrimPath().pathString - new_prim_path = "/".join(old_prim_path.split("/")[:-1]) + f"/{obj_model}" - lazy.omni.kit.commands.execute("MovePrim", path_from=old_prim_path, path_to=new_prim_path) - - prim = stage.GetDefaultPrim() + if old_prim_path.split("/")[-1] != obj_model: + new_prim_path = "/".join(old_prim_path.split("/")[:-1]) + f"/{obj_model}" + lazy.omni.kit.commands.execute("MovePrim", path_from=old_prim_path, path_to=new_prim_path) + prim = stage.GetDefaultPrim() + + # Hacky way to avoid new prim being created at /World + class DummyScene: + prim_path = "" + + og.sim.render() + mat_prims = find_all_prim_children_with_type(prim_type="Material", root_prim=prim) + for i, mat_prim in enumerate(mat_prims): + mat = MaterialPrim(mat_prim.GetPrimPath().pathString, f"mat{i}") + mat.load(DummyScene) + mat.shader_update_asset_paths_with_root_path(root_path=os.path.dirname(usd_path), relative=True) # Save stage stage.Save() @@ -1220,7 +1232,9 @@ def import_obj_urdf( merge_fixed_joints (bool): whether to merge fixed joints or not Returns: - str: Absolute path to the imported USD file + 2-tuple: + - str: Absolute path to post-processed URDF file used to generate USD + - str: Absolute path to the imported USD file """ # Preprocess input URDF to account for metalinks urdf_path = _add_metalinks_to_urdf( @@ -1246,7 +1260,7 @@ def import_obj_urdf( ) log.debug(f"Imported {obj_category}, {obj_model}") - return usd_path + return urdf_path, usd_path def _pretty_print_xml(current, parent=None, index=-1, depth=0, use_tabs=False): @@ -1567,7 +1581,7 @@ def _add_metalinks_to_urdf(urdf_path, obj_category, obj_model, dataset_root): return _save_xmltree_as_urdf( root_element=root, name=f"{obj_model}_with_metalinks", - dirpath=model_root_path, + dirpath=f"{model_root_path}/urdf", unique_urdf=False, ) @@ -2088,9 +2102,9 @@ def get_collision_approximation_for_urdf( ) -def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False): +def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, suffix="original", overwrite=False): # Create a directory for the object - obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl 
+ obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl / "urdf" if not overwrite: assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!" obj_dir.mkdir(parents=True, exist_ok=True) @@ -2114,7 +2128,7 @@ def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATA # Export this URDF return _save_xmltree_as_urdf( root_element=root, - name=mdl, + name=f"{mdl}_{suffix}", dirpath=obj_dir, unique_urdf=False, ) @@ -2398,15 +2412,17 @@ def import_og_asset_from_urdf( category=category, mdl=model, dataset_root=dataset_root, + suffix="original", overwrite=overwrite, ) else: # Verify that the object exists at the expected location - # This is /objects///.urdf - urdf_path = os.path.join(dataset_root, "objects", category, model, f"{model}.urdf") + # This is /objects///urdf/_original.urdf + urdf_path = os.path.join(dataset_root, "objects", category, model, "urdf", f"{model}_original.urdf") assert os.path.exists(urdf_path), f"Expected urdf at dataset location {urdf_path}, but none was found!" # Make sure all scaling is positive + model_dir = os.path.join(dataset_root, "objects", category, model) urdf_path = make_asset_positive(urdf_fpath=urdf_path) # Update collisions if requested @@ -2426,7 +2442,7 @@ def import_og_asset_from_urdf( print("Recording object metadata from URDF...") record_obj_metadata_from_urdf( urdf_path=urdf_path, - obj_dir=os.path.dirname(urdf_path), + obj_dir=model_dir, joint_setting="zero", overwrite=overwrite, ) @@ -2435,7 +2451,7 @@ def import_og_asset_from_urdf( print("Converting obj URDF to USD...") og.launch() assert len(og.sim.scenes) == 0 - usd_path = import_obj_urdf( + urdf_path, usd_path = import_obj_urdf( urdf_path=urdf_path, obj_category=category, obj_model=model, @@ -2445,6 +2461,9 @@ def import_og_asset_from_urdf( merge_fixed_joints=merge_fixed_joints, ) + # Copy metalinks URDF to original name of object model + shutil.copy2(urdf_path, os.path.join(dataset_root, "objects", category, model, "urdf", f"{model}.urdf")) + prim = import_obj_metadata( usd_path=usd_path, obj_category=category, From 12af1155883248499a7f8de377f694e7598a8115 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 01:06:56 +0000 Subject: [PATCH 32/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/prims/material_prim.py | 4 +++- omnigibson/utils/asset_conversion_utils.py | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/omnigibson/prims/material_prim.py b/omnigibson/prims/material_prim.py index 439b8943d..3098658a3 100644 --- a/omnigibson/prims/material_prim.py +++ b/omnigibson/prims/material_prim.py @@ -205,7 +205,9 @@ def shader_update_asset_paths_with_root_path(self, root_path, relative=False): if original_path == "": continue - new_path = f"./{os.path.relpath(original_path, root_path)}" if relative else os.path.join(root_path, original_path) + new_path = ( + f"./{os.path.relpath(original_path, root_path)}" if relative else os.path.join(root_path, original_path) + ) self.set_input(inp_name, new_path) def get_input(self, inp): diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index d5ea31d80..b7bac6498 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1880,7 +1880,7 @@ def simplify_convex_hull(tm, max_vertices=60): ms = pymeshlab.MeshSet() 
ms.add_mesh(pymeshlab.Mesh(vertex_matrix=tm.vertices, face_matrix=tm.faces, v_normals_matrix=tm.vertex_normals)) while len(ms.current_mesh().vertex_matrix()) > max_vertices: - ms.apply_filter('meshing_decimation_quadric_edge_collapse', targetfacenum=max_faces) + ms.apply_filter("meshing_decimation_quadric_edge_collapse", targetfacenum=max_faces) max_faces -= 2 vertices_reduced = ms.current_mesh().vertex_matrix() faces_reduced = ms.current_mesh().face_matrix() @@ -2102,7 +2102,9 @@ def get_collision_approximation_for_urdf( ) -def copy_urdf_to_dataset(urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, suffix="original", overwrite=False): +def copy_urdf_to_dataset( + urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, suffix="original", overwrite=False +): # Create a directory for the object obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl / "urdf" if not overwrite: From 3e5cb80055a77fa0ee87a77243c216941f17b575 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 9 Dec 2024 17:10:29 -0800 Subject: [PATCH 33/61] update all robot classes for refactored assets --- omnigibson/robots/a1.py | 4 -- omnigibson/robots/fetch.py | 61 +---------------- omnigibson/robots/franka.py | 8 --- omnigibson/robots/franka_mounted.py | 9 --- omnigibson/robots/freight.py | 21 ------ omnigibson/robots/husky.py | 8 --- omnigibson/robots/locobot.py | 20 ------ omnigibson/robots/manipulation_robot.py | 26 +++++-- omnigibson/robots/r1.py | 31 --------- omnigibson/robots/robot_base.py | 34 +++------ omnigibson/robots/stretch.py | 91 ++----------------------- omnigibson/robots/tiago.py | 59 ---------------- omnigibson/robots/turtlebot.py | 34 --------- omnigibson/robots/vx300s.py | 36 +--------- 14 files changed, 39 insertions(+), 403 deletions(-) diff --git a/omnigibson/robots/a1.py b/omnigibson/robots/a1.py index 1b3e233c2..4e5ba4e5d 100644 --- a/omnigibson/robots/a1.py +++ b/omnigibson/robots/a1.py @@ -191,10 +191,6 @@ def finger_joint_names(self): def usd_path(self): return os.path.join(gm.ASSET_PATH, f"models/a1/{self.model_name}.usd") - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, f"models/a1/{self.model_name}_description.yaml")} - @property def urdf_path(self): return os.path.join(gm.ASSET_PATH, f"models/a1/{self.model_name}.urdf") diff --git a/omnigibson/robots/fetch.py b/omnigibson/robots/fetch.py index 0fd6496b2..3a789dfa9 100644 --- a/omnigibson/robots/fetch.py +++ b/omnigibson/robots/fetch.py @@ -174,24 +174,6 @@ def default_arm_poses(self): "horizontal": th.tensor([-1.43016, 0.20965, 1.86816, 1.77576, -0.27289, 1.31715, 2.01226]), } - def _post_load(self): - super()._post_load() - - # Set the wheels back to using sphere approximations - for wheel_name in ["l_wheel_link", "r_wheel_link"]: - log.warning( - "Fetch wheel links are post-processed to use sphere approximation collision meshes. " - "Please ignore any previous errors about these collision meshes." - ) - wheel_link = self.links[wheel_name] - assert set(wheel_link.collision_meshes) == {"collisions"}, "Wheel link should only have 1 collision!" - wheel_link.collision_meshes["collisions"].set_collision_approximation("boundingSphere") - - # Also apply a convex decomposition to the torso lift link - torso_lift_link = self.links["torso_lift_link"] - assert set(torso_lift_link.collision_meshes) == {"collisions"}, "torso link should only have 1 collision!" 
- torso_lift_link.collision_meshes["collisions"].set_collision_approximation("convexDecomposition") - @property def discrete_action_list(self): raise NotImplementedError() @@ -248,29 +230,6 @@ def assisted_grasp_end_points(self): ] } - @property - def disabled_collision_pairs(self): - return [ - ["torso_lift_link", "shoulder_lift_link"], - ["torso_lift_link", "torso_fixed_link"], - ["torso_lift_link", "estop_link"], - ["base_link", "laser_link"], - ["base_link", "torso_fixed_link"], - ["base_link", "l_wheel_link"], - ["base_link", "r_wheel_link"], - ["base_link", "estop_link"], - ["torso_lift_link", "shoulder_pan_link"], - ["torso_lift_link", "head_pan_link"], - ["head_pan_link", "head_tilt_link"], - ["shoulder_pan_link", "shoulder_lift_link"], - ["shoulder_lift_link", "upperarm_roll_link"], - ["upperarm_roll_link", "elbow_flex_link"], - ["elbow_flex_link", "forearm_roll_link"], - ["forearm_roll_link", "wrist_flex_link"], - ["wrist_flex_link", "wrist_roll_link"], - ["wrist_roll_link", "gripper_link"], - ] - @property def base_joint_names(self): return ["l_wheel_joint", "r_wheel_joint"] @@ -296,7 +255,7 @@ def manipulation_link_names(self): "forearm_roll_link", "wrist_flex_link", "wrist_roll_link", - "gripper_link", + "eef_link", "l_gripper_finger_link", "r_gripper_finger_link", ] @@ -331,7 +290,7 @@ def arm_joint_names(self): @property def eef_link_names(self): - return {self.default_arm: "gripper_link"} + return {self.default_arm: "eef_link"} @property def finger_link_names(self): @@ -341,26 +300,10 @@ def finger_link_names(self): def finger_joint_names(self): return {self.default_arm: ["r_gripper_finger_joint", "l_gripper_finger_joint"]} - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/fetch/fetch/fetch.usd") - - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/fetch/fetch_descriptor.yaml")} - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/fetch/fetch.urdf") - @property def arm_workspace_range(self): return {self.default_arm: th.deg2rad(th.tensor([-45, 45], dtype=th.float32))} - @property - def eef_usd_path(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/fetch/fetch/fetch_eef.usd")} - @property def teleop_rotation_offset(self): return {self.default_arm: euler2quat(th.tensor([0, math.pi / 2, math.pi], dtype=th.float32))} diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index 358d4c2bd..25334de49 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -255,10 +255,6 @@ def finger_joint_names(self): def usd_path(self): return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}_description.yaml")} - @property def urdf_path(self): return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") @@ -275,10 +271,6 @@ def curobo_path(self): def curobo_attached_object_link_names(self): return {self._eef_link_names: "attached_object"} - @property - def eef_usd_path(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}_eef.usd")} - @property def teleop_rotation_offset(self): return {self.default_arm: self._teleop_rotation_offset} diff --git a/omnigibson/robots/franka_mounted.py b/omnigibson/robots/franka_mounted.py index 566720833..c26d0a49a 100644 --- a/omnigibson/robots/franka_mounted.py +++ 
b/omnigibson/robots/franka_mounted.py @@ -29,10 +29,6 @@ def finger_lengths(self): def usd_path(self): return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.usd") - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted_description.yaml")} - @property def urdf_path(self): return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.urdf") @@ -41,11 +37,6 @@ def urdf_path(self): def curobo_path(self): return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted_description_curobo.yaml") - @property - def eef_usd_path(self): - # TODO: Update! - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/franka/franka_panda_eef.usd")} - @property def assisted_grasp_start_points(self): return { diff --git a/omnigibson/robots/freight.py b/omnigibson/robots/freight.py index 0b3df7ff4..1f7608791 100644 --- a/omnigibson/robots/freight.py +++ b/omnigibson/robots/freight.py @@ -16,19 +16,6 @@ class Freight(TwoWheelRobot): Uses joint velocity control """ - def _post_load(self): - super()._post_load() - - # Set the wheels back to using sphere approximations - for wheel_name in ["l_wheel_link", "r_wheel_link"]: - log.warning( - "Freight wheel links are post-processed to use sphere approximation collision meshes. " - "Please ignore any previous errors about these collision meshes." - ) - wheel_link = self.links[wheel_name] - assert set(wheel_link.collision_meshes) == {"collisions"}, "Wheel link should only have 1 collision!" - wheel_link.collision_meshes["collisions"].set_collision_approximation("boundingSphere") - @property def wheel_radius(self): return 0.0613 @@ -44,11 +31,3 @@ def base_joint_names(self): @property def _default_joint_pos(self): return th.zeros(self.n_joints) - - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/fetch/freight/freight.usd") - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/fetch/freight.urdf") diff --git a/omnigibson/robots/husky.py b/omnigibson/robots/husky.py index fe036e452..f04e5d9bf 100644 --- a/omnigibson/robots/husky.py +++ b/omnigibson/robots/husky.py @@ -30,11 +30,3 @@ def base_joint_names(self): @property def _default_joint_pos(self): return th.zeros(self.n_joints) - - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/husky/husky/husky.usd") - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/husky/husky.urdf") diff --git a/omnigibson/robots/locobot.py b/omnigibson/robots/locobot.py index ff753c6fa..a80ee6421 100644 --- a/omnigibson/robots/locobot.py +++ b/omnigibson/robots/locobot.py @@ -27,23 +27,3 @@ def base_joint_names(self): @property def _default_joint_pos(self): return th.zeros(self.n_joints) - - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/locobot/locobot/locobot.usd") - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/locobot/locobot.urdf") - - @property - def disabled_collision_pairs(self): - # badly modeled gripper collision meshes - return [ - ["base_link", "arm_base_link"], - ["base_link", "plate_2"], - ["cam_mount", "forearm_link"], - ["cam_mount", "elbow_link"], - ["cam_mount", "plate_1"], - ["cam_mount", "plate_2"], - ] diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index f3f6d55ff..9a6709135 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -5,6 +5,7 @@ import 
networkx as nx import torch as th +import os import omnigibson as og import omnigibson.lazy as lazy @@ -974,13 +975,30 @@ def _freeze_gripper(self, arm="default"): joint.set_vel(vel=0.0) @property - def robot_arm_descriptor_yamls(self): + def curobo_path(self): """ Returns: - dict: Dictionary mapping arm appendage name to files path to the descriptor - of the robot for IK Controller. + str or Dict[CuroboEmbodimentSelection, str]: file path to the robot curobo file or a mapping from + CuroboEmbodimentSelection to the file path """ - raise NotImplementedError + # Import here to avoid circular imports + from omnigibson.action_primitives.curobo import CuroboEmbodimentSelection + + # By default, sets the standardized path + model = self.model_name.lower() + return { + emb_sel: os.path.join(gm.ASSET_PATH, f"models/{model}/curobo/{model}_description_curobo_{emb_sel.value}.yaml") + for emb_sel in CuroboEmbodimentSelection + } + + @property + def curobo_attached_object_link_names(self): + """ + Returns: + Dict[str, str]: mapping from robot eef link names to the link names of the attached objects + """ + # By default, sets the standardized path + return {eef_link_name: f"attached_object_{eef_link_name}" for eef_link_name in self.eef_link_names.values()} @property def _default_arm_joint_controller_configs(self): diff --git a/omnigibson/robots/r1.py b/omnigibson/robots/r1.py index 21e014a38..34180d5e1 100644 --- a/omnigibson/robots/r1.py +++ b/omnigibson/robots/r1.py @@ -233,41 +233,10 @@ def finger_link_names(self): def finger_joint_names(self): return {arm: [f"{arm}_gripper_axis{i}" for i in range(1, 3)] for arm in self.arm_names} - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/r1/r1_pro.usda") - - @property - def curobo_path(self): - return { - emb_sel: os.path.join(gm.ASSET_PATH, f"models/r1/r1_description_curobo_{emb_sel.value}.yaml") - for emb_sel in CuroboEmbodimentSelection - } - - @property - def curobo_attached_object_link_names(self): - return {eef_link_name: f"attached_object_{eef_link_name}" for eef_link_name in self.eef_link_names.values()} - - @property - def robot_arm_descriptor_yamls(self): - descriptor_yamls = { - arm: os.path.join(gm.ASSET_PATH, f"models/r1/r1_{arm}_descriptor.yaml") for arm in self.arm_names - } - descriptor_yamls["combined"]: os.path.join(gm.ASSET_PATH, "models/r1/r1_combined_descriptor.yaml") - return descriptor_yamls - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/r1/r1.urdf") - @property def arm_workspace_range(self): return {arm: [th.deg2rad(-45), th.deg2rad(45)] for arm in self.arm_names} - @property - def eef_usd_path(self): - return {arm: os.path.join(gm.ASSET_PATH, "models/r1/r1_eef.usd") for arm in self.arm_names} - @property def disabled_collision_pairs(self): # badly modeled gripper collision meshes diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 19baf7ef7..8df0d29c1 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -1,12 +1,11 @@ -from abc import abstractmethod +import os from copy import deepcopy import torch as th import omnigibson.utils.transform_utils as T -from omnigibson.macros import create_module_macros +from omnigibson.macros import create_module_macros, gm from omnigibson.objects.controllable_object import ControllableObject -from omnigibson.objects.object_base import BaseObject from omnigibson.objects.usd_object import USDObject from omnigibson.sensors import ( ALL_SENSOR_MODALITIES, @@ -22,7 +21,6 @@ from 
omnigibson.utils.usd_utils import ( ControllableObjectViewAPI, absolute_prim_path_to_scene_relative, - add_asset_to_stage, ) from omnigibson.utils.vision_utils import segmentation_to_rgb @@ -573,11 +571,10 @@ def model_name(self): return self.__class__.__name__ @property - @abstractmethod def usd_path(self): - # For all robots, this must be specified a priori, before we actually initialize the USDObject constructor! - # So we override the parent implementation, and make this an abstract method - raise NotImplementedError + # By default, sets the standardized path + model = self.model_name.lower() + return os.path.join(gm.ASSET_PATH, f"models/{model}/usd/{model}.usda") @property def urdf_path(self): @@ -585,24 +582,9 @@ def urdf_path(self): Returns: str: file path to the robot urdf file. """ - raise NotImplementedError - - @property - def curobo_path(self): - """ - Returns: - str or Dict[CuroboEmbodimentSelection, str]: file path to the robot curobo file or a mapping from - CuroboEmbodimentSelection to the file path - """ - raise NotImplementedError - - @property - def curobo_attached_object_link_names(self): - """ - Returns: - Dict[str, str]: mapping from robot eef link names to the link names of the attached objects - """ - raise NotImplementedError + # By default, sets the standardized path + model = self.model_name.lower() + return os.path.join(gm.ASSET_PATH, f"models/{model}/urdf/{model}.urdf") @classproperty def _do_not_register_classes(cls): diff --git a/omnigibson/robots/stretch.py b/omnigibson/robots/stretch.py index b2c5c38fc..e8e88a3cb 100644 --- a/omnigibson/robots/stretch.py +++ b/omnigibson/robots/stretch.py @@ -18,19 +18,6 @@ class Stretch(ManipulationRobot, TwoWheelRobot, ActiveCameraRobot): Reference: https://hello-robot.com/stretch-3-product """ - def _post_load(self): - super()._post_load() - - # Set the wheels back to using sphere approximations - for wheel_name in ["link_left_wheel", "link_right_wheel"]: - log.warning( - "Stretch wheel links are post-processed to use sphere approximation collision meshes. " - "Please ignore any previous errors about these collision meshes." - ) - wheel_link = self.links[wheel_name] - assert set(wheel_link.collision_meshes) == {"collisions"}, "Wheel link should only have 1 collision!" 
- wheel_link.collision_meshes["collisions"].set_collision_approximation("boundingSphere") - @property def discrete_action_list(self): raise NotImplementedError() @@ -76,8 +63,8 @@ def finger_lengths(self): def assisted_grasp_start_points(self): return { self.default_arm: [ - GraspingPoint(link_name="link_gripper_fingertip_right", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_fingertip_right", position=th.tensor([-0.01, 0.0, 0.009])), + GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), + GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), ] } @@ -85,69 +72,22 @@ def assisted_grasp_start_points(self): def assisted_grasp_end_points(self): return { self.default_arm: [ - GraspingPoint(link_name="link_gripper_fingertip_left", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_fingertip_left", position=th.tensor([-0.01, 0.0, 0.009])), + GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), + GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), ] } @property def disabled_collision_pairs(self): return [ - ["base_link", "caster_link"], - ["base_link", "link_aruco_left_base"], - ["base_link", "link_aruco_right_base"], - ["base_link", "base_imu"], - ["base_link", "laser"], - ["base_link", "link_left_wheel"], - ["base_link", "link_right_wheel"], - ["base_link", "link_mast"], - ["link_mast", "link_head"], - ["link_head", "link_head_pan"], - ["link_head_pan", "link_head_tilt"], - ["camera_link", "link_head_tilt"], - ["camera_link", "link_head_pan"], - ["link_head_nav_cam", "link_head_tilt"], - ["link_head_nav_cam", "link_head_pan"], - ["link_mast", "link_lift"], - ["link_lift", "link_aruco_shoulder"], - ["link_lift", "link_arm_l4"], ["link_lift", "link_arm_l3"], ["link_lift", "link_arm_l2"], ["link_lift", "link_arm_l1"], - ["link_arm_l4", "link_arm_l3"], - ["link_arm_l4", "link_arm_l2"], - ["link_arm_l4", "link_arm_l1"], - ["link_arm_l4", "link_aruco_inner_wrist"], - ["link_arm_l3", "link_arm_l2"], + ["link_lift", "link_arm_l0"], ["link_arm_l3", "link_arm_l1"], - ["link_arm_l3", "link_aruco_top_wrist"], - ["link_arm_l3", "link_aruco_inner_wrist"], - ["link_arm_l2", "link_arm_l1"], - ["link_arm_l2", "link_aruco_top_wrist"], - ["link_arm_l2", "link_aruco_inner_wrist"], ["link_arm_l0", "link_arm_l1"], ["link_arm_l0", "link_arm_l2"], ["link_arm_l0", "link_arm_l3"], - ["link_arm_l0", "link_arm_l4"], - ["link_arm_l0", "link_arm_l1"], - ["link_arm_l0", "link_aruco_inner_wrist"], - ["link_arm_l0", "link_aruco_top_wrist"], - ["link_arm_l0", "link_wrist_yaw"], - ["link_arm_l0", "link_wrist_yaw_bottom"], - ["link_arm_l0", "link_wrist_pitch"], - ["link_wrist_yaw_bottom", "link_wrist_pitch"], - ["link_wrist_yaw_bottom", "link_arm_l4"], - ["link_wrist_yaw_bottom", "link_arm_l3"], - ["gripper_camera_link", "link_gripper_s3_body"], - ["link_gripper_s3_body", "link_aruco_d405"], - ["link_gripper_s3_body", "link_gripper_finger_left"], - ["link_gripper_finger_left", "link_aruco_fingertip_left"], - ["link_gripper_finger_left", "link_gripper_fingertip_left"], - ["link_gripper_s3_body", "link_gripper_finger_right"], - ["link_gripper_finger_right", "link_aruco_fingertip_right"], - ["link_gripper_finger_right", "link_gripper_fingertip_right"], - ["respeaker_base", "link_head"], - ["respeaker_base", "link_mast"], ] @property @@ -162,17 +102,12 @@ def camera_joint_names(self): def arm_link_names(self): 
return { self.default_arm: [ - "link_mast", "link_lift", - "link_arm_l4", "link_arm_l3", "link_arm_l2", "link_arm_l1", "link_arm_l0", - "link_aruco_inner_wrist", - "link_aruco_top_wrist", "link_wrist_yaw", - "link_wrist_yaw_bottom", "link_wrist_pitch", "link_wrist_roll", ] @@ -195,7 +130,7 @@ def arm_joint_names(self): @property def eef_link_names(self): - return {self.default_arm: "link_grasp_center"} + return {self.default_arm: "eef_link"} @property def finger_link_names(self): @@ -203,23 +138,9 @@ def finger_link_names(self): self.default_arm: [ "link_gripper_finger_left", "link_gripper_finger_right", - "link_gripper_fingertip_left", - "link_gripper_fingertip_right", ] } @property def finger_joint_names(self): return {self.default_arm: ["joint_gripper_finger_right", "joint_gripper_finger_left"]} - - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/stretch/stretch/stretch.usd") - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/stretch/stretch.urdf") - - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/stretch/stretch_descriptor.yaml")} diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 7ec03042c..c5c7ea5b3 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -387,7 +387,6 @@ def manipulation_link_names(self): "wrist_left_ft_link", "wrist_left_ft_tool_link", "gripper_left_link", - # "gripper_left_grasping_frame", "gripper_left_left_finger_link", "gripper_left_right_finger_link", "gripper_left_tool_link", @@ -402,7 +401,6 @@ def manipulation_link_names(self): "wrist_right_ft_link", "wrist_right_ft_tool_link", "gripper_right_link", - # "gripper_right_grasping_frame", "gripper_right_left_finger_link", "gripper_right_right_finger_link", "gripper_right_tool_link", @@ -429,66 +427,9 @@ def finger_joint_names(self): arm: [f"gripper_{arm}_right_finger_joint", f"gripper_{arm}_left_finger_joint"] for arm in self.arm_names } - @property - def usd_path(self): - if self._variant == "wrist_cam": - return os.path.join( - gm.ASSET_PATH, - "models/tiago/tiago_dual_omnidirectional_stanford/tiago_dual_omnidirectional_stanford_33_with_wrist_cam.usd", - ) - - # Default variant - return os.path.join(gm.ASSET_PATH, "models/tiago/tiago.usda") - - @property - def curobo_path(self): - return { - emb_sel: os.path.join(gm.ASSET_PATH, f"models/tiago/tiago_description_curobo_{emb_sel.value}.yaml") - for emb_sel in CuroboEmbodimentSelection - } - - @property - def curobo_attached_object_link_names(self): - return {eef_link_name: f"attached_object_{eef_link_name}" for eef_link_name in self.eef_link_names.values()} - - @property - def simplified_mesh_usd_path(self): - # TODO: How can we make this more general - maybe some automatic way to generate these? 
- return os.path.join( - gm.ASSET_PATH, - "models/tiago/tiago_dual_omnidirectional_stanford/tiago_dual_omnidirectional_stanford_33_simplified_collision_mesh.usd", - ) - - @property - def robot_arm_descriptor_yamls(self): - # TODO: Remove the need to do this by making the arm descriptor yaml files generated automatically - return { - "left": os.path.join( - gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_left_arm_descriptor.yaml" - ), - "left_fixed": os.path.join( - gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_left_arm_fixed_trunk_descriptor.yaml" - ), - "right": os.path.join( - gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_right_arm_fixed_trunk_descriptor.yaml" - ), - "combined": os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford.yaml"), - } - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford.urdf") - @property def arm_workspace_range(self): return { "left": th.deg2rad(th.tensor([15, 75], dtype=th.float32)), "right": th.deg2rad(th.tensor([-75, -15], dtype=th.float32)), } - - @property - def eef_usd_path(self): - return { - arm: os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford/tiago_eef.usd") - for arm in self.arm_names - } diff --git a/omnigibson/robots/turtlebot.py b/omnigibson/robots/turtlebot.py index f545103c4..6dc053ed9 100644 --- a/omnigibson/robots/turtlebot.py +++ b/omnigibson/robots/turtlebot.py @@ -28,37 +28,3 @@ def base_joint_names(self): @property def _default_joint_pos(self): return th.zeros(self.n_joints) - - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/turtlebot/turtlebot/turtlebot.usd") - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/turtlebot/turtlebot.urdf") - - @property - def disabled_collision_pairs(self): - # badly modeled gripper collision meshes - return [ - ["plate_bottom_link", "pole_middle_0_link"], - ["plate_bottom_link", "pole_middle_2_link"], - ["plate_middle_link", "pole_top_3_link"], - ["plate_middle_link", "pole_kinect_0_link"], - ["plate_middle_link", "pole_kinect_1_link"], - ["plate_middle_link", "pole_top_1_link"], - ["plate_middle_link", "pole_middle_0_link"], - ["plate_middle_link", "pole_middle_1_link"], - ["plate_middle_link", "pole_middle_2_link"], - ["plate_middle_link", "pole_middle_3_link"], - ["plate_top_link", "pole_top_0_link"], - ["plate_top_link", "pole_top_1_link"], - ["plate_top_link", "pole_top_2_link"], - ["plate_top_link", "pole_top_3_link"], - ["pole_top_0_link", "pole_middle_0_link"], - ["pole_top_0_link", "plate_middle_link"], - ["pole_top_1_link", "pole_middle_1_link"], - ["pole_top_2_link", "pole_middle_2_link"], - ["pole_top_2_link", "plate_middle_link"], - ["pole_top_3_link", "pole_middle_3_link"], - ] diff --git a/omnigibson/robots/vx300s.py b/omnigibson/robots/vx300s.py index facb36e27..fccdf4234 100644 --- a/omnigibson/robots/vx300s.py +++ b/omnigibson/robots/vx300s.py @@ -133,14 +133,6 @@ def _default_joint_pos(self): def finger_lengths(self): return {self.default_arm: 0.1} - @property - def disabled_collision_pairs(self): - return [ - ["gripper_bar_link", "left_finger_link"], - ["gripper_bar_link", "right_finger_link"], - ["gripper_bar_link", "gripper_link"], - ] - @property def arm_link_names(self): return { @@ -152,7 +144,6 @@ def arm_link_names(self): "lower_forearm_link", "wrist_link", "gripper_link", - "gripper_bar_link", ] } @@ -171,7 +162,7 @@ def arm_joint_names(self): 
@property def eef_link_names(self): - return {self.default_arm: "ee_gripper_link"} + return {self.default_arm: "eef_link"} @property def finger_link_names(self): @@ -181,31 +172,6 @@ def finger_link_names(self): def finger_joint_names(self): return {self.default_arm: ["left_finger", "right_finger"]} - @property - def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/vx300s/vx300s/vx300s.usd") - - @property - def robot_arm_descriptor_yamls(self): - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/vx300s/vx300s_description.yaml")} - - @property - def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/vx300s/vx300s.urdf") - - @property - def curobo_path(self): - return os.path.join(gm.ASSET_PATH, "models/vx300s/vx300s_description_curobo.yaml") - - @property - def curobo_attached_object_link_names(self): - return {"ee_gripper_link": "attached_object"} - - @property - def eef_usd_path(self): - # return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/vx300s/vx300s_eef.usd")} - raise NotImplementedError - @property def teleop_rotation_offset(self): return {self.default_arm: euler2quat([-math.pi, 0, 0])} From fb0796914e28abf94f5101e258140f732bba0763 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 9 Dec 2024 17:11:05 -0800 Subject: [PATCH 34/61] handle edge cases when importing custom robots --- .../examples/robots/import_custom_robot.py | 90 ++++++++----------- 1 file changed, 37 insertions(+), 53 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 062af0d24..b0c290def 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -14,7 +14,7 @@ import omnigibson.lazy as lazy import omnigibson.utils.transform_utils as T from omnigibson.macros import gm -from omnigibson.utils.asset_conversion_utils import _add_xform_properties, import_og_asset_from_urdf +from omnigibson.utils.asset_conversion_utils import _add_xform_properties, import_og_asset_from_urdf, find_all_prim_children_with_type from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.usd_utils import create_joint, create_primitive_mesh @@ -314,26 +314,6 @@ def find_articulation_root_prim(root_prim): ) -def find_all_prim_children_with_type(prim_type, root_prim): - """ - Recursively searches children of @root_prim to find all instances of prim that satisfy type @prim_type - - Args: - prim_type (str): Type of the prim to search - root_prim (Usd.Prim): Root prim to search - - Returns: - list of Usd.Prim: All found prims whose prim type includes @prim_type - """ - found_prims = [] - for child in root_prim.GetChildren(): - if prim_type in child.GetTypeName(): - found_prims.append(child) - found_prims += find_all_prim_children_with_type(prim_type=prim_type, root_prim=child) - - return found_prims - - def make_joint_fixed(stage, root_prim, joint_name): """ Converts a revolute / prismatic joint @joint_name into a fixed joint @@ -471,7 +451,7 @@ def get_joint_upper_limit(root_prim, joint_name): joint_prims = find_all_articulated_joints(robot_prim) all_joint_names = [joint_prim.GetName() for joint_prim in joint_prims] retract_cfg = curobo_cfg.default_qpos - lock_joints = curobo_cfg.lock_joints.to_dict() + lock_joints = curobo_cfg.lock_joints.to_dict() if curobo_cfg.lock_joints else {} if is_holonomic: # Move the final six joints to the beginning, since the holonomic joints are added at the end all_joint_names = 
list(reversed(all_joint_names[-6:])) + all_joint_names[:-6] @@ -591,36 +571,39 @@ def import_custom_robot(config): stage = lazy.omni.isaac.core.utils.stage.get_current_stage() # Add cameras, lidars, and visual spheres - for eef_vis_info in cfg.eef_vis_links: - add_sensor( - stage=stage, - root_prim=prim, - sensor_type="VisualSphere", - link_name=eef_vis_info.link, - parent_link_name=eef_vis_info.parent_link, - pos_offset=eef_vis_info.offset.position, - ori_offset=eef_vis_info.offset.orientation, - ) - for camera_info in cfg.camera_links: - add_sensor( - stage=stage, - root_prim=prim, - sensor_type="Camera", - link_name=camera_info.link, - parent_link_name=camera_info.parent_link, - pos_offset=camera_info.offset.position, - ori_offset=camera_info.offset.orientation, - ) - for lidar_info in cfg.lidar_links: - add_sensor( - stage=stage, - root_prim=prim, - sensor_type="Lidar", - link_name=lidar_info.link, - parent_link_name=lidar_info.parent_link, - pos_offset=lidar_info.offset.position, - ori_offset=lidar_info.offset.orientation, - ) + if cfg.eef_vis_links: + for eef_vis_info in cfg.eef_vis_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="VisualSphere", + link_name=eef_vis_info.link, + parent_link_name=eef_vis_info.parent_link, + pos_offset=eef_vis_info.offset.position, + ori_offset=eef_vis_info.offset.orientation, + ) + if cfg.camera_links: + for camera_info in cfg.camera_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="Camera", + link_name=camera_info.link, + parent_link_name=camera_info.parent_link, + pos_offset=camera_info.offset.position, + ori_offset=camera_info.offset.orientation, + ) + if cfg.lidar_links: + for lidar_info in cfg.lidar_links: + add_sensor( + stage=stage, + root_prim=prim, + sensor_type="Lidar", + link_name=lidar_info.link, + parent_link_name=lidar_info.parent_link, + pos_offset=lidar_info.offset.position, + ori_offset=lidar_info.offset.orientation, + ) # Make wheels sphere approximations if requested if cfg.base_motion.use_sphere_wheels: @@ -699,7 +682,7 @@ def import_custom_robot(config): robot_prim=prim, root_link=root_prim_name, curobo_cfg=cfg.curobo, - save_dir="/".join(usd_path.split("/")[:-2]), + save_dir="/".join(usd_path.split("/")[:-2]) + "/curobo", is_holonomic=cfg.base_motion.use_holonomic_joints, ) @@ -710,6 +693,7 @@ def import_custom_robot(config): og.sim.render() + # TODO: Create configs and import all robots # TODO: Make manual updates to the new robots so that they match the original robots # TODO: Add function to merge specified fixed links and / or remove extraneous ones From 878ad6c2c28d263a11cbee3a22f03c96831cf07e Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 9 Dec 2024 17:11:30 -0800 Subject: [PATCH 35/61] remove todos --- omnigibson/examples/robots/import_custom_robot.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index b0c290def..9556ef5a7 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -693,11 +693,5 @@ def import_custom_robot(config): og.sim.render() - -# TODO: Create configs and import all robots -# TODO: Make manual updates to the new robots so that they match the original robots -# TODO: Add function to merge specified fixed links and / or remove extraneous ones -# TODO: Import textures properly - if __name__ == "__main__": import_custom_robot() From 
5f20029d8f6214280ae564be51b93c0d75c734c4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 01:12:00 +0000 Subject: [PATCH 36/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/examples/robots/import_custom_robot.py | 6 +++++- omnigibson/robots/manipulation_robot.py | 6 ++++-- omnigibson/robots/robot_base.py | 5 +---- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 9556ef5a7..9e610bb59 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -14,7 +14,11 @@ import omnigibson.lazy as lazy import omnigibson.utils.transform_utils as T from omnigibson.macros import gm -from omnigibson.utils.asset_conversion_utils import _add_xform_properties, import_og_asset_from_urdf, find_all_prim_children_with_type +from omnigibson.utils.asset_conversion_utils import ( + _add_xform_properties, + find_all_prim_children_with_type, + import_og_asset_from_urdf, +) from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.usd_utils import create_joint, create_primitive_mesh diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 9a6709135..36593ec08 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -1,11 +1,11 @@ import math +import os from abc import abstractmethod from collections import namedtuple from typing import Literal import networkx as nx import torch as th -import os import omnigibson as og import omnigibson.lazy as lazy @@ -987,7 +987,9 @@ def curobo_path(self): # By default, sets the standardized path model = self.model_name.lower() return { - emb_sel: os.path.join(gm.ASSET_PATH, f"models/{model}/curobo/{model}_description_curobo_{emb_sel.value}.yaml") + emb_sel: os.path.join( + gm.ASSET_PATH, f"models/{model}/curobo/{model}_description_curobo_{emb_sel.value}.yaml" + ) for emb_sel in CuroboEmbodimentSelection } diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 8df0d29c1..834b9bbb8 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -18,10 +18,7 @@ from omnigibson.utils.gym_utils import GymObservable from omnigibson.utils.numpy_utils import NumpyTypes from omnigibson.utils.python_utils import classproperty, merge_nested_dicts -from omnigibson.utils.usd_utils import ( - ControllableObjectViewAPI, - absolute_prim_path_to_scene_relative, -) +from omnigibson.utils.usd_utils import ControllableObjectViewAPI, absolute_prim_path_to_scene_relative from omnigibson.utils.vision_utils import segmentation_to_rgb # Global dicts that will contain mappings From 117b11c9278129d6602d8c810adecc6da3522eaf Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 9 Dec 2024 17:58:54 -0800 Subject: [PATCH 37/61] update example config --- .../examples/robots/import_custom_robot.py | 328 ++++++++++++++++-- 1 file changed, 295 insertions(+), 33 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 9556ef5a7..c142a1036 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -26,47 +26,309 @@ Imports an externally-defined robot URDF asset into an 
OmniGibson-compatible USD format and saves the imported asset files to the external dataset directory (gm.EXTERNAL_DATASET_PATH) -Note that @config is expected to follow the following format (fetch config shown as an example): +Note that @config is expected to follow the following format (R1 config shown as an example): \b -urdf_path: XXX # (str) Absolute path to robot URDF to import -name: XXX # (str) Name to assign to robot -collision_method: # (str) [coacd, convex, or null] collision decomposition method -hull_count: # (int) max hull count to use during decomposition, only relevant for coacd -headless: # (bool) if set, run without GUI -overwrite: # (bool) if set, overwrite any existing files -visual_only_links: # (list of str) links that will have any associated collision meshes removed - - laser_link - - estop_link -sphere_links: # (list of str) links that should have sphere collision approximation (eg: wheels) - - l_wheel_link - - r_wheel_link +urdf_path: r1_pro_source.urdf # (str) Absolute path to robot URDF to import +name: r1 # (str) Name to assign to robot +headless: false # (bool) if set, run without GUI +overwrite: true # (bool) if set, overwrite any existing files +merge_fixed_joints: false # (bool) whether to merge fixed joints in the robot hierarchy or not +base_motion: + wheel_links: # (list of str): links corresponding to wheels + - wheel_link1 + - wheel_link2 + - wheel_link3 + wheel_joints: # (list of str): joints corresponding to wheel motion + - servo_joint1 + - servo_joint2 + - servo_joint3 + - wheel_joint1 + - wheel_joint2 + - wheel_joint3 + use_sphere_wheels: true # (bool) whether to use sphere approximation for wheels (better stability) + use_holonomic_joints: true # (bool) whether to use joints to approximate a holonomic base. In this case, all + # wheel-related joints will be made into fixed joints, and 6 additional + # "virtual" joints will be added to the robot's base capturing 6DOF movement, + # with the (x,y,rz) joints being controllable by motors +collision: + decompose_method: coacd # (str) [coacd, convex, or null] collision decomposition method + hull_count: 8 # (int) per-mesh max hull count to use during decomposition, only relevant for coacd + coacd_links: [] # (list of str): links that should use CoACD to decompose collision meshes + convex_links: # (list of str): links that should use convex hull to decompose collision meshes + - base_link + - wheel_link1 + - wheel_link2 + - wheel_link3 + - torso_link1 + - torso_link2 + - torso_link3 + - torso_link4 + - left_arm_link1 + - left_arm_link4 + - left_arm_link5 + - right_arm_link1 + - right_arm_link4 + - right_arm_link5 + no_decompose_links: [] # (list of str): links that should not have any post-processing done to them + no_collision_links: # (list of str) links that will have any associated collision meshes removed + - servo_link1 + - servo_link2 + - servo_link3 +eef_vis_links: # (list of dict) information for adding cameras to robot + - link: left_eef_link # same format as @camera_links + parent_link: left_arm_link6 + offset: + position: [0, 0, 0.06] + orientation: [0, 0, 0, 1] + - link: right_eef_link # same format as @camera_links + parent_link: right_arm_link6 + offset: + position: [0, 0, 0.06] + orientation: [0, 0, 0, 1] camera_links: # (list of dict) information for adding cameras to robot - - link: eyes # (str) link name to add camera. Must exist if @parent_link is null, else will be + - link: eyes # (str) link name to add camera. 
Must exist if @parent_link is null, else will be # added as a child of the parent - parent_link: null # (str) optional parent link to use if adding new link - offset: # (dict) local pos,ori offset values. if @parent_link is specified, defines offset - # between @parent_link and @link specified in @parent_link's frame. + parent_link: torso_link4 # (str) optional parent link to use if adding new link + offset: # (dict) local pos,ori offset values. if @parent_link is specified, defines offset + # between @parent_link and @link specified in @parent_link's frame. # Otherwise, specifies offset of generated prim relative to @link's frame - position: [0, 0, 0] # (3-tuple) (x,y,z) offset - orientation: [0, 0, 0, 1.0] # (4-tuple) (x,y,z,w) offset - - link: wrist + position: [0.0732, 0, 0.4525] # (3-tuple) (x,y,z) offset -- this is done BEFORE the rotation + orientation: [0.4056, -0.4056, -0.5792, 0.5792] # (4-tuple) (x,y,z,w) offset + - link: left_eef_link parent_link: null offset: - position: [0, 0, 0] - orientation: [0, 0, 0, 1.0] -lidar_links: # (list of dict) information for adding cameras to robot - - link: laser_link # same format as @camera_links + position: [0.05, 0, -0.05] + orientation: [-0.7011, -0.7011, -0.0923, -0.0923] + - link: right_eef_link parent_link: null offset: - position: [0, 0, 0] - orientation: [0, 0, 0, 1.0] -eef_vis_links: # (list of dict) information for adding cameras to robot - - link: gripper_vis_link # same format as @camera_links - parent_link: gripper_link - offset: - position: [0, 0, 0.1] - orientation: [0, 0, 0, 1.0] + position: [0.05, 0, -0.05] + orientation: [-0.7011, -0.7011, -0.0923, -0.0923] +lidar_links: [] # (list of dict) information for adding cameras to robot +curobo: + eef_to_gripper_info: # (dict) Maps EEF link name to corresponding gripper links / joints + right_eef_link: + links: ["right_gripper_link1", "right_gripper_link2"] + joints: ["right_gripper_axis1", "right_gripper_axis2"] + left_eef_link: + links: ["left_gripper_link1", "left_gripper_link2"] + joints: ["left_gripper_axis1", "left_gripper_axis2"] + flip_joint_limits: [] # (list of str) any joints that have a negative axis specified in the + # source URDF + lock_joints: {} # (dict) Maps joint name to "locked" joint configuration. Any joints + # specified here will not be considered active when motion planning + # NOTE: All gripper joints and non-controllable holonomic joints + # will automatically be added here + self_collision_ignore: # (dict) Maps link name to list of other ignore links to ignore collisions + # with. 
Note that bi-directional specification is not necessary, + # e.g.: "torso_link1" does not need to be specified in + # "torso_link2"'s list if "torso_link2" is already specified in + # "torso_link1"'s list + base_link: ["torso_link1", "wheel_link1", "wheel_link2", "wheel_link3"] + torso_link1: ["torso_link2"] + torso_link2: ["torso_link3", "torso_link4"] + torso_link3: ["torso_link4"] + torso_link4: ["left_arm_link1", "right_arm_link1", "left_arm_link2", "right_arm_link2"] + left_arm_link1: ["left_arm_link2"] + left_arm_link2: ["left_arm_link3"] + left_arm_link3: ["left_arm_link4"] + left_arm_link4: ["left_arm_link5"] + left_arm_link5: ["left_arm_link6"] + left_arm_link6: ["left_gripper_link1", "left_gripper_link2"] + right_arm_link1: ["right_arm_link2"] + right_arm_link2: ["right_arm_link3"] + right_arm_link3: ["right_arm_link4"] + right_arm_link4: ["right_arm_link5"] + right_arm_link5: ["right_arm_link6"] + right_arm_link6: ["right_gripper_link1", "right_gripper_link2"] + left_gripper_link1: ["left_gripper_link2"] + right_gripper_link1: ["right_gripper_link2"] + collision_spheres: # (dict) Maps link name to list of collision sphere representations, + # where each sphere is defined by its (x,y,z) "center" and "radius" + # values. This defines the collision geometry during motion planning + base_link: + - "center": [-0.009, -0.094, 0.131] + "radius": 0.09128 + - "center": [-0.021, 0.087, 0.121] + "radius": 0.0906 + - "center": [0.019, 0.137, 0.198] + "radius": 0.07971 + - "center": [0.019, -0.14, 0.209] + "radius": 0.07563 + - "center": [0.007, -0.018, 0.115] + "radius": 0.08448 + - "center": [0.119, -0.176, 0.209] + "radius": 0.05998 + - "center": [0.137, 0.118, 0.208] + "radius": 0.05862 + - "center": [-0.152, -0.049, 0.204] + "radius": 0.05454 + torso_link1: + - "center": [-0.001, -0.014, -0.057] + "radius": 0.1 + - "center": [-0.001, -0.127, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.219, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.29, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.375, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.419, -0.064] + "radius": 0.07 + torso_link2: + - "center": [-0.001, -0.086, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.194, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.31, -0.064] + "radius": 0.07 + torso_link4: + - "center": [0.005, -0.001, 0.062] + "radius": 0.1 + - "center": [0.005, -0.001, 0.245] + "radius": 0.15 + - "center": [0.005, -0.001, 0.458] + "radius": 0.1 + - "center": [0.002, 0.126, 0.305] + "radius": 0.08 + - "center": [0.002, -0.126, 0.305] + "radius": 0.08 + left_arm_link1: + - "center": [0.001, 0.0, 0.069] + "radius": 0.06 + left_arm_link2: + - "center": [-0.062, -0.016, -0.03] + "radius": 0.06 + - "center": [-0.135, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.224, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.31, -0.022, -0.03] + "radius": 0.06 + - "center": [-0.34, -0.027, -0.03] + "radius": 0.06 + left_arm_link3: + - "center": [0.037, -0.058, -0.044] + "radius": 0.05 + - "center": [0.095, -0.08, -0.044] + "radius": 0.03 + - "center": [0.135, -0.08, -0.043] + "radius": 0.03 + - "center": [0.176, -0.08, -0.043] + "radius": 0.03 + - "center": [0.22, -0.077, -0.043] + "radius": 0.03 + left_arm_link4: + - "center": [-0.002, 0.0, 0.276] + "radius": 0.04 + left_arm_link5: + - "center": [0.059, -0.001, -0.021] + "radius": 0.035 + left_arm_link6: + - "center": [0.0, 0.0, 0.04] + "radius": 0.04 + right_arm_link1: + - "center": [0.001, 0.0, 0.069] + "radius": 0.06 + right_arm_link2: + - "center": 
[-0.062, -0.016, -0.03] + "radius": 0.06 + - "center": [-0.135, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.224, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.31, -0.022, -0.03] + "radius": 0.06 + - "center": [-0.34, -0.027, -0.03] + "radius": 0.06 + right_arm_link3: + - "center": [0.037, -0.058, -0.044] + "radius": 0.05 + - "center": [0.095, -0.08, -0.044] + "radius": 0.03 + - "center": [0.135, -0.08, -0.043] + "radius": 0.03 + - "center": [0.176, -0.08, -0.043] + "radius": 0.03 + - "center": [0.22, -0.077, -0.043] + "radius": 0.03 + right_arm_link4: + - "center": [-0.002, 0.0, 0.276] + "radius": 0.04 + right_arm_link5: + - "center": [0.059, -0.001, -0.021] + "radius": 0.035 + right_arm_link6: + - "center": [-0.0, 0.0, 0.04] + "radius": 0.035 + wheel_link1: + - "center": [-0.0, 0.0, -0.03] + "radius": 0.06 + wheel_link2: + - "center": [0.0, 0.0, 0.03] + "radius": 0.06 + wheel_link3: + - "center": [0.0, 0.0, -0.03] + "radius": 0.06 + left_gripper_link1: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + left_gripper_link2: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + right_gripper_link1: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, -0.0, -0.003] + "radius": 0.007 + - "center": [0.005, -0.0, -0.005] + "radius": 0.005 + - "center": [0.02, -0.0, -0.007] + "radius": 0.003 + right_gripper_link2: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + default_qpos: # (list of float): Default joint configuration + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 1.906 + - 1.906 + - -0.991 + - -0.991 + - 1.571 + - 1.571 + - 0.915 + - 0.915 + - -1.571 + - -1.571 + - 0.03 + - 0.03 + - 0.03 + - 0.03 """ @@ -570,7 +832,7 @@ def import_custom_robot(config): # Get current stage stage = lazy.omni.isaac.core.utils.stage.get_current_stage() - # Add cameras, lidars, and visual spheres + # Add visual spheres, cameras, and lidars if cfg.eef_vis_links: for eef_vis_info in cfg.eef_vis_links: add_sensor( From b78a2787e9e5b1352c1738558f9d10fc5eaa7004 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 9 Dec 2024 17:59:27 -0800 Subject: [PATCH 38/61] update custom robot import tutorial --- docs/tutorials/custom_robot_import.md | 1147 +++++++++++++++++++++---- 1 file changed, 990 insertions(+), 157 deletions(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index 87800952f..d7d241342 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -13,7 +13,341 @@ In order to import a custom robot, You will need to first prepare your robot mod Below, we will walk through each step for importing a new custom robot into **OmniGibson**. We use [Hello Robotic](https://hello-robot.com/)'s [Stretch](https://hello-robot.com/stretch-3-product) robot as an example, taken directly from their [official repo](https://github.com/hello-robot/stretch_urdf). ## Convert from URDF to USD +There are two ways to convert our raw robot URDF into an OmniGibson-compatible USD file. 
The first is by using our integrated script, while the other method is using IsaacSim's native URDF-to-USD converter via the GUI. We highly recommend our script version, as it both wraps the same functionality from the underlying IsaacSim converter as well as providing useful features such as automatic convex decomposition of collision meshes, programmatic adding of sensors, etc. +### Option 1: Using Our 1-Liner Script (Recommended) +Our custom robot importer [`import_custom_robot.py`](https://github.com/StanfordVL/OmniGibson/tree/main/omnigibson/examples/robots/import_custom_robot.py) wraps the native URDF Importer from Isaac Sim to convert our robot URDF model into USD format. Please see the following steps for running this script: + +1. All that is required is a single source config yaml file that dictates how the URDF should be post-processed when being converted into a USD. You can run `import_custom_robot.py --help` to see a detailed example configuration used, which is also shown below (`r1_pro_source_config.yaml`) for your convenience. +2. All output files are written to `/objects/robot/`. Please copy / move this directory to `/objects/` so it can be imported into **OmniGibson**. +3. That's it! + +Some notes about the importing script: + +- The importing procedure can be summarized as follows: + + 1. Copy the raw URDF file + any dependencies into the `gm.EXTERNAL_DATASET_PATH` directory + 2. Updates the URDF + meshes to ensure all scaling is positive + 3. Generates collision meshes for each robot link (as specified by the source config) + 4. Generates metadata necessary for **OmniGibson** + 5. Converts the post-processed URDF into USD (technically, USDA) format + 6. Generates relevant semantic metadata for the given object, given its category + 7. Generates visual spheres, cameras, and lidars (in that order, as specified by the source config) + 8. Updates wheel approximations (as specified by the source config) + 9. Removes all friction from all joints + 10. Generates holonomic base joints (as specified by the source config) + 11. Generates configuration files needed for CuRobo motion planning (as specified by the source config) + +- If `merge_fixed_joints=true`, all robot links that are connected to a parent link via a fixed joint will be merged directly into the parent joint. This means that the USD will _not_ contain these links! However, this collapsing occurs during the final URDF to USD conversion, meaning that these links _can_ be referenced beforehand (e.g.: when specifying desired per-link collision decomposition behavior) + +??? code "r1_pro_source_config.yaml" + ``` yaml linenums="1" + + urdf_path: r1_pro_source.urdf # (str) Absolute path to robot URDF to import + name: r1 # (str) Name to assign to robot + headless: false # (bool) if set, run without GUI + overwrite: true # (bool) if set, overwrite any existing files + merge_fixed_joints: false # (bool) whether to merge fixed joints in the robot hierarchy or not + base_motion: + wheel_links: # (list of str): links corresponding to wheels + - wheel_link1 + - wheel_link2 + - wheel_link3 + wheel_joints: # (list of str): joints corresponding to wheel motion + - servo_joint1 + - servo_joint2 + - servo_joint3 + - wheel_joint1 + - wheel_joint2 + - wheel_joint3 + use_sphere_wheels: true # (bool) whether to use sphere approximation for wheels (better stability) + use_holonomic_joints: true # (bool) whether to use joints to approximate a holonomic base. 
In this case, all + # wheel-related joints will be made into fixed joints, and 6 additional + # "virtual" joints will be added to the robot's base capturing 6DOF movement, + # with the (x,y,rz) joints being controllable by motors + collision: + decompose_method: coacd # (str) [coacd, convex, or null] collision decomposition method + hull_count: 8 # (int) per-mesh max hull count to use during decomposition, only relevant for coacd + coacd_links: [] # (list of str): links that should use CoACD to decompose collision meshes + convex_links: # (list of str): links that should use convex hull to decompose collision meshes + - base_link + - wheel_link1 + - wheel_link2 + - wheel_link3 + - torso_link1 + - torso_link2 + - torso_link3 + - torso_link4 + - left_arm_link1 + - left_arm_link4 + - left_arm_link5 + - right_arm_link1 + - right_arm_link4 + - right_arm_link5 + no_decompose_links: [] # (list of str): links that should not have any post-processing done to them + no_collision_links: # (list of str) links that will have any associated collision meshes removed + - servo_link1 + - servo_link2 + - servo_link3 + eef_vis_links: # (list of dict) information for adding cameras to robot + - link: left_eef_link # same format as @camera_links + parent_link: left_arm_link6 + offset: + position: [0, 0, 0.06] + orientation: [0, 0, 0, 1] + - link: right_eef_link # same format as @camera_links + parent_link: right_arm_link6 + offset: + position: [0, 0, 0.06] + orientation: [0, 0, 0, 1] + camera_links: # (list of dict) information for adding cameras to robot + - link: eyes # (str) link name to add camera. Must exist if @parent_link is null, else will be + # added as a child of the parent + parent_link: torso_link4 # (str) optional parent link to use if adding new link + offset: # (dict) local pos,ori offset values. if @parent_link is specified, defines offset + # between @parent_link and @link specified in @parent_link's frame. + # Otherwise, specifies offset of generated prim relative to @link's frame + position: [0.0732, 0, 0.4525] # (3-tuple) (x,y,z) offset -- this is done BEFORE the rotation + orientation: [0.4056, -0.4056, -0.5792, 0.5792] # (4-tuple) (x,y,z,w) offset + - link: left_eef_link + parent_link: null + offset: + position: [0.05, 0, -0.05] + orientation: [-0.7011, -0.7011, -0.0923, -0.0923] + - link: right_eef_link + parent_link: null + offset: + position: [0.05, 0, -0.05] + orientation: [-0.7011, -0.7011, -0.0923, -0.0923] + lidar_links: [] # (list of dict) information for adding cameras to robot + curobo: + eef_to_gripper_info: # (dict) Maps EEF link name to corresponding gripper links / joints + right_eef_link: + links: ["right_gripper_link1", "right_gripper_link2"] + joints: ["right_gripper_axis1", "right_gripper_axis2"] + left_eef_link: + links: ["left_gripper_link1", "left_gripper_link2"] + joints: ["left_gripper_axis1", "left_gripper_axis2"] + flip_joint_limits: [] # (list of str) any joints that have a negative axis specified in the + # source URDF + lock_joints: {} # (dict) Maps joint name to "locked" joint configuration. Any joints + # specified here will not be considered active when motion planning + # NOTE: All gripper joints and non-controllable holonomic joints + # will automatically be added here + self_collision_ignore: # (dict) Maps link name to list of other ignore links to ignore collisions + # with. 
Note that bi-directional specification is not necessary, + # e.g.: "torso_link1" does not need to be specified in + # "torso_link2"'s list if "torso_link2" is already specified in + # "torso_link1"'s list + base_link: ["torso_link1", "wheel_link1", "wheel_link2", "wheel_link3"] + torso_link1: ["torso_link2"] + torso_link2: ["torso_link3", "torso_link4"] + torso_link3: ["torso_link4"] + torso_link4: ["left_arm_link1", "right_arm_link1", "left_arm_link2", "right_arm_link2"] + left_arm_link1: ["left_arm_link2"] + left_arm_link2: ["left_arm_link3"] + left_arm_link3: ["left_arm_link4"] + left_arm_link4: ["left_arm_link5"] + left_arm_link5: ["left_arm_link6"] + left_arm_link6: ["left_gripper_link1", "left_gripper_link2"] + right_arm_link1: ["right_arm_link2"] + right_arm_link2: ["right_arm_link3"] + right_arm_link3: ["right_arm_link4"] + right_arm_link4: ["right_arm_link5"] + right_arm_link5: ["right_arm_link6"] + right_arm_link6: ["right_gripper_link1", "right_gripper_link2"] + left_gripper_link1: ["left_gripper_link2"] + right_gripper_link1: ["right_gripper_link2"] + collision_spheres: # (dict) Maps link name to list of collision sphere representations, + # where each sphere is defined by its (x,y,z) "center" and "radius" + # values. This defines the collision geometry during motion planning + base_link: + - "center": [-0.009, -0.094, 0.131] + "radius": 0.09128 + - "center": [-0.021, 0.087, 0.121] + "radius": 0.0906 + - "center": [0.019, 0.137, 0.198] + "radius": 0.07971 + - "center": [0.019, -0.14, 0.209] + "radius": 0.07563 + - "center": [0.007, -0.018, 0.115] + "radius": 0.08448 + - "center": [0.119, -0.176, 0.209] + "radius": 0.05998 + - "center": [0.137, 0.118, 0.208] + "radius": 0.05862 + - "center": [-0.152, -0.049, 0.204] + "radius": 0.05454 + torso_link1: + - "center": [-0.001, -0.014, -0.057] + "radius": 0.1 + - "center": [-0.001, -0.127, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.219, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.29, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.375, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.419, -0.064] + "radius": 0.07 + torso_link2: + - "center": [-0.001, -0.086, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.194, -0.064] + "radius": 0.07 + - "center": [-0.001, -0.31, -0.064] + "radius": 0.07 + torso_link4: + - "center": [0.005, -0.001, 0.062] + "radius": 0.1 + - "center": [0.005, -0.001, 0.245] + "radius": 0.15 + - "center": [0.005, -0.001, 0.458] + "radius": 0.1 + - "center": [0.002, 0.126, 0.305] + "radius": 0.08 + - "center": [0.002, -0.126, 0.305] + "radius": 0.08 + left_arm_link1: + - "center": [0.001, 0.0, 0.069] + "radius": 0.06 + left_arm_link2: + - "center": [-0.062, -0.016, -0.03] + "radius": 0.06 + - "center": [-0.135, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.224, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.31, -0.022, -0.03] + "radius": 0.06 + - "center": [-0.34, -0.027, -0.03] + "radius": 0.06 + left_arm_link3: + - "center": [0.037, -0.058, -0.044] + "radius": 0.05 + - "center": [0.095, -0.08, -0.044] + "radius": 0.03 + - "center": [0.135, -0.08, -0.043] + "radius": 0.03 + - "center": [0.176, -0.08, -0.043] + "radius": 0.03 + - "center": [0.22, -0.077, -0.043] + "radius": 0.03 + left_arm_link4: + - "center": [-0.002, 0.0, 0.276] + "radius": 0.04 + left_arm_link5: + - "center": [0.059, -0.001, -0.021] + "radius": 0.035 + left_arm_link6: + - "center": [0.0, 0.0, 0.04] + "radius": 0.04 + right_arm_link1: + - "center": [0.001, 0.0, 0.069] + "radius": 0.06 + right_arm_link2: + - "center": 
[-0.062, -0.016, -0.03] + "radius": 0.06 + - "center": [-0.135, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.224, -0.019, -0.03] + "radius": 0.06 + - "center": [-0.31, -0.022, -0.03] + "radius": 0.06 + - "center": [-0.34, -0.027, -0.03] + "radius": 0.06 + right_arm_link3: + - "center": [0.037, -0.058, -0.044] + "radius": 0.05 + - "center": [0.095, -0.08, -0.044] + "radius": 0.03 + - "center": [0.135, -0.08, -0.043] + "radius": 0.03 + - "center": [0.176, -0.08, -0.043] + "radius": 0.03 + - "center": [0.22, -0.077, -0.043] + "radius": 0.03 + right_arm_link4: + - "center": [-0.002, 0.0, 0.276] + "radius": 0.04 + right_arm_link5: + - "center": [0.059, -0.001, -0.021] + "radius": 0.035 + right_arm_link6: + - "center": [-0.0, 0.0, 0.04] + "radius": 0.035 + wheel_link1: + - "center": [-0.0, 0.0, -0.03] + "radius": 0.06 + wheel_link2: + - "center": [0.0, 0.0, 0.03] + "radius": 0.06 + wheel_link3: + - "center": [0.0, 0.0, -0.03] + "radius": 0.06 + left_gripper_link1: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + left_gripper_link2: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + right_gripper_link1: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, -0.0, -0.003] + "radius": 0.007 + - "center": [0.005, -0.0, -0.005] + "radius": 0.005 + - "center": [0.02, -0.0, -0.007] + "radius": 0.003 + right_gripper_link2: + - "center": [-0.03, 0.0, -0.002] + "radius": 0.008 + - "center": [-0.01, 0.0, -0.003] + "radius": 0.007 + - "center": [0.005, 0.0, -0.005] + "radius": 0.005 + - "center": [0.02, 0.0, -0.007] + "radius": 0.003 + default_qpos: # (list of float): Default joint configuration + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 1.906 + - 1.906 + - -0.991 + - -0.991 + - 1.571 + - 1.571 + - 0.915 + - 0.915 + - -1.571 + - -1.571 + - 0.03 + - 0.03 + - 0.03 + - 0.03 + + ``` + + +### Option 2: Using IsaacSim's Native URDF-to-USD Converter In this section, we will be using the URDF Importer in native Isaac Sim to convert our robot URDF model into USD format. Before we get started, it is strongly recommended that you read through the official [URDF Importer Tutorial](https://docs.omniverse.nvidia.com/isaacsim/latest/features/environment_setup/ext_omni_isaac_urdf.html). 1. Create a directory with the name of the new robot under `/models`. This is where all of our robot models live. In our case, we created a directory named `stretch`. @@ -75,7 +409,7 @@ Now that we have the USD model, let's open it up in Isaac Sim and inspect it. ![Stretch Robot Import 5b](../assets/tutorials/stretch-import-5b.png) ![Stretch Robot Import 5c](../assets/tutorials/stretch-import-5c.png) -6. Finally, save your USD! Note that you need to remove the fixed link created at step 4 before saving. +6. Finally, save your USD (as a USDA file)! Note that you need to remove the fixed link created at step 4 before saving. Please save the file to `/models//usd/.usda`. ## Create the Robot Class Now that we have the USD file for the robot, let's write our own robot class. For more information please refer to the [Robot module](../modules/robots.md). @@ -84,7 +418,7 @@ Now that we have the USD file for the robot, let's write our own robot class. Fo 2. 
Determine which robot interfaces it should inherit. We currently support three modular interfaces that can be used together: [`LocomotionRobot`](../reference/robots/locomotion_robot.md) for robots whose bases can move (and a more specific [`TwoWheelRobot`](../reference/robots/two_wheel_robot.md) for locomotive robots that only have two wheels), [`ManipulationRobot`](../reference/robots/manipulation_robot.md) for robots equipped with one or more arms and grippers, and [`ActiveCameraRobot`](../reference/robots/active_camera_robot.md) for robots with a controllable head or camera mount. In our case, our robot is a mobile manipulator with a moveable camera mount, so our Python class inherits all three interfaces. -3. You must implement all required abstract properties defined by each respective inherited robot interface. In the most simple case, this is usually simply defining relevant metadata from the original robot source files, such as relevant joint / link names and absolute paths to the corresponding robot URDF and USD files. Please see our annotated `stretch.py` module below which serves as a good starting point that you can modify. +3. You must implement all required abstract properties defined by each respective inherited robot interface. In the most simple case, this is usually simply defining relevant metadata from the original robot source files, such as relevant joint / link names and absolute paths to the corresponding robot URDF and USD files. Please see our annotated `stretch.py` module below which serves as a good starting point that you can modify. Note that **OmniGibson** automatically looks for your robot file at `/models//usd/.usda`, so if it exists elsewhere please specify the path via the `usd_path` property in the robot class. ??? note "Optional properties" @@ -102,7 +436,7 @@ Now that we have the USD file for the robot, let's write our own robot class. Fo Best practise of setting these points is to load the robot into Isaac Sim, and create a small sphere under the target link of the end effector. Then drag the sphere to the desired location (which should be just right outside the mesh of the link) or by setting the position in the `Property` tab. After you get a desired relative pose to the link, write down the link name and position in the robot class. -4. If your robot is a manipulation robot, you must additionally define a description .yaml file in order to use our inverse kinematics solver for end-effector control. Our example description file is shown below for our Stretch robot, which you can modify as needed. Place the descriptor file under `/models/`. +4. If your robot is a manipulation robot, you must additionally define a description .yaml file in order to use our CuRobo solver for end-effector motion planning. Our example description file is shown below for our R1 robot, which you can modify as needed. (Note that if you import your robot URDF using our import script, these files are automatically generated for you!). Place the curobo file under `/models//curobo`. 5. In order for **OmniGibson** to register your new robot class internally, you must import the robot class before running the simulation. If your python module exists under `omnigibson/robots`, you can simply add an additional import line in `omnigibson/robots/__init__.py`. Otherwise, in any end use-case script, you can simply import your robot class from your python module at the top of the file. @@ -120,29 +454,24 @@ Now that we have the USD file for the robot, let's write our own robot class. 
Fo class Stretch(ManipulationRobot, TwoWheelRobot, ActiveCameraRobot): """ - Stretch Robot from Hello Robotics + Strech Robot from Hello Robotics Reference: https://hello-robot.com/stretch-3-product """ @property def discrete_action_list(self): - # Only need to define if supporting a discrete set of high-level actions raise NotImplementedError() def _create_discrete_action_space(self): - # Only need to define if @discrete_action_list is defined raise ValueError("Stretch does not support discrete actions!") @property def _raw_controller_order(self): - # Raw controller ordering. Usually determined by general robot kinematics chain - # You can usually simply take a subset of these based on the type of robot interfaces inherited for your robot class + # Ordered by general robot kinematics chain return ["base", "camera", f"arm_{self.default_arm}", f"gripper_{self.default_arm}"] @property def _default_controllers(self): - # Define the default controllers that should be used if no explicit configuration is specified when your robot class is created - # Always call super first controllers = super()._default_controllers @@ -156,126 +485,69 @@ Now that we have the USD file for the robot, let's write our own robot class. Fo @property def _default_joint_pos(self): - # Define the default joint positions for your robot - - return th.tensor([0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0.0, 0, 0, math.pi / 8, math.pi / 8]. dtype=th.float32) + return th.tensor([0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0.0, 0, 0, math.pi / 8, math.pi / 8]) @property def wheel_radius(self): - # Only relevant for TwoWheelRobots. Radius of each wheel return 0.050 @property def wheel_axle_length(self): - # Only relevant for TwoWheelRobots. Distance between the two wheels return 0.330 @property def finger_lengths(self): - # Only relevant for ManipulationRobots. Length of fingers return {self.default_arm: 0.04} @property def assisted_grasp_start_points(self): - # Only relevant for ManipulationRobots. The start points for grasping if using assisted grasping return { self.default_arm: [ - GraspingPoint(link_name="r_gripper_finger_link", position=[0.025, -0.012, 0.0]), - GraspingPoint(link_name="r_gripper_finger_link", position=[-0.025, -0.012, 0.0]), + GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), + GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), ] } @property def assisted_grasp_end_points(self): - # Only relevant for ManipulationRobots. The end points for grasping if using assisted grasping return { self.default_arm: [ - GraspingPoint(link_name="l_gripper_finger_link", position=[0.025, 0.012, 0.0]), - GraspingPoint(link_name="l_gripper_finger_link", position=[-0.025, 0.012, 0.0]), + GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), + GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), ] } @property def disabled_collision_pairs(self): - # Pairs of robot links whose pairwise collisions should be ignored. 
- # Useful for filtering out bad collision modeling in the native robot meshes return [ - ["base_link", "caster_link"], - ["base_link", "link_aruco_left_base"], - ["base_link", "link_aruco_right_base"], - ["base_link", "base_imu"], - ["base_link", "laser"], - ["base_link", "link_left_wheel"], - ["base_link", "link_right_wheel"], - ["base_link", "link_mast"], - ["link_mast", "link_head"], - ["link_head", "link_head_pan"], - ["link_head_pan", "link_head_tilt"], - ["camera_link", "link_head_tilt"], - ["camera_link", "link_head_pan"], - ["link_head_nav_cam", "link_head_tilt"], - ["link_head_nav_cam", "link_head_pan"], - ["link_mast", "link_lift"], - ["link_lift", "link_aruco_shoulder"], - ["link_lift", "link_arm_l4"], ["link_lift", "link_arm_l3"], ["link_lift", "link_arm_l2"], ["link_lift", "link_arm_l1"], - ["link_arm_l4", "link_arm_l3"], - ["link_arm_l4", "link_arm_l2"], - ["link_arm_l4", "link_arm_l1"], - ["link_arm_l3", "link_arm_l2"], + ["link_lift", "link_arm_l0"], ["link_arm_l3", "link_arm_l1"], - ["link_arm_l2", "link_arm_l1"], ["link_arm_l0", "link_arm_l1"], ["link_arm_l0", "link_arm_l2"], ["link_arm_l0", "link_arm_l3"], - ["link_arm_l0", "link_arm_l4"], - ["link_arm_l0", "link_arm_l1"], - ["link_arm_l0", "link_aruco_inner_wrist"], - ["link_arm_l0", "link_aruco_top_wrist"], - ["link_arm_l0", "link_wrist_yaw"], - ["link_arm_l0", "link_wrist_yaw_bottom"], - ["link_arm_l0", "link_wrist_pitch"], - ["link_wrist_yaw_bottom", "link_wrist_pitch"], - ["gripper_camera_link", "link_gripper_s3_body"], - ["link_gripper_s3_body", "link_aruco_d405"], - ["link_gripper_s3_body", "link_gripper_finger_left"], - ["link_gripper_finger_left", "link_aruco_fingertip_left"], - ["link_gripper_finger_left", "link_gripper_fingertip_left"], - ["link_gripper_s3_body", "link_gripper_finger_right"], - ["link_gripper_finger_right", "link_aruco_fingertip_right"], - ["link_gripper_finger_right", "link_gripper_fingertip_right"], - ["respeaker_base", "link_head"], - ["respeaker_base", "link_mast"], ] @property def base_joint_names(self): - # Names of the joints that control the robot's base return ["joint_left_wheel", "joint_right_wheel"] @property def camera_joint_names(self): - # Names of the joints that control the robot's camera / head return ["joint_head_pan", "joint_head_tilt"] @property def arm_link_names(self): - # Names of the links that compose the robot's arm(s) (not including gripper(s)) return { self.default_arm: [ - "link_mast", "link_lift", - "link_arm_l4", "link_arm_l3", "link_arm_l2", "link_arm_l1", "link_arm_l0", - "link_aruco_inner_wrist", - "link_aruco_top_wrist", "link_wrist_yaw", - "link_wrist_yaw_bottom", "link_wrist_pitch", "link_wrist_roll", ] @@ -283,7 +555,6 @@ Now that we have the USD file for the robot, let's write our own robot class. Fo @property def arm_joint_names(self): - # Names of the joints that control the robot's arm(s) (not including gripper(s)) return { self.default_arm: [ "joint_lift", @@ -299,100 +570,662 @@ Now that we have the USD file for the robot, let's write our own robot class. 
Fo @property def eef_link_names(self): - # Name of the link that defines the per-arm end-effector frame - return {self.default_arm: "link_grasp_center"} + return {self.default_arm: "eef_link"} @property def finger_link_names(self): - # Names of the links that compose the robot's gripper(s) - return {self.default_arm: ["link_gripper_finger_left", "link_gripper_finger_right", "link_gripper_fingertip_left", "link_gripper_fingertip_right"]} + return { + self.default_arm: [ + "link_gripper_finger_left", + "link_gripper_finger_right", + ] + } @property def finger_joint_names(self): - # Names of the joints that control the robot's gripper(s) return {self.default_arm: ["joint_gripper_finger_right", "joint_gripper_finger_left"]} - - @property - def usd_path(self): - # Absolute path to the native robot USD file - return os.path.join(gm.ASSET_PATH, "models/stretch/stretch/stretch.usd") - - @property - def urdf_path(self): - # Absolute path to the native robot URDF file - return os.path.join(gm.ASSET_PATH, "models/stretch/stretch.urdf") - - @property - def robot_arm_descriptor_yamls(self): - # Only relevant for ManipulationRobots. Absolute path(s) to the per-arm descriptor files (see Step 4 below) - return {self.default_arm: os.path.join(gm.ASSET_PATH, "models/stretch/stretch_descriptor.yaml")} + ``` -??? code "stretch_descriptor.yaml" +??? code "r1_description_curobo_default.yaml" ``` yaml linenums="1" - # The robot descriptor defines the generalized coordinates and how to map those - # to the underlying URDF dofs. - - api_version: 1.0 - - # Defines the generalized coordinates. Each generalized coordinate is assumed - # to have an entry in the URDF, except when otherwise specified below under - # cspace_urdf_bridge - cspace: - - joint_lift - - joint_arm_l3 - - joint_arm_l2 - - joint_arm_l1 - - joint_arm_l0 - - joint_wrist_yaw - - joint_wrist_pitch - - joint_wrist_roll - - root_link: base_link - subtree_root_link: base_link - - default_q: [ - # Original version - # 0.00, 0.00, 0.00, -1.57, 0.00, 1.50, 0.75 - - # New config - 0, 0, 0, 0, 0, 0, 0, 0 - ] - - # Most dimensions of the cspace have a direct corresponding element - # in the URDF. This list of rules defines how unspecified coordinates - # should be extracted. 
- cspace_to_urdf_rules: [] - - active_task_spaces: - - base_link - - lift_link - - link_mast - - link_lift - - link_arm_l4 - - link_arm_l3 - - link_arm_l2 - - link_arm_l1 - - link_arm_l0 - - link_aruco_inner_wrist - - link_aruco_top_wrist - - link_wrist_yaw - - link_wrist_yaw_bottom - - link_wrist_pitch - - link_wrist_roll - - link_gripper_s3_body - - gripper_camera_link - - link_aruco_d405 - - link_gripper_finger_left - - link_aruco_fingertip_left - - link_gripper_fingertip_left - - link_gripper_finger_right - - link_aruco_fingertip_right - - link_gripper_fingertip_right - - link_grasp_center - - composite_task_spaces: [] + robot_cfg: + base_link: base_footprint_x + collision_link_names: + - base_link + - torso_link1 + - torso_link2 + - torso_link4 + - left_arm_link1 + - left_arm_link2 + - left_arm_link3 + - left_arm_link4 + - left_arm_link5 + - left_arm_link6 + - right_arm_link1 + - right_arm_link2 + - right_arm_link3 + - right_arm_link4 + - right_arm_link5 + - right_arm_link6 + - wheel_link1 + - wheel_link2 + - wheel_link3 + - left_gripper_link1 + - left_gripper_link2 + - right_gripper_link1 + - right_gripper_link2 + - attached_object_right_eef_link + - attached_object_left_eef_link + collision_sphere_buffer: 0.002 + collision_spheres: + base_link: + - center: + - -0.009 + - -0.094 + - 0.131 + radius: 0.09128 + - center: + - -0.021 + - 0.087 + - 0.121 + radius: 0.0906 + - center: + - 0.019 + - 0.137 + - 0.198 + radius: 0.07971 + - center: + - 0.019 + - -0.14 + - 0.209 + radius: 0.07563 + - center: + - 0.007 + - -0.018 + - 0.115 + radius: 0.08448 + - center: + - 0.119 + - -0.176 + - 0.209 + radius: 0.05998 + - center: + - 0.137 + - 0.118 + - 0.208 + radius: 0.05862 + - center: + - -0.152 + - -0.049 + - 0.204 + radius: 0.05454 + left_arm_link1: + - center: + - 0.001 + - 0.0 + - 0.069 + radius: 0.06 + left_arm_link2: + - center: + - -0.062 + - -0.016 + - -0.03 + radius: 0.06 + - center: + - -0.135 + - -0.019 + - -0.03 + radius: 0.06 + - center: + - -0.224 + - -0.019 + - -0.03 + radius: 0.06 + - center: + - -0.31 + - -0.022 + - -0.03 + radius: 0.06 + - center: + - -0.34 + - -0.027 + - -0.03 + radius: 0.06 + left_arm_link3: + - center: + - 0.037 + - -0.058 + - -0.044 + radius: 0.05 + - center: + - 0.095 + - -0.08 + - -0.044 + radius: 0.03 + - center: + - 0.135 + - -0.08 + - -0.043 + radius: 0.03 + - center: + - 0.176 + - -0.08 + - -0.043 + radius: 0.03 + - center: + - 0.22 + - -0.077 + - -0.043 + radius: 0.03 + left_arm_link4: + - center: + - -0.002 + - 0.0 + - 0.276 + radius: 0.04 + left_arm_link5: + - center: + - 0.059 + - -0.001 + - -0.021 + radius: 0.035 + left_arm_link6: + - center: + - 0.0 + - 0.0 + - 0.04 + radius: 0.04 + left_gripper_link1: + - center: + - -0.03 + - 0.0 + - -0.002 + radius: 0.008 + - center: + - -0.01 + - 0.0 + - -0.003 + radius: 0.007 + - center: + - 0.005 + - 0.0 + - -0.005 + radius: 0.005 + - center: + - 0.02 + - 0.0 + - -0.007 + radius: 0.003 + left_gripper_link2: + - center: + - -0.03 + - 0.0 + - -0.002 + radius: 0.008 + - center: + - -0.01 + - 0.0 + - -0.003 + radius: 0.007 + - center: + - 0.005 + - 0.0 + - -0.005 + radius: 0.005 + - center: + - 0.02 + - 0.0 + - -0.007 + radius: 0.003 + right_arm_link1: + - center: + - 0.001 + - 0.0 + - 0.069 + radius: 0.06 + right_arm_link2: + - center: + - -0.062 + - -0.016 + - -0.03 + radius: 0.06 + - center: + - -0.135 + - -0.019 + - -0.03 + radius: 0.06 + - center: + - -0.224 + - -0.019 + - -0.03 + radius: 0.06 + - center: + - -0.31 + - -0.022 + - -0.03 + radius: 0.06 + - center: + - -0.34 + - -0.027 + - -0.03 + radius: 
0.06 + right_arm_link3: + - center: + - 0.037 + - -0.058 + - -0.044 + radius: 0.05 + - center: + - 0.095 + - -0.08 + - -0.044 + radius: 0.03 + - center: + - 0.135 + - -0.08 + - -0.043 + radius: 0.03 + - center: + - 0.176 + - -0.08 + - -0.043 + radius: 0.03 + - center: + - 0.22 + - -0.077 + - -0.043 + radius: 0.03 + right_arm_link4: + - center: + - -0.002 + - 0.0 + - 0.276 + radius: 0.04 + right_arm_link5: + - center: + - 0.059 + - -0.001 + - -0.021 + radius: 0.035 + right_arm_link6: + - center: + - -0.0 + - 0.0 + - 0.04 + radius: 0.035 + right_gripper_link1: + - center: + - -0.03 + - 0.0 + - -0.002 + radius: 0.008 + - center: + - -0.01 + - -0.0 + - -0.003 + radius: 0.007 + - center: + - 0.005 + - -0.0 + - -0.005 + radius: 0.005 + - center: + - 0.02 + - -0.0 + - -0.007 + radius: 0.003 + right_gripper_link2: + - center: + - -0.03 + - 0.0 + - -0.002 + radius: 0.008 + - center: + - -0.01 + - 0.0 + - -0.003 + radius: 0.007 + - center: + - 0.005 + - 0.0 + - -0.005 + radius: 0.005 + - center: + - 0.02 + - 0.0 + - -0.007 + radius: 0.003 + torso_link1: + - center: + - -0.001 + - -0.014 + - -0.057 + radius: 0.1 + - center: + - -0.001 + - -0.127 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.219 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.29 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.375 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.419 + - -0.064 + radius: 0.07 + torso_link2: + - center: + - -0.001 + - -0.086 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.194 + - -0.064 + radius: 0.07 + - center: + - -0.001 + - -0.31 + - -0.064 + radius: 0.07 + torso_link4: + - center: + - 0.005 + - -0.001 + - 0.062 + radius: 0.1 + - center: + - 0.005 + - -0.001 + - 0.245 + radius: 0.15 + - center: + - 0.005 + - -0.001 + - 0.458 + radius: 0.1 + - center: + - 0.002 + - 0.126 + - 0.305 + radius: 0.08 + - center: + - 0.002 + - -0.126 + - 0.305 + radius: 0.08 + wheel_link1: + - center: + - -0.0 + - 0.0 + - -0.03 + radius: 0.06 + wheel_link2: + - center: + - 0.0 + - 0.0 + - 0.03 + radius: 0.06 + wheel_link3: + - center: + - 0.0 + - 0.0 + - -0.03 + radius: 0.06 + cspace: + - base_footprint_x_joint + - base_footprint_y_joint + - base_footprint_z_joint + - base_footprint_rx_joint + - base_footprint_ry_joint + - base_footprint_rz_joint + - torso_joint1 + - torso_joint2 + - torso_joint3 + - torso_joint4 + - left_arm_joint1 + - right_arm_joint1 + - left_arm_joint2 + - left_arm_joint3 + - left_arm_joint4 + - left_arm_joint5 + - left_arm_joint6 + - left_gripper_axis1 + - left_gripper_axis2 + - right_arm_joint2 + - right_arm_joint3 + - right_arm_joint4 + - right_arm_joint5 + - right_arm_joint6 + - right_gripper_axis1 + - right_gripper_axis2 + cspace_distance_weight: + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + ee_link: right_eef_link + external_asset_path: null + extra_collision_spheres: + attached_object_left_eef_link: 32 + attached_object_right_eef_link: 32 + extra_links: + attached_object_left_eef_link: + fixed_transform: + - 0 + - 0 + - 0 + - 1 + - 0 + - 0 + - 0 + joint_name: attached_object_left_eef_link_joint + joint_type: FIXED + link_name: attached_object_left_eef_link + parent_link_name: left_eef_link + attached_object_right_eef_link: + fixed_transform: + - 0 + - 0 + - 0 + - 1 + - 0 + - 0 + - 0 + joint_name: attached_object_right_eef_link_joint + joint_type: FIXED + link_name: attached_object_right_eef_link + parent_link_name: right_eef_link + link_names: + 
- left_eef_link + lock_joints: + base_footprint_rx_joint: 0.0 + base_footprint_ry_joint: 0.0 + base_footprint_z_joint: 0.0 + left_gripper_axis1: 0.05000000074505806 + left_gripper_axis2: 0.05000000074505806 + right_gripper_axis1: 0.05000000074505806 + right_gripper_axis2: 0.05000000074505806 + max_acceleration: 15.0 + max_jerk: 500.0 + mesh_link_names: + - base_link + - torso_link1 + - torso_link2 + - torso_link4 + - left_arm_link1 + - left_arm_link2 + - left_arm_link3 + - left_arm_link4 + - left_arm_link5 + - left_arm_link6 + - right_arm_link1 + - right_arm_link2 + - right_arm_link3 + - right_arm_link4 + - right_arm_link5 + - right_arm_link6 + - wheel_link1 + - wheel_link2 + - wheel_link3 + - left_gripper_link1 + - left_gripper_link2 + - right_gripper_link1 + - right_gripper_link2 + - attached_object_right_eef_link + - attached_object_left_eef_link + null_space_weight: + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + - 1 + retract_config: + - 0 + - 0 + - 0 + - 0 + - 0 + - 0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 0.0 + - 1.906 + - 1.906 + - -0.991 + - -0.991 + - 1.571 + - 1.571 + - 0.915 + - 0.915 + - -1.571 + - -1.571 + - 0.03 + - 0.03 + - 0.03 + - 0.03 + self_collision_buffer: + base_link: 0.02 + self_collision_ignore: + base_link: + - torso_link1 + - wheel_link1 + - wheel_link2 + - wheel_link3 + left_arm_link1: + - left_arm_link2 + left_arm_link2: + - left_arm_link3 + left_arm_link3: + - left_arm_link4 + left_arm_link4: + - left_arm_link5 + left_arm_link5: + - left_arm_link6 + left_arm_link6: + - left_gripper_link1 + - left_gripper_link2 + left_gripper_link1: + - left_gripper_link2 + - attached_object_left_eef_link + left_gripper_link2: + - attached_object_left_eef_link + right_arm_link1: + - right_arm_link2 + right_arm_link2: + - right_arm_link3 + right_arm_link3: + - right_arm_link4 + right_arm_link4: + - right_arm_link5 + right_arm_link5: + - right_arm_link6 + right_arm_link6: + - right_gripper_link1 + - right_gripper_link2 + right_gripper_link1: + - right_gripper_link2 + - attached_object_right_eef_link + right_gripper_link2: + - attached_object_right_eef_link + torso_link1: + - torso_link2 + torso_link2: + - torso_link3 + - torso_link4 + torso_link3: + - torso_link4 + torso_link4: + - left_arm_link1 + - right_arm_link1 + - left_arm_link2 + - right_arm_link2 + usd_flip_joint_limits: [] + usd_flip_joints: {} + usd_robot_root: /r1 + use_global_cumul: true ``` From 53eaa2399f1f5d192e1b8c2e7a519b7d6367c51b Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 10 Dec 2024 15:20:31 -0800 Subject: [PATCH 39/61] remove lula FK solver from curobo, fix typo in curobo test --- omnigibson/action_primitives/curobo.py | 14 ++++---------- tests/test_curobo.py | 2 +- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/omnigibson/action_primitives/curobo.py b/omnigibson/action_primitives/curobo.py index f02e22287..73591d3b2 100644 --- a/omnigibson/action_primitives/curobo.py +++ b/omnigibson/action_primitives/curobo.py @@ -176,7 +176,6 @@ def __init__( self.debug = debug self.robot = robot self.robot_joint_names = list(robot.joints.keys()) - self._fk = FKSolver(self.robot.robot_arm_descriptor_yamls[robot.default_arm], self.robot.urdf_path) self.batch_size = batch_size # Load robot config and usd paths and make sure paths point correctly @@ -736,15 +735,10 @@ def convert_q_to_eef_traj(self, traj, return_axisangle=False, emb_sel=CuroboEmbo n = len(traj) # Use 
forward kinematic solver to compute the EEF link positions - positions = self._tensor_args.to_device(th.zeros((n, 3))) - orientations = self._tensor_args.to_device( - th.zeros((n, 4)) - ) # This will be quat initially but we may convert to aa representation - - for i, qpos in enumerate(traj): - pose = self._fk.get_link_poses(joint_positions=qpos, link_names=[self.ee_link[emb_sel]]) - positions[i] = pose[self.ee_link[emb_sel]][0] - orientations[i] = pose[self.ee_link[emb_sel]][1] + # poses in shape (T, 1, D), where 1 is because only one link is passed to link_names + poses = self.mg[emb_sel].kinematics.get_link_poses(traj, link_names=[self.ee_link[emb_sel]]) + positions = poses.position.squeeze(dim=-1) + orientations = poses.quaternion.squeeze(dim=-1) # Possibly convert orientations to aa-representation if return_axisangle: diff --git a/tests/test_curobo.py b/tests/test_curobo.py index e0e3b45be..ffc160601 100644 --- a/tests/test_curobo.py +++ b/tests/test_curobo.py @@ -343,7 +343,7 @@ def test_curobo(): ), f"Collision checking false positive rate: {false_positive / n_samples}, should be == 0.0." assert ( false_negative / n_samples == 0.0 - ), f"Collision checking false positive rate: {false_positive / n_samples}, should be == 0.0." + ), f"Collision checking false negative rate: {false_negative / n_samples}, should be == 0.0." env.scene.reset() From 905fc1394b7382b2e891f022c722d3f917889bb6 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 12:12:47 -0800 Subject: [PATCH 40/61] fix bugs in curobo config generator during custom robot import --- .../examples/robots/import_custom_robot.py | 94 +++++++++++-------- omnigibson/utils/asset_conversion_utils.py | 5 +- 2 files changed, 59 insertions(+), 40 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index f39eb871e..8c4d8fedd 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -9,6 +9,7 @@ import torch as th import yaml from addict import Dict +import xml.etree.ElementTree as ET import omnigibson as og import omnigibson.lazy as lazy @@ -18,6 +19,7 @@ _add_xform_properties, find_all_prim_children_with_type, import_og_asset_from_urdf, + _space_string_to_tensor, ) from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.usd_utils import create_joint, create_primitive_mesh @@ -685,12 +687,13 @@ def find_all_articulated_joints(root_prim): ) -def create_curobo_cfgs(robot_prim, curobo_cfg, root_link, save_dir, is_holonomic=False): +def create_curobo_cfgs(robot_prim, robot_urdf_path, curobo_cfg, root_link, save_dir, is_holonomic=False): """ Creates a set of curobo configs based on @robot_prim and @curobo_cfg Args: robot_prim (Usd.Prim): Top-level prim defining the robot in the current USD stage + robot_urdf_path (str): Path to robot URDF file curobo_cfg (Dict): Dictionary of relevant curobo information root_link (str): Name of the robot's root link, BEFORE any holonomic joints are applied save_dir (str): Path to the directory to save generated curobo files @@ -699,6 +702,20 @@ def create_curobo_cfgs(robot_prim, curobo_cfg, root_link, save_dir, is_holonomic robot_name = robot_prim.GetName() ee_links = list(curobo_cfg.eef_to_gripper_info.keys()) + # Find all joints that have a negative axis specified so we know to flip them in curobo + tree = ET.parse(robot_urdf_path) + root = tree.getroot() + flip_joints = dict() + flip_joint_limits = [] + for 
joint in root.findall("joint"): + if joint.attrib["type"] != "fixed": + axis = th.round(_space_string_to_tensor(joint.find("axis").attrib["xyz"])) + axis_idx = th.nonzero(axis).squeeze().item() + flip_joints[joint.attrib["name"]] = "XYZ"[axis_idx] + is_negative = (axis[axis_idx] < 0).item() + if is_negative: + flip_joint_limits.append(joint.attrib["name"]) + def get_joint_upper_limit(root_prim, joint_name): joint_prim = find_prim_with_name(name=joint_name, root_prim=root_prim) assert joint_prim is not None, f"Could not find joint prim with name {joint_name}!" @@ -729,80 +746,80 @@ def get_joint_upper_limit(root_prim, joint_name): joint_to_default_q = {jnt_name: q for jnt_name, q in zip(all_joint_names, retract_cfg)} default_generated_cfg = { - "robot_cfg": { - "usd_robot_root": f"/{robot_prim.GetName()}", - "usd_flip_joints": {}, - "usd_flip_joint_limits": [], - "base_link": "base_footprint_x" if is_holonomic else root_link, - "ee_link": ee_links[0], - "link_names": ee_links[1:], - "lock_joints": lock_joints, - "extra_links": {}, - "collision_link_names": deepcopy(all_collision_link_names), - "collision_spheres": collision_spheres, - "collision_sphere_buffer": 0.002, - "extra_collision_spheres": {}, - "self_collision_ignore": curobo_cfg.self_collision_ignore.to_dict(), - "self_collision_buffer": {root_link: 0.02}, - "use_global_cumul": True, - "mesh_link_names": deepcopy(all_collision_link_names), - "external_asset_path": None, - "cspace": all_joint_names, + "usd_robot_root": f"/{robot_prim.GetName()}", + "usd_flip_joints": flip_joints, + "usd_flip_joint_limits": flip_joint_limits, + "base_link": "base_footprint_x" if is_holonomic else root_link, + "ee_link": ee_links[0], + "link_names": ee_links[1:], + "lock_joints": lock_joints, + "extra_links": {}, + "collision_link_names": deepcopy(all_collision_link_names), + "collision_spheres": collision_spheres, + "collision_sphere_buffer": 0.002, + "extra_collision_spheres": {}, + "self_collision_ignore": curobo_cfg.self_collision_ignore.to_dict(), + "self_collision_buffer": {root_link: 0.02}, + "use_global_cumul": True, + "mesh_link_names": deepcopy(all_collision_link_names), + "external_asset_path": None, + "cspace": { + "joint_names": all_joint_names, "retract_config": retract_cfg, "null_space_weight": [1] * len(all_joint_names), "cspace_distance_weight": [1] * len(all_joint_names), "max_jerk": 500.0, "max_acceleration": 15.0, - } + }, } for eef_link_name, gripper_info in curobo_cfg.eef_to_gripper_info.items(): attached_obj_link_name = f"attached_object_{eef_link_name}" for jnt_name in gripper_info["joints"]: - default_generated_cfg["robot_cfg"]["lock_joints"][jnt_name] = get_joint_upper_limit(robot_prim, jnt_name) - default_generated_cfg["robot_cfg"]["extra_links"][attached_obj_link_name] = { + default_generated_cfg["lock_joints"][jnt_name] = get_joint_upper_limit(robot_prim, jnt_name) + default_generated_cfg["extra_links"][attached_obj_link_name] = { "parent_link_name": eef_link_name, "link_name": attached_obj_link_name, "fixed_transform": [0, 0, 0, 1, 0, 0, 0], # (x,y,z,w,x,y,z) "joint_type": "FIXED", "joint_name": f"{attached_obj_link_name}_joint", } - default_generated_cfg["robot_cfg"]["collision_link_names"].append(attached_obj_link_name) - default_generated_cfg["robot_cfg"]["extra_collision_spheres"][attached_obj_link_name] = 32 + default_generated_cfg["collision_link_names"].append(attached_obj_link_name) + default_generated_cfg["extra_collision_spheres"][attached_obj_link_name] = 32 for link_name in gripper_info["links"]: - if 
link_name not in default_generated_cfg["robot_cfg"]["self_collision_ignore"]: - default_generated_cfg["robot_cfg"]["self_collision_ignore"][link_name] = [] - default_generated_cfg["robot_cfg"]["self_collision_ignore"][link_name].append(attached_obj_link_name) - default_generated_cfg["robot_cfg"]["mesh_link_names"].append(attached_obj_link_name) + if link_name not in default_generated_cfg["self_collision_ignore"]: + default_generated_cfg["self_collision_ignore"][link_name] = [] + default_generated_cfg["self_collision_ignore"][link_name].append(attached_obj_link_name) + default_generated_cfg["mesh_link_names"].append(attached_obj_link_name) # Save generated file Path(save_dir).mkdir(parents=True, exist_ok=True) save_fpath = f"{save_dir}/{robot_name}_description_curobo_default.yaml" with open(save_fpath, "w+") as f: - yaml.dump(default_generated_cfg, f) + yaml.dump({"robot_cfg": {"kinematics": default_generated_cfg}}, f) # Permute the default config to have additional base only / arm only configs # Only relevant if robot is holonomic if is_holonomic: # Create base only config base_only_cfg = deepcopy(default_generated_cfg) - base_only_cfg["robot_cfg"]["ee_link"] = root_link - base_only_cfg["robot_cfg"]["link_names"] = [] + base_only_cfg["ee_link"] = root_link + base_only_cfg["link_names"] = [] for jnt_name, default_q in joint_to_default_q.items(): - if jnt_name not in base_only_cfg["robot_cfg"]["lock_joints"] and "base_footprint" not in jnt_name: + if jnt_name not in base_only_cfg["lock_joints"] and "base_footprint" not in jnt_name: # Lock this joint - base_only_cfg["robot_cfg"]["lock_joints"][jnt_name] = default_q + base_only_cfg["lock_joints"][jnt_name] = default_q save_base_fpath = f"{save_dir}/{robot_name}_description_curobo_base.yaml" with open(save_base_fpath, "w+") as f: - yaml.dump(base_only_cfg, f) + yaml.dump({"robot_cfg": {"kinematics": base_only_cfg}}, f) # Create arm only config arm_only_cfg = deepcopy(default_generated_cfg) for jnt_name in {"base_footprint_x_joint", "base_footprint_y_joint", "base_footprint_rz_joint"}: - arm_only_cfg["robot_cfg"]["lock_joints"][jnt_name] = 0.0 + arm_only_cfg["lock_joints"][jnt_name] = 0.0 save_arm_fpath = f"{save_dir}/{robot_name}_description_curobo_arm.yaml" with open(save_arm_fpath, "w+") as f: - yaml.dump(arm_only_cfg, f) + yaml.dump({"robot_cfg": {"kinematics": arm_only_cfg}}, f) @click.command(help=_DOCSTRING) @@ -818,7 +835,7 @@ def import_custom_robot(config): cfg = Dict(yaml.load(f, yaml.Loader)) # Convert URDF -> USD - usd_path, prim = import_og_asset_from_urdf( + urdf_path, usd_path, prim = import_og_asset_from_urdf( category="robot", model=cfg.name, urdf_path=cfg.urdf_path, @@ -946,6 +963,7 @@ def import_custom_robot(config): if bool(cfg.curobo): create_curobo_cfgs( robot_prim=prim, + robot_urdf_path=urdf_path, root_link=root_prim_name, curobo_cfg=cfg.curobo, save_dir="/".join(usd_path.split("/")[:-2]) + "/curobo", diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index b7bac6498..29bde4677 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -2402,7 +2402,8 @@ def import_og_asset_from_urdf( (bigger memory footprint, but human-readable) Returns: - 2-tuple: + 3-tuple: + - str: Absolute path to post-processed URDF file - str: Absolute path to generated USD file - Usd.Prim: Generated root USD prim (currently on active stage) """ @@ -2477,4 +2478,4 @@ def import_og_asset_from_urdf( f"\nConversion complete! 
Object has been successfully imported into OmniGibson-compatible USD, located at:\n\n{usd_path}\n" ) - return usd_path, prim + return urdf_path, usd_path, prim From a233df0863dbcbe31eaae03aa44052fdea5dc207 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 12:13:26 -0800 Subject: [PATCH 41/61] update default pose for R1 Pro model in curobo test --- tests/test_curobo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_curobo.py b/tests/test_curobo.py index ffc160601..4161a1e8a 100644 --- a/tests/test_curobo.py +++ b/tests/test_curobo.py @@ -89,7 +89,7 @@ def test_curobo(): { "type": "R1", "obs_modalities": "rgb", - "position": [0.7, -0.55, 0.0], + "position": [0.7, -0.7, 0.056], "orientation": [0, 0, 0.707, 0.707], "self_collisions": True, "action_normalize": False, From c8600a6939c13dd123b74b487f28bd722bf109dc Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 14:26:09 -0800 Subject: [PATCH 42/61] tune object states test for new fetch asset --- tests/test_object_states.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_object_states.py b/tests/test_object_states.py index 317a9dcc3..3bc4fe256 100644 --- a/tests/test_object_states.py +++ b/tests/test_object_states.py @@ -664,9 +664,7 @@ def test_toggled_on(env): stove = env.scene.object_registry("name", "stove") robot = env.robots[0] - stove.set_position_orientation( - [1.48, 0.3, 0.443], T.euler2quat(th.tensor([0, 0, -math.pi / 2.0], dtype=th.float32)) - ) + stove.set_position_orientation([1.505, 0.3, 0.443], T.euler2quat(th.tensor([0, 0, -math.pi / 2.0], dtype=th.float32))) robot.set_position_orientation(position=[0.0, 0.38, 0.0], orientation=[0, 0, 0, 1]) assert not stove.states[ToggledOn].get_value() From 541b15c30b3f9add7c00b54c3d5964d26b470d12 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 22:31:42 +0000 Subject: [PATCH 43/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/examples/robots/import_custom_robot.py | 4 ++-- tests/test_object_states.py | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 8c4d8fedd..c0b8070d3 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -2,6 +2,7 @@ Helper script to download OmniGibson dataset and assets. 
""" +import xml.etree.ElementTree as ET from copy import deepcopy from pathlib import Path @@ -9,7 +10,6 @@ import torch as th import yaml from addict import Dict -import xml.etree.ElementTree as ET import omnigibson as og import omnigibson.lazy as lazy @@ -17,9 +17,9 @@ from omnigibson.macros import gm from omnigibson.utils.asset_conversion_utils import ( _add_xform_properties, + _space_string_to_tensor, find_all_prim_children_with_type, import_og_asset_from_urdf, - _space_string_to_tensor, ) from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.usd_utils import create_joint, create_primitive_mesh diff --git a/tests/test_object_states.py b/tests/test_object_states.py index 3bc4fe256..82cdd1816 100644 --- a/tests/test_object_states.py +++ b/tests/test_object_states.py @@ -664,7 +664,9 @@ def test_toggled_on(env): stove = env.scene.object_registry("name", "stove") robot = env.robots[0] - stove.set_position_orientation([1.505, 0.3, 0.443], T.euler2quat(th.tensor([0, 0, -math.pi / 2.0], dtype=th.float32))) + stove.set_position_orientation( + [1.505, 0.3, 0.443], T.euler2quat(th.tensor([0, 0, -math.pi / 2.0], dtype=th.float32)) + ) robot.set_position_orientation(position=[0.0, 0.38, 0.0], orientation=[0, 0, 0, 1]) assert not stove.states[ToggledOn].get_value() From 863209b16b815592764c83ce49687b858e83c38d Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:00:04 -0800 Subject: [PATCH 44/61] fix grasping state inference for gripper controller --- .../controllers/multi_finger_gripper_controller.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/omnigibson/controllers/multi_finger_gripper_controller.py b/omnigibson/controllers/multi_finger_gripper_controller.py index 9eb70241f..5506be3a6 100644 --- a/omnigibson/controllers/multi_finger_gripper_controller.py +++ b/omnigibson/controllers/multi_finger_gripper_controller.py @@ -17,7 +17,7 @@ # is_grasping heuristics parameters m.POS_TOLERANCE = 0.002 # arbitrary heuristic -m.VEL_TOLERANCE = 0.01 # arbitrary heuristic +m.VEL_TOLERANCE = 0.02 # arbitrary heuristic class MultiFingerGripperController(GripperController): @@ -251,11 +251,8 @@ def _update_grasping_state(self, control_dict): dist_from_lower_limit = finger_pos - min_pos dist_from_upper_limit = max_pos - finger_pos - # If the joint positions are not near the joint limits with some tolerance (m.POS_TOLERANCE) - valid_grasp_pos = ( - th.mean(dist_from_lower_limit) > m.POS_TOLERANCE - and th.mean(dist_from_upper_limit) > m.POS_TOLERANCE - ) + # If either of the joint positions are not near the joint limits with some tolerance (m.POS_TOLERANCE) + valid_grasp_pos = th.mean(dist_from_lower_limit) > m.POS_TOLERANCE or th.mean(dist_from_upper_limit) > m.POS_TOLERANCE # And the joint velocities are close to zero with some tolerance (m.VEL_TOLERANCE) valid_grasp_vel = th.all(th.abs(finger_vel) < m.VEL_TOLERANCE) From e790e770c8e5cfe8b02e5f1d2ece319a229b65c3 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:00:19 -0800 Subject: [PATCH 45/61] add attribution --- omnigibson/utils/urdfpy_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/omnigibson/utils/urdfpy_utils.py b/omnigibson/utils/urdfpy_utils.py index 272521925..f65199172 100644 --- a/omnigibson/utils/urdfpy_utils.py +++ b/omnigibson/utils/urdfpy_utils.py @@ -1,3 +1,7 @@ +""" +Code directly adapted from https://github.com/mmatl/urdfpy +""" + import copy import os import time From 
5971fbb022c768d6c59aa3918478c151142377e6 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:01:42 -0800 Subject: [PATCH 46/61] update sensor ground truth for new fetch robot test --- tests/test_robot_states_flatcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_robot_states_flatcache.py b/tests/test_robot_states_flatcache.py index 10c4a4c45..77ff4af8a 100644 --- a/tests/test_robot_states_flatcache.py +++ b/tests/test_robot_states_flatcache.py @@ -61,8 +61,8 @@ def camera_pose_test(flatcache): relative_pose_transform(sensor_world_pos, sensor_world_ori, robot_world_pos, robot_world_ori) ) - sensor_world_pos_gt = th.tensor([150.1628, 149.9993, 101.3773]) - sensor_world_ori_gt = th.tensor([-0.2952, 0.2959, 0.6427, -0.6421]) + sensor_world_pos_gt = th.tensor([150.5187, 149.8295, 101.0960]) + sensor_world_ori_gt = th.tensor([0.0198, -0.1313, 0.9895, -0.0576]) assert th.allclose(sensor_world_pos, sensor_world_pos_gt, atol=1e-3) assert quaternions_close(sensor_world_ori, sensor_world_ori_gt, atol=1e-3) From c71dda8da9c21d2180e76ce30a332986ffef3cc2 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:02:12 -0800 Subject: [PATCH 47/61] do not 0 friction during URDF->USD import process, update docs --- docs/tutorials/custom_robot_import.md | 6 +++--- omnigibson/examples/robots/import_custom_robot.py | 4 ---- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index d7d241342..a496d1813 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -34,11 +34,11 @@ Some notes about the importing script: 6. Generates relevant semantic metadata for the given object, given its category 7. Generates visual spheres, cameras, and lidars (in that order, as specified by the source config) 8. Updates wheel approximations (as specified by the source config) - 9. Removes all friction from all joints - 10. Generates holonomic base joints (as specified by the source config) - 11. Generates configuration files needed for CuRobo motion planning (as specified by the source config) + 9. Generates holonomic base joints (as specified by the source config) + 10. Generates configuration files needed for CuRobo motion planning (as specified by the source config) - If `merge_fixed_joints=true`, all robot links that are connected to a parent link via a fixed joint will be merged directly into the parent joint. This means that the USD will _not_ contain these links! However, this collapsing occurs during the final URDF to USD conversion, meaning that these links _can_ be referenced beforehand (e.g.: when specifying desired per-link collision decomposition behavior) +- [CuRobo](https://curobo.org/index.html) is a highly performant motion planner that is used in **OmniGibson** for specific use-cases, such as skills. However, CuRobo requires a manually-specified sphere representation of the robot to be defined. These values can be generated using [IsaacSim's interactive GUI](https://curobo.org/tutorials/1_robot_configuration.html#robot-collision-representation), and should be exported and copied into the robot source config yaml file used for importing into **OmniGibson**. You can see the set of values used for the R1 robot below. 
For more information regarding specific keys specified, please see CuRobo's [Configuring a New Robot](https://curobo.org/tutorials/1_robot_configuration.html) tutorial! ??? code "r1_pro_source_config.yaml" ``` yaml linenums="1" diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 8c4d8fedd..de50cbcae 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -903,10 +903,6 @@ def import_custom_robot(config): assert articulation_root_prim is not None, "Could not find any valid articulation root prim!" root_prim_name = articulation_root_prim.GetName() - # Zero out all friction on joints - for joint_prim in find_all_articulated_joints(root_prim=prim): - joint_prim.GetAttribute("physxJoint:jointFriction").Set(0.0) - # Add holonomic base if requested if cfg.base_motion.use_holonomic_joints: # Convert all wheel joints into fixed joints From 2f7a8f8d0646eb70c70eb53f6ff980e27052e24d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 00:02:42 +0000 Subject: [PATCH 48/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/controllers/multi_finger_gripper_controller.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/omnigibson/controllers/multi_finger_gripper_controller.py b/omnigibson/controllers/multi_finger_gripper_controller.py index 5506be3a6..35e83b48a 100644 --- a/omnigibson/controllers/multi_finger_gripper_controller.py +++ b/omnigibson/controllers/multi_finger_gripper_controller.py @@ -252,7 +252,9 @@ def _update_grasping_state(self, control_dict): dist_from_upper_limit = max_pos - finger_pos # If either of the joint positions are not near the joint limits with some tolerance (m.POS_TOLERANCE) - valid_grasp_pos = th.mean(dist_from_lower_limit) > m.POS_TOLERANCE or th.mean(dist_from_upper_limit) > m.POS_TOLERANCE + valid_grasp_pos = ( + th.mean(dist_from_lower_limit) > m.POS_TOLERANCE or th.mean(dist_from_upper_limit) > m.POS_TOLERANCE + ) # And the joint velocities are close to zero with some tolerance (m.VEL_TOLERANCE) valid_grasp_vel = th.all(th.abs(finger_vel) < m.VEL_TOLERANCE) From 2e9cb384610e55063b2e378d0153c0e8e7877f7c Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:18:08 -0800 Subject: [PATCH 49/61] fix bugs in FOV robot object state --- docs/modules/object_states.md | 4 ++-- omnigibson/object_states/robot_related_states.py | 9 +++++---- omnigibson/scene_graphs/graph_builder.py | 6 +++--- tests/test_robot_states_no_flatcache.py | 9 +++++---- 4 files changed, 15 insertions(+), 13 deletions(-) diff --git a/docs/modules/object_states.md b/docs/modules/object_states.md index e35ecff20..2bd95494b 100644 --- a/docs/modules/object_states.md +++ b/docs/modules/object_states.md @@ -210,9 +210,9 @@ These are object states that are agnostic to other objects in a given scene. [**`ObjectsInFOVOfRobot`**](../reference/object_states/robot_related_states.md)

-    A robot-specific state. Comptues the list of objects that are currently in the robot's field of view.

+    A robot-specific state. Computes the set of objects that are currently in the robot's field of view.

-    - `get_value()`: returns `obj_list`, the list of `BaseObject`s
+    - `get_value()`: returns `obj_set`, the set of `BaseObject`s
     - `set_value(new_value)`: Not supported
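As a rough illustration of the set-valued `ObjectsInFOVOfRobot` API documented above (and implemented in the `robot_related_states.py` hunk that follows), here is a minimal sketch of how calling code might consume the state. It is not part of the patch series itself; the import path, the `env` handle, and the helper names are illustrative assumptions.

```python
# Minimal usage sketch (assumed setup): querying which scene objects a robot currently sees.
# `env` is an already-constructed OmniGibson Environment; the import path is assumed to
# match the module edited in the diff below.
from omnigibson.object_states import ObjectsInFOVOfRobot


def visible_object_names(env):
    """Return the names of all objects currently in the first robot's field of view."""
    robot = env.robots[0]
    # get_value() now returns a set of BaseObject instances (previously a list),
    # de-duplicated across all of the robot's vision sensors.
    objs_in_fov = robot.states[ObjectsInFOVOfRobot].get_value()
    return {obj.name for obj in objs_in_fov}


def robot_can_see(env, object_name):
    """True if the named object is currently visible to the first robot."""
    robot = env.robots[0]
    target = env.scene.object_registry("name", object_name)
    # Set membership replaces the old list scan and is order-independent.
    return target is not None and target in robot.states[ObjectsInFOVOfRobot].get_value()
```

Returning a set rather than a list makes membership checks and de-duplication across multiple sensors natural, which is what the `graph_builder.py` and test changes later in this patch rely on.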
diff --git a/omnigibson/object_states/robot_related_states.py b/omnigibson/object_states/robot_related_states.py index 3871aad30..1e38265ae 100644 --- a/omnigibson/object_states/robot_related_states.py +++ b/omnigibson/object_states/robot_related_states.py @@ -55,14 +55,15 @@ def _get_value(self): Gets all objects in the robot's field of view. Returns: - list: List of objects in the robot's field of view + set: Set of objects in the robot's field of view """ if not any(isinstance(sensor, VisionSensor) for sensor in self.robot.sensors.values()): raise ValueError("No vision sensors found on robot.") - obj_names = [] + objs = set() names_to_exclude = set(["background", "unlabelled"]) for sensor in self.robot.sensors.values(): if isinstance(sensor, VisionSensor): _, info = sensor.get_obs() - obj_names.extend([name for name in info["seg_instance"].values() if name not in names_to_exclude]) - return [x for x in [self.obj.scene.object_registry("name", x) for x in obj_names] if x is not None] + objs.update(set(self.obj.scene.object_registry("name", name) for name in info["seg_instance"].values() if name not in names_to_exclude)) + # Return all objects, minus any that were mapped to None because they were not found in our object registry + return objs - {None} diff --git a/omnigibson/scene_graphs/graph_builder.py b/omnigibson/scene_graphs/graph_builder.py index 6f9879001..5c72af19f 100644 --- a/omnigibson/scene_graphs/graph_builder.py +++ b/omnigibson/scene_graphs/graph_builder.py @@ -179,7 +179,7 @@ def step(self, scene): # If we're not in full observability mode, only pick the objects in FOV of robots. for robot in self._robots: objs_in_fov = robot.states[object_states.ObjectsInFOVOfRobot].get_value() - objs_to_add &= set(objs_in_fov) + objs_to_add &= objs_in_fov # Remove all BaseRobot objects from the set of objects to add. 
base_robots = [obj for obj in objs_to_add if isinstance(obj, BaseRobot)] @@ -243,9 +243,9 @@ def _draw_graph(): node_labels = {obj: obj.category for obj in nodes} # get all objects in fov of robots - objects_in_fov = [] + objects_in_fov = set() for robot in all_robots: - objects_in_fov += robot.states[object_states.ObjectsInFOVOfRobot].get_value() + objects_in_fov.update(robot.states[object_states.ObjectsInFOVOfRobot].get_value()) colors = [ ("yellow" if obj.category == "agent" else ("green" if obj in objects_in_fov else "red")) for obj in nodes ] diff --git a/tests/test_robot_states_no_flatcache.py b/tests/test_robot_states_no_flatcache.py index 2ba11d3ac..f8eb610b7 100644 --- a/tests/test_robot_states_no_flatcache.py +++ b/tests/test_robot_states_no_flatcache.py @@ -33,14 +33,15 @@ def test_object_in_FOV_of_robot(): env = setup_environment(False) robot = env.robots[0] env.reset() - assert robot.states[ObjectsInFOVOfRobot].get_value() == [robot] + objs_in_fov = robot.states[ObjectsInFOVOfRobot].get_value() + assert len(objs_in_fov) == 1 and next(iter(objs_in_fov)) == robot sensors = [s for s in robot.sensors.values() if isinstance(s, VisionSensor)] assert len(sensors) > 0 - vision_sensor = sensors[0] - vision_sensor.set_position_orientation(position=[100, 150, 100]) + for vision_sensor in sensors: + vision_sensor.set_position_orientation(position=[100, 150, 100]) og.sim.step() for _ in range(5): og.sim.render() # Since the sensor is moved away from the robot, the robot should not see itself - assert robot.states[ObjectsInFOVOfRobot].get_value() == [] + assert len(robot.states[ObjectsInFOVOfRobot].get_value()) == 0 og.clear() From 1ca3f870c3dc97477d6343806b67da71cd96e103 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 16:48:22 -0800 Subject: [PATCH 50/61] prune utils, update docs, update package deps version --- docs/tutorials/custom_robot_import.md | 3 +- omnigibson/utils/asset_conversion_utils.py | 85 ---------------------- setup.py | 2 +- 3 files changed, 2 insertions(+), 88 deletions(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index a496d1813..847ce7e6b 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -19,8 +19,7 @@ There are two ways to convert our raw robot URDF into an OmniGibson-compatible U Our custom robot importer [`import_custom_robot.py`](https://github.com/StanfordVL/OmniGibson/tree/main/omnigibson/examples/robots/import_custom_robot.py) wraps the native URDF Importer from Isaac Sim to convert our robot URDF model into USD format. Please see the following steps for running this script: 1. All that is required is a single source config yaml file that dictates how the URDF should be post-processed when being converted into a USD. You can run `import_custom_robot.py --help` to see a detailed example configuration used, which is also shown below (`r1_pro_source_config.yaml`) for your convenience. -2. All output files are written to `/objects/robot/`. Please copy / move this directory to `/objects/` so it can be imported into **OmniGibson**. -3. That's it! +2. All output files are written to `/objects/robot/`. Please move this directory to `/objects/` so it can be imported into **OmniGibson**. 
Some notes about the importing script: diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 29bde4677..24bd453f6 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -560,91 +560,6 @@ def _import_rendering_channels(obj_prim, obj_category, obj_model, model_root_pat # Lastly, we copy object_state texture maps that are state-conditioned; e.g.: cooked, soaked, etc. _copy_object_state_textures(obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root) - # ################################### - # - # # Iterate over all children of the object prim, if ///visual exists, then we - # # know is a valid link, and we check explicitly for these material files in our set - # # Note: we assume that the link name is included as a string within the mat_file! - # for prim in obj_prim.GetChildren(): - # if prim.GetPrimTypeInfo().GetTypeName() == "Xform": - # # This could be a link, check if it owns a visual subprim - # link_name = prim.GetName() - # visual_prim = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{prim.GetPrimPath().pathString}/visuals") - # log.debug(f"path: {prim.GetPrimPath().pathString}/visuals") - # log.debug(f"visual prim: {visual_prim}") - # - # if visual_prim: - # # Aggregate all material files for this prim - # link_mat_files = [] - # for mat_file in deepcopy(mat_files): - # if link_name in mat_file: - # # Add this mat file and remove it from the set - # link_mat_files.append(mat_file) - # mat_files.remove(mat_file) - # # Potentially write material files for this prim if we have any valid materials - # log.debug("link_mat_files:", link_mat_files) - # if not link_mat_files: - # # Bind default material to the visual prim - # shade = lazy.pxr.UsdShade.Material(default_mat) - # lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind(shade, lazy.pxr.UsdShade.Tokens.strongerThanDescendants) - # default_mat_is_used = True - # else: - # # Create new material for this link - # mtl_created_list = [] - # lazy.omni.kit.commands.execute( - # "CreateAndBindMdlMaterialFromLibrary", - # mdl_name="OmniPBR.mdl", - # mtl_name="OmniPBR", - # mtl_created_list=mtl_created_list, - # ) - # log.debug(f"Created material for link {link_name}:", mtl_created_list[0]) - # mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(mtl_created_list[0]) - # - # shade = lazy.pxr.UsdShade.Material(mat) - # # Bind the created link material to the visual prim - # lazy.pxr.UsdShade.MaterialBindingAPI(visual_prim).Bind(shade, lazy.pxr.UsdShade.Tokens.strongerThanDescendants) - # - # # Iterate over all material channels and write them to the material - # for link_mat_file in link_mat_files: - # # Copy this file into the materials folder - # mat_fpath = os.path.join(usd_dir, "materials") - # shutil.copy(os.path.join(mat_dir, link_mat_file), mat_fpath) - # # Check if any valid rendering channel - # mat_type = link_mat_file.split("_")[-1].split(".")[0].lower() - # # Apply the material if it exists - # render_channel_fcn = rendering_channel_mappings.get(mat_type, None) - # if render_channel_fcn is not None: - # render_channel_fcn(mat, os.path.join("materials", link_mat_file)) - # else: - # # Warn user that we didn't find the correct rendering channel - # log.warning(f"Warning: could not find rendering channel function for material: {mat_type}, skipping") - # - # # Rename material - # mat = rename_prim(prim=mat, name=f"material_{link_name}") - # - # # For any remaining materials, we write them to the default 
material - # # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_material_0") - # # default_mat = lazy.omni.isaac.core.utils.prims.get_prim_at_path(f"{obj_prim.GetPrimPath().pathString}/Looks/material_default") - # log.debug(f"default mat: {default_mat}, obj: {obj_category}, {prim.GetPrimPath().pathString}") - # for mat_file in mat_files: - # # Copy this file into the materials folder - # mat_fpath = os.path.join(usd_dir, "materials") - # shutil.copy(os.path.join(mat_dir, mat_file), mat_fpath) - # # Check if any valid rendering channel - # mat_type = mat_file.split("_")[-1].split(".")[0].lower() - # # Apply the material if it exists - # render_channel_fcn = rendering_channel_mappings.get(mat_type, None) - # if render_channel_fcn is not None: - # render_channel_fcn(default_mat, os.path.join("materials", mat_file)) - # default_mat_is_used = True - # else: - # # Warn user that we didn't find the correct rendering channel - # log.warning(f"Could not find rendering channel function for material: {mat_type}") - # - # # Possibly delete the default material prim if it was never used - # if not default_mat_is_used: - # stage.RemovePrim(default_mat.GetPrimPath()) - def _add_xform_properties(prim): """ diff --git a/setup.py b/setup.py index feaeae568..67c45d20c 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ "rtree>=1.2.0", "graphviz>=0.20", "matplotlib>=3.0.0", - "lxml", + "lxml>=5.3.0", ], extras_require={ "dev": [ From f6b8a14619ee3f6396b6a50d82e3107908a3be3e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 00:48:52 +0000 Subject: [PATCH 51/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/object_states/robot_related_states.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/omnigibson/object_states/robot_related_states.py b/omnigibson/object_states/robot_related_states.py index 1e38265ae..6567951d8 100644 --- a/omnigibson/object_states/robot_related_states.py +++ b/omnigibson/object_states/robot_related_states.py @@ -64,6 +64,12 @@ def _get_value(self): for sensor in self.robot.sensors.values(): if isinstance(sensor, VisionSensor): _, info = sensor.get_obs() - objs.update(set(self.obj.scene.object_registry("name", name) for name in info["seg_instance"].values() if name not in names_to_exclude)) + objs.update( + set( + self.obj.scene.object_registry("name", name) + for name in info["seg_instance"].values() + if name not in names_to_exclude + ) + ) # Return all objects, minus any that were mapped to None because they were not found in our object registry return objs - {None} From b4b7c2139e67652caea6eba4a7b8cdffb510760d Mon Sep 17 00:00:00 2001 From: Josiah Wong <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 17:21:31 -0800 Subject: [PATCH 52/61] update docs --- docs/tutorials/custom_robot_import.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index 847ce7e6b..bfa3a4694 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -37,7 +37,7 @@ Some notes about the importing script: 10. 
Generates configuration files needed for CuRobo motion planning (as specified by the source config) - If `merge_fixed_joints=true`, all robot links that are connected to a parent link via a fixed joint will be merged directly into the parent joint. This means that the USD will _not_ contain these links! However, this collapsing occurs during the final URDF to USD conversion, meaning that these links _can_ be referenced beforehand (e.g.: when specifying desired per-link collision decomposition behavior) -- [CuRobo](https://curobo.org/index.html) is a highly performant motion planner that is used in **OmniGibson** for specific use-cases, such as skills. However, CuRobo requires a manually-specified sphere representation of the robot to be defined. These values can be generated using [IsaacSim's interactive GUI](https://curobo.org/tutorials/1_robot_configuration.html#robot-collision-representation), and should be exported and copied into the robot source config yaml file used for importing into **OmniGibson**. You can see the set of values used for the R1 robot below. For more information regarding specific keys specified, please see CuRobo's [Configuring a New Robot](https://curobo.org/tutorials/1_robot_configuration.html) tutorial! +- [CuRobo](https://curobo.org/index.html) is a highly performant motion planner that is used in **OmniGibson** for specific use-cases, such as skills. However, CuRobo requires a manually-specified sphere representation of the robot to be defined. These values can be generated using [IsaacSim's interactive GUI](https://curobo.org/tutorials/1_robot_configuration.html#robot-collision-representation), and should be exported and copied into the robot source config yaml file used for importing into **OmniGibson**. You can see the set of values used for the R1 robot below. For more information regarding specific keys specified, please see CuRobo's [Configuring a New Robot](https://curobo.org/tutorials/1_robot_configuration.html) tutorial. ??? 
code "r1_pro_source_config.yaml" ``` yaml linenums="1" From 93926067bc5ca29ab7d68f0c2b13a724bcacfcc8 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 11 Dec 2024 19:38:01 -0800 Subject: [PATCH 53/61] fix freight and locobot offsets --- .../starter_semantic_action_primitives.py | 8 ++------ omnigibson/robots/freight.py | 2 +- omnigibson/robots/locobot.py | 2 +- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 1b1aec0d0..d79c074c2 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -59,7 +59,7 @@ Stretch: 0.5, Turtlebot: 0.3, Husky: 0.05, - Freight: 0.05, + Freight: 0.2, Locobot: 1.5, BehaviorRobot: 0.3, R1: 0.3, @@ -70,7 +70,7 @@ Stretch: 0.7, Turtlebot: 0.2, Husky: 0.05, - Freight: 0.05, + Freight: 0.1, Locobot: 1.5, BehaviorRobot: 0.2, R1: 0.2, @@ -1763,10 +1763,6 @@ def _rotate_in_place(self, end_pose, angle_threshold=m.DEFAULT_ANGLE_THRESHOLD): direction = -1.0 if diff_yaw < 0.0 else 1.0 ang_vel = m.KP_ANGLE_VEL[type(self.robot)] * direction - if isinstance(self.robot, Locobot) or isinstance(self.robot, Freight): - # Locobot and Freight wheel joints are reversed - ang_vel = -ang_vel - base_action = action[self.robot.controller_action_idx["base"]] if not self._base_controller_is_joint: diff --git a/omnigibson/robots/freight.py b/omnigibson/robots/freight.py index 1f7608791..c88fbc814 100644 --- a/omnigibson/robots/freight.py +++ b/omnigibson/robots/freight.py @@ -26,7 +26,7 @@ def wheel_axle_length(self): @property def base_joint_names(self): - return ["r_wheel_joint", "l_wheel_joint"] + return ["l_wheel_joint", "r_wheel_joint"] @property def _default_joint_pos(self): diff --git a/omnigibson/robots/locobot.py b/omnigibson/robots/locobot.py index a80ee6421..d27655f33 100644 --- a/omnigibson/robots/locobot.py +++ b/omnigibson/robots/locobot.py @@ -22,7 +22,7 @@ def wheel_axle_length(self): @property def base_joint_names(self): - return ["wheel_right_joint", "wheel_left_joint"] + return ["wheel_left_joint", "wheel_right_joint"] @property def _default_joint_pos(self): From b648a2f13aa4c7323d70f21317d12eecb7dac7eb Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 09:59:17 -0800 Subject: [PATCH 54/61] update curobo dep version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 67c45d20c..d27e3757a 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ "telemoma~=0.1.2", ], "primitives": [ - "nvidia-curobo @ git+https://github.com/StanfordVL/curobo@6a4eb2ca8677829b0f57451ad107e0a3186525e9", + "nvidia-curobo @ git+https://github.com/StanfordVL/curobo@dc52be668a8d0b426b8639de3c8d6443e58cc348", "ompl @ https://storage.googleapis.com/gibson_scenes/ompl-1.6.0-cp310-cp310-manylinux_2_28_x86_64.whl", ], }, From e515c8a824d74f5a6a3fad64cde798588e2c5c71 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 10:29:36 -0800 Subject: [PATCH 55/61] fix curobo naming convention --- omnigibson/robots/manipulation_robot.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 36593ec08..be725b24f 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py 
@@ -978,11 +978,11 @@ def _freeze_gripper(self, arm="default"): def curobo_path(self): """ Returns: - str or Dict[CuroboEmbodimentSelection, str]: file path to the robot curobo file or a mapping from - CuroboEmbodimentSelection to the file path + str or Dict[CuRoboEmbodimentSelection, str]: file path to the robot curobo file or a mapping from + CuRoboEmbodimentSelection to the file path """ # Import here to avoid circular imports - from omnigibson.action_primitives.curobo import CuroboEmbodimentSelection + from omnigibson.action_primitives.curobo import CuRoboEmbodimentSelection # By default, sets the standardized path model = self.model_name.lower() @@ -990,7 +990,7 @@ def curobo_path(self): emb_sel: os.path.join( gm.ASSET_PATH, f"models/{model}/curobo/{model}_description_curobo_{emb_sel.value}.yaml" ) - for emb_sel in CuroboEmbodimentSelection + for emb_sel in CuRoboEmbodimentSelection } @property From 73d757b8b8e70ce0d5f5d3420bfe77997c460065 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 10:30:21 -0800 Subject: [PATCH 56/61] modify scene graph for updated robots --- omnigibson/scene_graphs/graph_builder.py | 2 +- tests/test_scene_graph.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/omnigibson/scene_graphs/graph_builder.py b/omnigibson/scene_graphs/graph_builder.py index 5c72af19f..61c51a732 100644 --- a/omnigibson/scene_graphs/graph_builder.py +++ b/omnigibson/scene_graphs/graph_builder.py @@ -283,7 +283,7 @@ def _draw_graph(): # Prepare pyplot figure that's sized to match the robot video. robot = all_robots[0] # If there are multiple robots, we only include the first one (robot_camera_sensor,) = [ - s for s in robot.sensors.values() if isinstance(s, VisionSensor) and "rgb" in s.modalities + s for s in robot.sensors.values() if isinstance(s, VisionSensor) and "eyes" in s.name and "rgb" in s.modalities ] robot_view = (robot_camera_sensor.get_obs()[0]["rgb"][..., :3]).to(th.uint8) imgheight, imgwidth, _ = robot_view.shape diff --git a/tests/test_scene_graph.py b/tests/test_scene_graph.py index 50227a613..a6cf6d458 100644 --- a/tests/test_scene_graph.py +++ b/tests/test_scene_graph.py @@ -23,7 +23,7 @@ def create_robot_config(name, position): return { "name": name, "type": "Fetch", - "obs_modalities": "all", + "obs_modalities": ["rgb", "seg_instance"], "position": position, "orientation": T.euler2quat(th.tensor([0, 0, -math.pi / 2], dtype=th.float32)), "controller_config": { From ac165ef6068317c2e22e9783c69717654c493a49 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 10:30:30 -0800 Subject: [PATCH 57/61] ensure pillow compatibility with isaac pip prebundle --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index d27e3757a..381624d45 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ "graphviz>=0.20", "matplotlib>=3.0.0", "lxml>=5.3.0", + "pillow~=10.2.0", ], extras_require={ "dev": [ From 1eb8e0e61755124e644e6837f0619d2897fc5287 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 11:00:09 -0800 Subject: [PATCH 58/61] add low pass filter for multi finger gripper to reduce velocity noise during grasp state inference --- .../multi_finger_gripper_controller.py | 50 ++++++++++++++++++- omnigibson/utils/processing_utils.py | 17 +++++++ 2 files changed, 65 insertions(+), 2 deletions(-) diff --git a/omnigibson/controllers/multi_finger_gripper_controller.py b/omnigibson/controllers/multi_finger_gripper_controller.py 
index 35e83b48a..70dbc9681 100644 --- a/omnigibson/controllers/multi_finger_gripper_controller.py +++ b/omnigibson/controllers/multi_finger_gripper_controller.py @@ -1,8 +1,8 @@ import torch as th -import omnigibson.utils.transform_utils as T from omnigibson.controllers import ControlType, GripperController, IsGraspingState from omnigibson.macros import create_module_macros +from omnigibson.utils.processing_utils import MovingAverageFilter from omnigibson.utils.python_utils import assert_valid_key VALID_MODES = { @@ -101,6 +101,9 @@ def __init__( # Create other args to be filled in at runtime self._is_grasping = IsGraspingState.FALSE + # Create ring buffer for velocity history to avoid high frequency nosie during grasp state inference + self._vel_filter = MovingAverageFilter(obs_dim=len(dof_idx), filter_width=5) + # If we're using binary signal, we override the command output limits if mode == "binary": command_output_limits = (-1.0, 1.0) @@ -118,9 +121,17 @@ def reset(self): # Call super first super().reset() + # Reset the filter + self._vel_filter.reset() + # reset grasping state self._is_grasping = IsGraspingState.FALSE + @property + def state_size(self): + # Add state size from the control filter + return super().state_size + self._vel_filter.state_size + def _preprocess_command(self, command): # We extend this method to make sure command is always n-dimensional if self._mode != "independent": @@ -208,6 +219,9 @@ def _update_grasping_state(self, control_dict): joint_position: Array of current joint positions joint_velocity: Array of current joint velocities """ + # Update velocity history + finger_vel = self._vel_filter.estimate(control_dict["joint_velocity"][self.dof_idx]) + # Calculate grasping state based on mode of this controller # Independent mode of MultiFingerGripperController does not have any good heuristics to determine is_grasping @@ -240,7 +254,6 @@ def _update_grasping_state(self, control_dict): # Otherwise, the last control signal intends to "move" the gripper else: - finger_vel = control_dict["joint_velocity"][self.dof_idx] min_pos = self._control_limits[ControlType.POSITION][0][self.dof_idx] max_pos = self._control_limits[ControlType.POSITION][1][self.dof_idx] @@ -297,6 +310,39 @@ def is_grasping(self): # Return cached value return self._is_grasping + def _dump_state(self): + # Run super first + state = super()._dump_state() + + # Add filter state + state["vel_filter"] = self._vel_filter.dump_state(serialized=False) + + return state + + def _load_state(self, state): + # Run super first + super()._load_state(state=state) + + # Also load velocity filter state + self._vel_filter.load_state(state["vel_filter"], serialized=False) + + def serialize(self, state): + # Run super first + state_flat = super().serialize(state=state) + + # Serialize state for this controller + return th.cat([state_flat, self._vel_filter.serialize(state=state["vel_filter"])]) + + def deserialize(self, state): + # Run super first + state_dict, idx = super().deserialize(state=state) + + # Deserialize state for the velocity filter + state_dict["vel_filter"], deserialized_items = self._vel_filter.deserialize(state=state[idx:]) + idx += deserialized_items + + return state_dict, idx + @property def control_type(self): return ControlType.get_type(type_str=self._motor_type) diff --git a/omnigibson/utils/processing_utils.py b/omnigibson/utils/processing_utils.py index bb32f35f5..0f2527cd8 100644 --- a/omnigibson/utils/processing_utils.py +++ b/omnigibson/utils/processing_utils.py @@ -42,6 +42,13 @@ def 
deserialize(self, state): # Default is no state, so do nothing return dict(), 0 + @property + def state_size(self): + """ + Size of the serialized state of this filter + """ + raise NotImplementedError + class MovingAverageFilter(Filter): """ @@ -98,6 +105,11 @@ def reset(self): self.current_idx = 0 self.fully_filled = False + @property + def state_size(self): + # This is the size of the internal buffer plus the current index and fully filled single values + return th.prod(self.past_samples.shape) + 2 + def _dump_state(self): # Run super init first state = super()._dump_state() @@ -185,6 +197,11 @@ def reset(self): self.avg *= 0.0 self.num_samples = 0 + @property + def state_size(self): + # This is the size of the internal value as well as a num samples + return len(self.avg) + 1 + def _dump_state(self): # Run super init first state = super()._dump_state() From 813b4f7d7c38ec150bad73f3eed1d00e2b2651dd Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 14:26:26 -0800 Subject: [PATCH 59/61] align prim getter with inputted stage --- omnigibson/examples/robots/import_custom_robot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 6a33887bd..48c765ef2 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -351,7 +351,7 @@ def create_rigid_prim(stage, link_prim_path): Usd.Prim: Newly created rigid prim """ # Make sure link prim does NOT already exist (this should be a new link) - link_prim_exists = lazy.omni.isaac.core.utils.prims.is_prim_path_valid(link_prim_path) + link_prim_exists = stage.GetPrimAtPath(link_prim_path).IsValid() assert ( not link_prim_exists ), f"Cannot create new link because there already exists a link at prim path {link_prim_path}!" From fea8975c6ac6f13fedbf271fa6531fe064d57cf1 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 12 Dec 2024 14:30:36 -0800 Subject: [PATCH 60/61] rename EXTERNAL_DATASET -> CUSTOM_DATASET --- docs/tutorials/custom_robot_import.md | 4 ++-- omnigibson/examples/objects/import_custom_object.py | 2 +- omnigibson/examples/robots/import_custom_robot.py | 4 ++-- omnigibson/macros.py | 4 ++-- omnigibson/objects/dataset_object.py | 8 ++++---- omnigibson/utils/asset_conversion_utils.py | 8 ++++---- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index bfa3a4694..c05c25aac 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -19,13 +19,13 @@ There are two ways to convert our raw robot URDF into an OmniGibson-compatible U Our custom robot importer [`import_custom_robot.py`](https://github.com/StanfordVL/OmniGibson/tree/main/omnigibson/examples/robots/import_custom_robot.py) wraps the native URDF Importer from Isaac Sim to convert our robot URDF model into USD format. Please see the following steps for running this script: 1. All that is required is a single source config yaml file that dictates how the URDF should be post-processed when being converted into a USD. You can run `import_custom_robot.py --help` to see a detailed example configuration used, which is also shown below (`r1_pro_source_config.yaml`) for your convenience. -2. All output files are written to `/objects/robot/`. Please move this directory to `/objects/` so it can be imported into **OmniGibson**. 
+2. All output files are written to `/objects/robot/`. Please move this directory to `/objects/` so it can be imported into **OmniGibson**. Some notes about the importing script: - The importing procedure can be summarized as follows: - 1. Copy the raw URDF file + any dependencies into the `gm.EXTERNAL_DATASET_PATH` directory + 1. Copy the raw URDF file + any dependencies into the `gm.CUSTOM_DATASET_PATH` directory 2. Updates the URDF + meshes to ensure all scaling is positive 3. Generates collision meshes for each robot link (as specified by the source config) 4. Generates metadata necessary for **OmniGibson** diff --git a/omnigibson/examples/objects/import_custom_object.py b/omnigibson/examples/objects/import_custom_object.py index 0624d6c05..688b95d29 100644 --- a/omnigibson/examples/objects/import_custom_object.py +++ b/omnigibson/examples/objects/import_custom_object.py @@ -62,7 +62,7 @@ def import_custom_object( ): """ Imports an externally-defined object asset into an OmniGibson-compatible USD format and saves the imported asset - files to the external dataset directory (gm.EXTERNAL_DATASET_PATH) + files to the external dataset directory (gm.CUSTOM_DATASET_PATH) """ assert len(model) == 6 and model.isalpha(), "Model name must be 6 characters long and contain only letters." collision_method = None if collision_method == "none" else collision_method diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 48c765ef2..462812e7a 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -29,8 +29,8 @@ _DOCSTRING = """ -Imports an externally-defined robot URDF asset into an OmniGibson-compatible USD format and saves the imported asset -files to the external dataset directory (gm.EXTERNAL_DATASET_PATH) +Imports an custom-defined robot URDF asset into an OmniGibson-compatible USD format and saves the imported asset +files to the custom dataset directory (gm.CUSTOM_DATASET_PATH) Note that @config is expected to follow the following format (R1 config shown as an example): diff --git a/omnigibson/macros.py b/omnigibson/macros.py index de90df920..d76bb4532 100644 --- a/omnigibson/macros.py +++ b/omnigibson/macros.py @@ -60,8 +60,8 @@ def determine_gm_path(default_path, env_var_name): # can override assets_path and dataset_path from environment variable gm.ASSET_PATH = determine_gm_path(os.path.join("data", "assets"), "OMNIGIBSON_ASSET_PATH") gm.DATASET_PATH = determine_gm_path(os.path.join("data", "og_dataset"), "OMNIGIBSON_DATASET_PATH") -gm.EXTERNAL_DATASET_PATH = determine_gm_path( - os.path.join("data", "external_dataset"), "OMNIGIBSON_EXTERNAL_DATASET_PATH" +gm.CUSTOM_DATASET_PATH = determine_gm_path( + os.path.join("data", "custom_dataset"), "OMNIGIBSON_CUSTOM_DATASET_PATH" ) gm.KEY_PATH = determine_gm_path(os.path.join("data", "omnigibson.key"), "OMNIGIBSON_KEY_PATH") diff --git a/omnigibson/objects/dataset_object.py b/omnigibson/objects/dataset_object.py index a2058af2b..722ab2a06 100644 --- a/omnigibson/objects/dataset_object.py +++ b/omnigibson/objects/dataset_object.py @@ -27,7 +27,7 @@ class DatasetType(IntEnum): BEHAVIOR = 0 - EXTERNAL = 1 + CUSTOM = 1 class DatasetObject(USDObject): @@ -70,8 +70,8 @@ def __init__( Otherwise, will randomly sample a model given @category dataset_type (DatasetType): Dataset to search for this object. Default is BEHAVIOR, corresponding to the - proprietary (encrypted) BEHAVIOR-1K dataset (gm.DATASET_PATH). 
Possible values are {BEHAVIOR, EXTERNAL}. - If EXTERNAL, assumes asset is found at gm.EXTERNAL_DATASET_PATH and additionally not encrypted. + proprietary (encrypted) BEHAVIOR-1K dataset (gm.DATASET_PATH). Possible values are {BEHAVIOR, CUSTOM}. + If CUSTOM, assumes asset is found at gm.CUSTOM_DATASET_PATH and additionally not encrypted. scale (None or float or 3-array): if specified, sets either the uniform (float) or x,y,z (3-array) scale for this object. A single number corresponds to uniform scaling along the x,y,z axes, whereas a 3-array specifies per-axis scaling. @@ -167,7 +167,7 @@ def get_usd_path(cls, category, model, dataset_type=DatasetType.BEHAVIOR): Returns: str: Absolute filepath to the corresponding USD asset file """ - dataset_path = gm.DATASET_PATH if dataset_type == DatasetType.BEHAVIOR else gm.EXTERNAL_DATASET_PATH + dataset_path = gm.DATASET_PATH if dataset_type == DatasetType.BEHAVIOR else gm.CUSTOM_DATASET_PATH return os.path.join(dataset_path, "objects", category, model, "usd", f"{model}.usd") def sample_orientation(self): diff --git a/omnigibson/utils/asset_conversion_utils.py b/omnigibson/utils/asset_conversion_utils.py index 24bd453f6..85a5bc2ef 100644 --- a/omnigibson/utils/asset_conversion_utils.py +++ b/omnigibson/utils/asset_conversion_utils.py @@ -1128,7 +1128,7 @@ def import_obj_urdf( urdf_path, obj_category, obj_model, - dataset_root=gm.EXTERNAL_DATASET_PATH, + dataset_root=gm.CUSTOM_DATASET_PATH, use_omni_convex_decomp=False, use_usda=False, merge_fixed_joints=False, @@ -2018,7 +2018,7 @@ def get_collision_approximation_for_urdf( def copy_urdf_to_dataset( - urdf_path, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, suffix="original", overwrite=False + urdf_path, category, mdl, dataset_root=gm.CUSTOM_DATASET_PATH, suffix="original", overwrite=False ): # Create a directory for the object obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl / "urdf" @@ -2052,7 +2052,7 @@ def copy_urdf_to_dataset( def generate_urdf_for_obj( - visual_mesh, collision_meshes, category, mdl, dataset_root=gm.EXTERNAL_DATASET_PATH, overwrite=False + visual_mesh, collision_meshes, category, mdl, dataset_root=gm.CUSTOM_DATASET_PATH, overwrite=False ): # Create a directory for the object obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl @@ -2286,7 +2286,7 @@ def import_og_asset_from_urdf( no_decompose_links=None, visual_only_links=None, merge_fixed_joints=False, - dataset_root=gm.EXTERNAL_DATASET_PATH, + dataset_root=gm.CUSTOM_DATASET_PATH, hull_count=32, overwrite=False, use_usda=False, From 002a536f666aa2e90d8501f6526b346f83403ed1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:32:59 +0000 Subject: [PATCH 61/61] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/macros.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/omnigibson/macros.py b/omnigibson/macros.py index d76bb4532..56073daff 100644 --- a/omnigibson/macros.py +++ b/omnigibson/macros.py @@ -60,9 +60,7 @@ def determine_gm_path(default_path, env_var_name): # can override assets_path and dataset_path from environment variable gm.ASSET_PATH = determine_gm_path(os.path.join("data", "assets"), "OMNIGIBSON_ASSET_PATH") gm.DATASET_PATH = determine_gm_path(os.path.join("data", "og_dataset"), "OMNIGIBSON_DATASET_PATH") -gm.CUSTOM_DATASET_PATH = determine_gm_path( - os.path.join("data", "custom_dataset"), 
"OMNIGIBSON_CUSTOM_DATASET_PATH" -) +gm.CUSTOM_DATASET_PATH = determine_gm_path(os.path.join("data", "custom_dataset"), "OMNIGIBSON_CUSTOM_DATASET_PATH") gm.KEY_PATH = determine_gm_path(os.path.join("data", "omnigibson.key"), "OMNIGIBSON_KEY_PATH") # Which GPU to use -- None will result in omni automatically using an appropriate GPU. Otherwise, set with either