From adf79994ee8842ccf3cd1ab4e37ef30bb11ea2b9 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Fri, 20 Dec 2024 14:22:11 -0800 Subject: [PATCH 01/33] add ability to specify sensors to include / exclude, remove enforcing of controller existence due to subsume feature --- omnigibson/robots/a1.py | 10 +++++ omnigibson/robots/active_camera_robot.py | 11 ----- omnigibson/robots/behavior_robot.py | 4 ++ omnigibson/robots/fetch.py | 10 +++++ omnigibson/robots/franka.py | 10 +++++ omnigibson/robots/holonomic_base_robot.py | 10 +++++ omnigibson/robots/locomotion_robot.py | 12 +++--- omnigibson/robots/manipulation_robot.py | 40 ++++++++++--------- .../robots/mobile_manipulation_robot.py | 10 +++++ omnigibson/robots/r1.py | 10 +++++ omnigibson/robots/robot_base.py | 20 +++++++++- omnigibson/robots/tiago.py | 10 +++++ omnigibson/robots/untucked_arm_pose_robot.py | 10 +++++ omnigibson/robots/vx300s.py | 10 +++++ 14 files changed, 139 insertions(+), 38 deletions(-) diff --git a/omnigibson/robots/a1.py b/omnigibson/robots/a1.py index 17b9549fe..3656723f5 100644 --- a/omnigibson/robots/a1.py +++ b/omnigibson/robots/a1.py @@ -33,6 +33,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -70,6 +72,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -128,6 +136,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/active_camera_robot.py b/omnigibson/robots/active_camera_robot.py index 10153bdc8..4f6c9defc 100644 --- a/omnigibson/robots/active_camera_robot.py +++ b/omnigibson/robots/active_camera_robot.py @@ -23,17 +23,6 @@ class ActiveCameraRobot(BaseRobot): """ - def _validate_configuration(self): - # Make sure a camera controller is specified - assert ( - "camera" in self._controllers - ), "Controller 'camera' must exist in controllers! 
Current controllers: {}".format( - list(self._controllers.keys()) - ) - - # run super - super()._validate_configuration() - def _get_proprioception_dict(self): dic = super()._get_proprioception_dict() diff --git a/omnigibson/robots/behavior_robot.py b/omnigibson/robots/behavior_robot.py index b1eebd3de..c512e68aa 100644 --- a/omnigibson/robots/behavior_robot.py +++ b/omnigibson/robots/behavior_robot.py @@ -80,6 +80,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities="rgb", + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", # Unique to ManipulationRobot grasping_mode="assisted", @@ -109,6 +111,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, grasping_mode=grasping_mode, grasping_direction="upper", diff --git a/omnigibson/robots/fetch.py b/omnigibson/robots/fetch.py index 47df3eea3..6d4ac0b1a 100644 --- a/omnigibson/robots/fetch.py +++ b/omnigibson/robots/fetch.py @@ -46,6 +46,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -88,6 +90,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -126,6 +134,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index 30619c71e..29d3bcf2b 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -34,6 +34,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -72,6 +74,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! 
+ include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -192,6 +200,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/holonomic_base_robot.py b/omnigibson/robots/holonomic_base_robot.py index 89abac3ee..596c7030f 100644 --- a/omnigibson/robots/holonomic_base_robot.py +++ b/omnigibson/robots/holonomic_base_robot.py @@ -56,6 +56,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, **kwargs, @@ -91,6 +93,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -119,6 +127,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, **kwargs, diff --git a/omnigibson/robots/locomotion_robot.py b/omnigibson/robots/locomotion_robot.py index 5f5d06926..e615908cc 100644 --- a/omnigibson/robots/locomotion_robot.py +++ b/omnigibson/robots/locomotion_robot.py @@ -26,13 +26,11 @@ class LocomotionRobot(BaseRobot): """ def _validate_configuration(self): - # We make sure that our base controller exists and is a locomotion controller - assert ( - "base" in self._controllers - ), "Controller 'base' must exist in controllers! 
Current controllers: {}".format(list(self._controllers.keys())) - assert isinstance( - self._controllers["base"], LocomotionController - ), "Base controller must be a LocomotionController!" + # If we have a base controller, make sure it is a locomotion controller + if "base" in self._controllers: + assert isinstance( + self._controllers["base"], LocomotionController + ), "Base controller must be a LocomotionController!" # run super super()._validate_configuration() diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 752447992..09b4f2bd5 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -85,6 +85,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -123,6 +125,12 @@ def __init__( Otherwise, valid options should be part of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -178,6 +186,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, **kwargs, @@ -186,25 +196,17 @@ def __init__( def _validate_configuration(self): # Iterate over all arms for arm in self.arm_names: - # We make sure that our arm controller exists and is a manipulation controller - assert ( - "arm_{}".format(arm) in self._controllers - ), "Controller 'arm_{}' must exist in controllers! Current controllers: {}".format( - arm, list(self._controllers.keys()) - ) - assert isinstance( - self._controllers["arm_{}".format(arm)], ManipulationController - ), "Arm {} controller must be a ManipulationController!".format(arm) - - # We make sure that our gripper controller exists and is a gripper controller - assert ( - "gripper_{}".format(arm) in self._controllers - ), "Controller 'gripper_{}' must exist in controllers! 
Current controllers: {}".format( - arm, list(self._controllers.keys()) - ) - assert isinstance( - self._controllers["gripper_{}".format(arm)], GripperController - ), "Gripper {} controller must be a GripperController!".format(arm) + # If we have an arm controller, make sure it is a manipulation controller + if f"arm_{arm}" in self._controllers: + assert isinstance( + self._controllers["arm_{}".format(arm)], ManipulationController + ), "Arm {} controller must be a ManipulationController!".format(arm) + + # If we have a gripper controller, make sure it is a manipulation controller + if f"gripper_{arm}" in self._controllers: + assert isinstance( + self._controllers["gripper_{}".format(arm)], GripperController + ), "Gripper {} controller must be a GripperController!".format(arm) # run super super()._validate_configuration() diff --git a/omnigibson/robots/mobile_manipulation_robot.py b/omnigibson/robots/mobile_manipulation_robot.py index 957abf26d..4474c0966 100644 --- a/omnigibson/robots/mobile_manipulation_robot.py +++ b/omnigibson/robots/mobile_manipulation_robot.py @@ -46,6 +46,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -85,6 +87,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -122,6 +130,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/r1.py b/omnigibson/robots/r1.py index 3321192c4..939623328 100644 --- a/omnigibson/robots/r1.py +++ b/omnigibson/robots/r1.py @@ -41,6 +41,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -80,6 +82,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. 
A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -114,6 +122,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 969a8d0b1..1939c23c5 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -64,6 +64,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, **kwargs, @@ -99,6 +101,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -122,6 +130,8 @@ def __init__( abilities = robot_abilities if abilities is None else robot_abilities.update(abilities) # Initialize internal attributes that will be loaded later + self._include_sensor_names = None if include_sensor_names is None else set(include_sensor_names) + self._exclude_sensor_names = None if exclude_sensor_names is None else set(exclude_sensor_names) self._sensors = None # e.g.: scan sensor, vision sensor # All BaseRobots should have xform properties pre-loaded @@ -208,11 +218,19 @@ def _load_sensors(self): # If the modalities list is empty, don't import the sensor. 
if not sensor_kwargs["modalities"]: continue + + # Possibly filter out the sensor based on name + prim_path = str(prim.GetPrimPath()) + not_blacklisted = self._exclude_sensor_names is None or not any(name in prim_path for name in self._exclude_sensor_names) + whitelisted = self._include_sensor_names is None or any(name in prim_path for name in self._include_sensor_names) + if not (not_blacklisted and whitelisted): + continue + obs_modalities = obs_modalities.union(sensor_kwargs["modalities"]) # Create the sensor and store it internally sensor = create_sensor( sensor_type=prim_type, - relative_prim_path=absolute_prim_path_to_scene_relative(self.scene, str(prim.GetPrimPath())), + relative_prim_path=absolute_prim_path_to_scene_relative(self.scene, prim_path), name=f"{self.name}:{link_name}:{prim_type}:{sensor_counts[prim_type]}", **sensor_kwargs, ) diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 2cfa8876a..980fa3066 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -45,6 +45,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -88,6 +90,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -131,6 +139,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/untucked_arm_pose_robot.py b/omnigibson/robots/untucked_arm_pose_robot.py index 8acbb3d7c..e3cf701a4 100644 --- a/omnigibson/robots/untucked_arm_pose_robot.py +++ b/omnigibson/robots/untucked_arm_pose_robot.py @@ -41,6 +41,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -82,6 +84,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! 
+ include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -119,6 +127,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/vx300s.py b/omnigibson/robots/vx300s.py index 624a4206c..922b3884d 100644 --- a/omnigibson/robots/vx300s.py +++ b/omnigibson/robots/vx300s.py @@ -36,6 +36,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -72,6 +74,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -102,6 +110,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, From 08d811d549e0f57821c8e933a9134ae9a7ddbe66 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Fri, 20 Dec 2024 14:22:21 -0800 Subject: [PATCH 02/33] update configs --- omnigibson/configs/fetch_behavior.yaml | 2 ++ omnigibson/configs/fetch_primitives.yaml | 2 ++ omnigibson/configs/robots/fetch.yaml | 2 ++ omnigibson/configs/robots/freight.yaml | 2 ++ omnigibson/configs/robots/husky.yaml | 2 ++ omnigibson/configs/robots/locobot.yaml | 2 ++ omnigibson/configs/robots/turtlebot.yaml | 2 ++ omnigibson/configs/tiago_primitives.yaml | 2 ++ omnigibson/configs/turtlebot_nav.yaml | 2 ++ 9 files changed, 18 insertions(+) diff --git a/omnigibson/configs/fetch_behavior.yaml b/omnigibson/configs/fetch_behavior.yaml index cfde1f7d0..ce0e831f1 100644 --- a/omnigibson/configs/fetch_behavior.yaml +++ b/omnigibson/configs/fetch_behavior.yaml @@ -40,6 +40,8 @@ scene: robots: - type: Fetch obs_modalities: [scan, rgb, depth] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collision: false action_normalize: true diff --git a/omnigibson/configs/fetch_primitives.yaml b/omnigibson/configs/fetch_primitives.yaml index 09978bd0d..34fba5407 100644 --- a/omnigibson/configs/fetch_primitives.yaml +++ b/omnigibson/configs/fetch_primitives.yaml @@ -33,6 +33,8 @@ scene: robots: - type: Fetch obs_modalities: [rgb] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collisions: true action_normalize: false diff --git a/omnigibson/configs/robots/fetch.yaml b/omnigibson/configs/robots/fetch.yaml index bb1a53b1d..30af214a4 100644 --- a/omnigibson/configs/robots/fetch.yaml +++ b/omnigibson/configs/robots/fetch.yaml @@ -17,6 +17,8 @@ robot: self_collision: true grasping_mode: physical default_arm_pose: vertical + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/freight.yaml b/omnigibson/configs/robots/freight.yaml index 7431b464d..e4d5d6c7d 100644 --- a/omnigibson/configs/robots/freight.yaml +++ b/omnigibson/configs/robots/freight.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/husky.yaml b/omnigibson/configs/robots/husky.yaml index d35680b5b..0de969c48 100644 --- a/omnigibson/configs/robots/husky.yaml +++ b/omnigibson/configs/robots/husky.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/locobot.yaml b/omnigibson/configs/robots/locobot.yaml index c36e144f3..6a067926f 100644 --- a/omnigibson/configs/robots/locobot.yaml +++ b/omnigibson/configs/robots/locobot.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/turtlebot.yaml b/omnigibson/configs/robots/turtlebot.yaml index 0431a430f..ec3b3d2ab 100644 --- 
a/omnigibson/configs/robots/turtlebot.yaml +++ b/omnigibson/configs/robots/turtlebot.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/tiago_primitives.yaml b/omnigibson/configs/tiago_primitives.yaml index 10dec2e7c..f498728f6 100644 --- a/omnigibson/configs/tiago_primitives.yaml +++ b/omnigibson/configs/tiago_primitives.yaml @@ -33,6 +33,8 @@ scene: robots: - type: Tiago obs_modalities: [rgb, depth, seg_semantic, normal, seg_instance, seg_instance_id] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collisions: true action_normalize: false diff --git a/omnigibson/configs/turtlebot_nav.yaml b/omnigibson/configs/turtlebot_nav.yaml index 844208e90..957ce9c5f 100644 --- a/omnigibson/configs/turtlebot_nav.yaml +++ b/omnigibson/configs/turtlebot_nav.yaml @@ -30,6 +30,8 @@ scene: robots: - type: Turtlebot obs_modalities: [scan, rgb, depth] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collision: false action_normalize: true From 341a5a2c09250c7dda120500764e31a348e83136 Mon Sep 17 00:00:00 2001 From: Chengshu Li Date: Fri, 20 Dec 2024 13:02:57 -0800 Subject: [PATCH 03/33] add reverse_preprocess_command to all curobo joint pos execution --- .../starter_semantic_action_primitives.py | 9 +++++++++ omnigibson/examples/robots/curobo_example.py | 17 +++++++++-------- tests/test_curobo.py | 11 ++++++----- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 2f11fc5b8..10ec99a77 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -1065,6 +1065,15 @@ def _move_hand_ik(self, eef_pose, stop_if_stuck=False): (target_pos, target_quat), ignore_failure=True, in_world_frame=False, stop_if_stuck=stop_if_stuck ) + def _q_to_action(self, q): + action = [] + for controller in self.robot.controllers.values(): + command = q[controller.dof_idx] + action.append(controller._reverse_preprocess_command(command)) + action = th.cat(action, dim=0) + assert action.shape[0] == self.robot.action_dim + return action + def _add_linearly_interpolated_waypoints(self, plan, max_inter_dist): """ Adds waypoints to the plan so the distance between values in the plan never exceeds the max_inter_dist. 
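The `_q_to_action` helper added above captures the conversion that the example script and test below repeat inline: cuRobo plans in absolute joint positions, while `env.step()` expects each controller's slice of the action in that controller's command space (possibly normalized to [-1, 1] and/or expressed as a delta). A minimal sketch of the pattern, assuming only the `dof_idx` attribute and the `_reverse_preprocess_command` hook that this patch relies on:

import torch as th

def q_to_action(q, robot):
    # Map absolute joint positions q (shape: n_dof) into the robot's action space.
    action = []
    for controller in robot.controllers.values():
        # Each controller owns a subset of the robot's DOFs (controller.dof_idx);
        # _reverse_preprocess_command undoes normalization / delta conversion so that
        # the forward preprocessing inside env.step() reproduces the planned target.
        action.append(controller._reverse_preprocess_command(q[controller.dof_idx]))
    action = th.cat(action, dim=0)
    assert action.shape[0] == robot.action_dim
    return action

# Executing a planned trajectory then reduces to:
#   for q in q_traj:
#       env.step(q_to_action(q.cpu(), robot))
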
diff --git a/omnigibson/examples/robots/curobo_example.py b/omnigibson/examples/robots/curobo_example.py index ae54ce866..570f3b25f 100644 --- a/omnigibson/examples/robots/curobo_example.py +++ b/omnigibson/examples/robots/curobo_example.py @@ -37,7 +37,7 @@ def execute_trajectory(q_traj, env, robot, attached_obj): for i, q in enumerate(q_traj): q = q.cpu() q = set_gripper_joint_positions(robot, q, attached_obj) - command = q_to_command(q, robot) + command = q_to_action(q, robot) num_repeat = 5 print(f"Executing waypoint {i}/{len(q_traj)}") @@ -79,21 +79,22 @@ def control_gripper(env, robot, attached_obj): # Control the gripper to open or close, while keeping the rest of the robot still q = robot.get_joint_positions() q = set_gripper_joint_positions(robot, q, attached_obj) - command = q_to_command(q, robot) + command = q_to_action(q, robot) num_repeat = 30 print(f"Gripper (attached_obj={attached_obj})") for _ in range(num_repeat): env.step(command) -def q_to_command(q, robot): +def q_to_action(q, robot): # Convert target joint positions to command - command = [] + action = [] for controller in robot.controllers.values(): - command.append(q[controller.dof_idx]) - command = th.cat(command, dim=0) - assert command.shape[0] == robot.action_dim - return command + command = q[controller.dof_idx] + action.append(controller._reverse_preprocess_command(command)) + action = th.cat(action, dim=0) + assert action.shape[0] == robot.action_dim + return action def test_curobo(): diff --git a/tests/test_curobo.py b/tests/test_curobo.py index caf230df4..d20befd33 100644 --- a/tests/test_curobo.py +++ b/tests/test_curobo.py @@ -430,16 +430,17 @@ def test_curobo(): else: # Convert target joint positions to command q = q.cpu() - command = [] + action = [] for controller in robot.controllers.values(): - command.append(q[controller.dof_idx]) - command = th.cat(command, dim=0) - assert command.shape[0] == robot.action_dim + command = q[controller.dof_idx] + action.append(controller._reverse_preprocess_command(command)) + action = th.cat(action, dim=0) + assert action.shape[0] == robot.action_dim num_repeat = 3 for j in range(num_repeat): print(f"Executing waypoint {i}/{len(q_traj)}, step {j}") - env.step(command) + env.step(action) for contact in robot.contact_list(): assert contact.body0 in robot.link_prim_paths From c1f5de665bf0e6a88c53603be4c467e832df5601 Mon Sep 17 00:00:00 2001 From: Chengshu Li Date: Fri, 20 Dec 2024 15:09:55 -0800 Subject: [PATCH 04/33] fix minor bug for is_grasping in manipulation_robot --- omnigibson/robots/manipulation_robot.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 09b4f2bd5..667c88373 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -256,8 +256,8 @@ def is_grasping(self, arm="default", candidate_obj=None): # If candidate obj is not None, we also check to see if our fingers are in contact with the object if is_grasping == IsGraspingState.TRUE and candidate_obj is not None: finger_links = {link for link in self.finger_links[arm]} - is_grasping = len(candidate_obj.states[ContactBodies].get_value().intersection(finger_links)) > 0 - + if len(candidate_obj.states[ContactBodies].get_value().intersection(finger_links)) == 0: + is_grasping = IsGraspingState.FALSE return is_grasping def _find_gripper_contacts(self, arm="default", return_contact_positions=False): From bc46733a591b49b23a3919d8783d0267de1afb00 Mon Sep 17 
00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 02:14:40 +0000 Subject: [PATCH 05/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/robots/robot_base.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 0237b4b4c..4a938dc74 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -223,8 +223,12 @@ def _load_sensors(self): # Possibly filter out the sensor based on name prim_path = str(prim.GetPrimPath()) - not_blacklisted = self._exclude_sensor_names is None or not any(name in prim_path for name in self._exclude_sensor_names) - whitelisted = self._include_sensor_names is None or any(name in prim_path for name in self._include_sensor_names) + not_blacklisted = self._exclude_sensor_names is None or not any( + name in prim_path for name in self._exclude_sensor_names + ) + whitelisted = self._include_sensor_names is None or any( + name in prim_path for name in self._include_sensor_names + ) if not (not_blacklisted and whitelisted): continue From cfddd5b32b9d3850a1f60aa446e1c52701006ba9 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Fri, 17 Jan 2025 16:45:31 -0800 Subject: [PATCH 06/33] add updated compute function for joint controller --- omnigibson/controllers/joint_controller.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/omnigibson/controllers/joint_controller.py b/omnigibson/controllers/joint_controller.py index bf2a2545e..b5292675c 100644 --- a/omnigibson/controllers/joint_controller.py +++ b/omnigibson/controllers/joint_controller.py @@ -13,7 +13,7 @@ ) from omnigibson.macros import create_module_macros from omnigibson.utils.backend_utils import _compute_backend as cb -from omnigibson.utils.backend_utils import _ComputeBackend, _ComputeNumpyBackend, _ComputeTorchBackend +from omnigibson.utils.backend_utils import add_compute_function from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.ui_utils import create_module_logger @@ -223,7 +223,7 @@ def compute_control(self, goal_dict, control_dict): else: # effort u = target - u = cb.compute_joint_torques(u, control_dict["mass_matrix"], self.dof_idx) + u = cb.get_custom_method("compute_joint_torques")(u, control_dict["mass_matrix"], self.dof_idx) # Add gravity compensation if self._use_gravity_compensation: @@ -341,6 +341,4 @@ def _compute_joint_torques_numpy( # Set these as part of the backend values -setattr(_ComputeBackend, "compute_joint_torques", None) -setattr(_ComputeTorchBackend, "compute_joint_torques", _compute_joint_torques_torch) -setattr(_ComputeNumpyBackend, "compute_joint_torques", _compute_joint_torques_numpy) +add_compute_function(name="compute_joint_torques", np_function=_compute_joint_torques_numpy, th_function=_compute_joint_torques_torch) From 73d7f796cf65f5861041849ab4420f43c7614073 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Fri, 17 Jan 2025 16:46:13 -0800 Subject: [PATCH 07/33] fix data wrapper bugs --- omnigibson/envs/data_wrapper.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index 30326a463..dc44be8f1 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -37,8 +37,8 @@ def __init__(self, env, output_path, 
only_successes=True): """ # Make sure the wrapped environment inherits correct omnigibson format assert isinstance( - env, og.Environment - ), "Expected wrapped @env to be a subclass of OmniGibson's Environment class!" + env, (og.Environment, EnvironmentWrapper) + ), "Expected wrapped @env to be a subclass of OmniGibson's Environment class or EnvironmentWrapper!" # Only one scene is supported for now assert len(og.sim.scenes) == 1, "Only one scene is currently supported for DataWrapper env!" @@ -419,6 +419,8 @@ def create_from_hdf5( robot_obs_modalities, robot_sensor_config=None, external_sensors_config=None, + include_sensor_names=None, + exclude_sensor_names=None, n_render_iterations=5, only_successes=False, ): @@ -440,6 +442,12 @@ def create_from_hdf5( dictionary specifying an individual external sensor's relevant parameters. See the example external_sensors key in fetch_behavior.yaml env config. This can be used to specify additional sensors to collect observations during playback. + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors during playback + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors during playback n_render_iterations (int): Number of rendering iterations to use when loading each stored frame from the recorded data. This is needed because the omniverse real-time raytracing always lags behind the underlying physical state by a few frames, and additionally produces transient visual artifacts when @@ -472,6 +480,9 @@ def create_from_hdf5( # Set observation modalities and update sensor config for robot_cfg in config["robots"]: robot_cfg["obs_modalities"] = robot_obs_modalities + robot_cfg["include_sensor_names"] = include_sensor_names + robot_cfg["exclude_sensor_names"] = exclude_sensor_names + if robot_sensor_config is not None: robot_cfg["sensor_config"] = robot_sensor_config if external_sensors_config is not None: From 33f457485bf6998c046810d1af49297e8bb9a7bb Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Fri, 17 Jan 2025 16:46:56 -0800 Subject: [PATCH 08/33] fix deprecated API bug --- omnigibson/prims/xform_prim.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/omnigibson/prims/xform_prim.py b/omnigibson/prims/xform_prim.py index 27f71f8f0..6fea9561c 100644 --- a/omnigibson/prims/xform_prim.py +++ b/omnigibson/prims/xform_prim.py @@ -344,7 +344,7 @@ def get_local_pose(self): logger.warning( 'get_local_pose is deprecated and will be removed in a future release. Use get_position_orientation(frame="parent") instead' ) - return self.get_position_orientation(self.prim_path, frame="parent") + return self.get_position_orientation(frame="parent") def set_local_pose(self, position=None, orientation=None): """ @@ -359,7 +359,7 @@ def set_local_pose(self, position=None, orientation=None): logger.warning( 'set_local_pose is deprecated and will be removed in a future release. 
Use set_position_orientation(position=position, orientation=orientation, frame="parent") instead' ) - return self.set_position_orientation(self.prim_path, position, orientation, frame="parent") + return self.set_position_orientation(position, orientation, frame="parent") def get_world_scale(self): """ From 97903ac0d956b3e5003388381f631de69b0ff5cb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 18 Jan 2025 00:47:12 +0000 Subject: [PATCH 09/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/controllers/joint_controller.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/omnigibson/controllers/joint_controller.py b/omnigibson/controllers/joint_controller.py index b5292675c..3ec3871e4 100644 --- a/omnigibson/controllers/joint_controller.py +++ b/omnigibson/controllers/joint_controller.py @@ -341,4 +341,6 @@ def _compute_joint_torques_numpy( # Set these as part of the backend values -add_compute_function(name="compute_joint_torques", np_function=_compute_joint_torques_numpy, th_function=_compute_joint_torques_torch) +add_compute_function( + name="compute_joint_torques", np_function=_compute_joint_torques_numpy, th_function=_compute_joint_torques_torch +) From d49b5f99f3c029f80ebd348271454b433eef903f Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 21 Jan 2025 10:38:16 -0800 Subject: [PATCH 10/33] add ability to process obs in data wrapper env, fix bug where objects initially sleeping are not tracked --- omnigibson/envs/data_wrapper.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index dc44be8f1..9bf3cd3e5 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -330,6 +330,10 @@ def reset(self): # Call super first init_obs, init_info = super().reset() + # Make sure all objects are awake to begin to guarantee we save their initial states + for obj in self.scene.objects: + obj.wake() + # Store this initial state as part of the trajectory state = og.sim.dump_state(serialized=True) step_data = { @@ -523,10 +527,21 @@ def __init__(self, env, input_path, output_path, n_render_iterations=5, only_suc # Run super super().__init__(env=env, output_path=output_path, only_successes=only_successes) + def _process_obs(self, obs, info): + """ + Modifies @obs inplace for any relevant post-processing + + Args: + obs (dict): Keyword-mapped relevant observations from the immediate env step + info (dict): Keyword-mapped relevant information from the immediate env step + """ + # Default is a no-op + return obs + def _parse_step_data(self, action, obs, reward, terminated, truncated, info): # Store action, obs, reward, terminated, truncated, info step_data = dict() - step_data["obs"] = obs + step_data["obs"] = self._process_obs(obs=obs, info=info) step_data["action"] = action step_data["reward"] = reward step_data["terminated"] = terminated @@ -565,8 +580,8 @@ def playback_episode(self, episode_id, record=True): # If record, record initial observations if record: - init_obs, _, _, _, _ = self.env.step(action=action[0], n_render_iterations=self.n_render_iterations) - step_data = {"obs": init_obs} + init_obs, _, _, _, init_info = self.env.step(action=action[0], n_render_iterations=self.n_render_iterations) + step_data = {"obs": self._process_obs(obs=init_obs, info=init_info)} 
self.current_traj_history.append(step_data) for i, (a, s, ss, r, te, tr) in enumerate( From efe7808ce1adcfe3aa0c8778dd497fe2940ef652 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 22 Jan 2025 14:11:10 -0800 Subject: [PATCH 11/33] update data wrapper for recording videos --- docs/tutorials/demo_collection.md | 2 +- omnigibson/envs/data_wrapper.py | 66 +++++++++++++++++++++++++------ tests/test_data_collection.py | 2 +- 3 files changed, 56 insertions(+), 14 deletions(-) diff --git a/docs/tutorials/demo_collection.md b/docs/tutorials/demo_collection.md index bd52e5ba9..3fe5be681 100644 --- a/docs/tutorials/demo_collection.md +++ b/docs/tutorials/demo_collection.md @@ -101,7 +101,7 @@ playback_env = DataPlaybackWrapper.create_from_hdf5( ) # Playback the entire dataset and record observations -playback_env.playback_dataset(record=True) +playback_env.playback_dataset(record_data=True) # Save the recorded playback data playback_env.save_data() diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index 9bf3cd3e5..e7d9908ec 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -5,16 +5,17 @@ from pathlib import Path import h5py +import imageio import torch as th import omnigibson as og import omnigibson.lazy as lazy -from omnigibson.envs.env_wrapper import EnvironmentWrapper +from omnigibson.envs.env_wrapper import EnvironmentWrapper, create_wrapper from omnigibson.macros import gm from omnigibson.objects.object_base import BaseObject from omnigibson.sensors.vision_sensor import VisionSensor from omnigibson.utils.config_utils import TorchEncoder -from omnigibson.utils.python_utils import create_object_from_init_info, h5py_group_to_torch +from omnigibson.utils.python_utils import create_object_from_init_info, h5py_group_to_torch, assert_valid_key from omnigibson.utils.ui_utils import create_module_logger # Create module logger @@ -420,13 +421,14 @@ def create_from_hdf5( cls, input_path, output_path, - robot_obs_modalities, + robot_obs_modalities=tuple(), robot_sensor_config=None, external_sensors_config=None, include_sensor_names=None, exclude_sensor_names=None, n_render_iterations=5, only_successes=False, + include_env_wrapper=False, ): """ Create a DataPlaybackWrapper environment instance form the recorded demonstration info @@ -458,6 +460,7 @@ def create_from_hdf5( the physical state changes. Increasing this number will improve the rendered quality at the expense of speed. 
only_successes (bool): Whether to only save successful episodes + include_env_wrapper (bool): Whether to include environment wrapper stored in the underlying env config Returns: DataPlaybackWrapper: Generated playback environment @@ -483,7 +486,7 @@ def create_from_hdf5( # Set observation modalities and update sensor config for robot_cfg in config["robots"]: - robot_cfg["obs_modalities"] = robot_obs_modalities + robot_cfg["obs_modalities"] = list(robot_obs_modalities) robot_cfg["include_sensor_names"] = include_sensor_names robot_cfg["exclude_sensor_names"] = exclude_sensor_names @@ -491,10 +494,19 @@ def create_from_hdf5( robot_cfg["sensor_config"] = robot_sensor_config if external_sensors_config is not None: config["env"]["external_sensors"] = external_sensors_config + # config["env"]["external_sensors"] = merge_nested_dicts( + # base_dict=config["env"]["external_sensors"], + # extra_dict=external_sensors_config, + # inplace=True, + # ) # Load env env = og.Environment(configs=config) + # Optionally include the desired environment wrapper specified in the config + if include_env_wrapper: + env = create_wrapper(env=env) + # Wrap and return env return cls( env=env, @@ -548,14 +560,17 @@ def _parse_step_data(self, action, obs, reward, terminated, truncated, info): step_data["truncated"] = truncated return step_data - def playback_episode(self, episode_id, record=True): + def playback_episode(self, episode_id, record_data=True, video_writer=None, video_rgb_key=None): """ Playback episode @episode_id, and optionally record observation data if @record is True Args: episode_id (int): Episode to playback. This should be a valid demo ID number from the inputted collected data hdf5 file - record (bool): Whether to record data during playback or not + record_data (bool): Whether to record data during playback or not + video_writer (None or imageio.Writer): If specified, writer object that RGB frames will be written to + video_rgb_key (None or str): If specified, observation key representing the RGB frames to write to video. + If @video_writer is specified, this must also be specified! """ data_grp = self.input_hdf5["data"] assert f"demo_{episode_id}" in data_grp, f"No valid episode with ID {episode_id} found!" 
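Combined with the `create_video_writer` helper added at the end of this file's diff, the new keyword arguments support a playback-to-video workflow along these lines (a sketch only; the HDF5 paths and the RGB observation key are placeholders that depend on how the dataset was collected and how the robot's sensors are named):

from omnigibson.envs.data_wrapper import DataPlaybackWrapper

env = DataPlaybackWrapper.create_from_hdf5(
    input_path="demos.hdf5",                # placeholder: collected demo file
    output_path="demos_playback.hdf5",
    robot_obs_modalities=["rgb", "proprio"],
)
writer = env.create_video_writer(fpath="demo_0.mp4", fps=30)
env.playback_episode(
    episode_id=0,
    record_data=False,                      # write video only; skip HDF5 re-recording
    video_writer=writer,
    video_rgb_key="robot0::robot0:eyes:Camera:0::rgb",  # hypothetical obs key
)
writer.close()
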
@@ -579,7 +594,7 @@ def playback_episode(self, episode_id, record=True): og.sim.load_state(state[0, : int(state_size[0])], serialized=True) # If record, record initial observations - if record: + if record_data: init_obs, _, _, _, init_info = self.env.step(action=action[0], n_render_iterations=self.n_render_iterations) step_data = {"obs": self._process_obs(obs=init_obs, info=init_info)} self.current_traj_history.append(step_data) @@ -611,7 +626,7 @@ def playback_episode(self, episode_id, record=True): self.current_obs, _, _, _, info = self.env.step(action=a, n_render_iterations=self.n_render_iterations) # If recording, record data - if record: + if record_data: step_data = self._parse_step_data( action=a, obs=self.current_obs, @@ -622,17 +637,44 @@ def playback_episode(self, episode_id, record=True): ) self.current_traj_history.append(step_data) + # If writing to video, write desired frame + if video_writer is not None: + assert_valid_key(video_rgb_key, self.current_obs.keys(), "video_rgb_key") + video_writer.append_data(self.current_obs[video_rgb_key][:, :, :3].numpy()) + self.step_count += 1 - if record: + if record_data: self.flush_current_traj() - def playback_dataset(self, record=True): + def playback_dataset(self, record_data=False, video_writer=None, video_rgb_key=None): """ Playback all episodes from the input HDF5 file, and optionally record observation data if @record is True Args: - record (bool): Whether to record data during playback or not + record_data (bool): Whether to record data during playback or not + video_writer (None or imageio.Writer): If specified, writer object that RGB frames will be written to + video_rgb_key (None or str): If specified, observation key representing the RGB frames to write to video. + If @video_writer is specified, this must also be specified! """ for episode_id in range(self.input_hdf5["data"].attrs["n_episodes"]): - self.playback_episode(episode_id=episode_id, record=record) + self.playback_episode( + episode_id=episode_id, + record_data=record_data, + video_writer=video_writer, + video_rgb_key=video_rgb_key, + ) + + def create_video_writer(self, fpath, fps=30): + """ + Creates a video writer to write video frames to when playing back the dataset + + Args: + fpath (str): Absolute path that the generated video writer will write to. Should end in .mp4 + fps (int): Desired frames per second when generating video + + Returns: + imageio.Writer: Generated video writer + """ + assert fpath.endswith(".mp4"), f"Video writer fpath must end with .mp4! 
Got: {fpath}" + return imageio.get_writer(fpath, fps=fps) diff --git a/tests/test_data_collection.py b/tests/test_data_collection.py index 30625b3b1..fac7532c2 100644 --- a/tests/test_data_collection.py +++ b/tests/test_data_collection.py @@ -136,5 +136,5 @@ def test_data_collect_and_playback(): n_render_iterations=1, only_successes=False, ) - env.playback_dataset(record=True) + env.playback_dataset(record_data=True) env.save_data() From f7aa37a873804fd7617faa971cacedd127ea2dbe Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 23 Jan 2025 12:56:34 -0800 Subject: [PATCH 12/33] add ability to wrap additional envs for data wrapper; standardize wrapper step() kwargs --- omnigibson/envs/data_wrapper.py | 12 ++++++++++-- omnigibson/envs/env_wrapper.py | 16 +++++++++++----- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index e7d9908ec..8d417e28e 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -64,12 +64,13 @@ def __init__(self, env, output_path, only_successes=True): # Run super super().__init__(env=env) - def step(self, action): + def step(self, action, n_render_iterations=1): """ Run the environment step() function and collect data Args: action (th.Tensor): action to take in environment + n_render_iterations (int): Number of rendering iterations to use before returning observations Returns: 5-tuple: @@ -84,7 +85,7 @@ def step(self, action): if isinstance(action, dict): action = th.cat([act for act in action.values()]) - next_obs, reward, terminated, truncated, info = self.env.step(action) + next_obs, reward, terminated, truncated, info = self.env.step(action, n_render_iterations=n_render_iterations) self.step_count += 1 # Aggregate step data @@ -429,6 +430,7 @@ def create_from_hdf5( n_render_iterations=5, only_successes=False, include_env_wrapper=False, + additional_wrapper_configs=None, ): """ Create a DataPlaybackWrapper environment instance form the recorded demonstration info @@ -461,6 +463,8 @@ def create_from_hdf5( speed. 
only_successes (bool): Whether to only save successful episodes include_env_wrapper (bool): Whether to include environment wrapper stored in the underlying env config + additional_wrapper_configs (None or list of dict): If specified, list of wrapper config(s) specifying + environment wrappers to wrap the internal environment class in Returns: DataPlaybackWrapper: Generated playback environment @@ -507,6 +511,10 @@ def create_from_hdf5( if include_env_wrapper: env = create_wrapper(env=env) + if additional_wrapper_configs is not None: + for wrapper_cfg in additional_wrapper_configs: + env = create_wrapper(env=env, wrapper_cfg=wrapper_cfg) + # Wrap and return env return cls( env=env, diff --git a/omnigibson/envs/env_wrapper.py b/omnigibson/envs/env_wrapper.py index 09f69d80e..ce8a9aa8e 100644 --- a/omnigibson/envs/env_wrapper.py +++ b/omnigibson/envs/env_wrapper.py @@ -10,11 +10,16 @@ log = create_module_logger(module_name=__name__) -def create_wrapper(env): +def create_wrapper(env, wrapper_cfg=None): """ - Wraps environment @env with wrapper defined by env.wrapper_config + Wraps environment @env with wrapper defined @wrapper_cfg + + Args: + env (og.Environment): environment to wrap + wrapper_cfg (None or dict): Specified, configuration to wrap environment with + If not specified, will default to env.wrapper_config """ - wrapper_cfg = deepcopy(env.wrapper_config) + wrapper_cfg = deepcopy(env.wrapper_config if wrapper_cfg is None else wrapper_cfg) wrapper_type = wrapper_cfg.pop("type") wrapper_cfg["env"] = env @@ -41,12 +46,13 @@ def __init__(self, env): # Run super super().__init__(obj=env) - def step(self, action): + def step(self, action, n_render_iterations=1): """ By default, run the normal environment step() function Args: action (th.tensor): action to take in environment + n_render_iterations (int): Number of rendering iterations to use before returning observations Returns: 4-tuple: @@ -56,7 +62,7 @@ def step(self, action): - (bool) whether the current episode is truncated - (dict) misc information """ - return self.env.step(action) + return self.env.step(action, n_render_iterations=n_render_iterations) def reset(self): """ From a1ae1b94df08c883fa07c36bca2ba691457cdae4 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 23 Jan 2025 16:34:52 -0800 Subject: [PATCH 13/33] programmatically infer grasping points --- omnigibson/robots/a1.py | 10 +- omnigibson/robots/behavior_robot.py | 4 +- omnigibson/robots/fetch.py | 22 --- omnigibson/robots/franka.py | 8 +- omnigibson/robots/franka_mounted.py | 8 +- omnigibson/robots/manipulation_robot.py | 195 +++++++++++++++++++++++- omnigibson/robots/r1.py | 24 --- omnigibson/robots/stretch.py | 8 +- omnigibson/robots/tiago.py | 32 ---- omnigibson/robots/vx300s.py | 20 --- 10 files changed, 207 insertions(+), 124 deletions(-) diff --git a/omnigibson/robots/a1.py b/omnigibson/robots/a1.py index 3f9726652..773f9006b 100644 --- a/omnigibson/robots/a1.py +++ b/omnigibson/robots/a1.py @@ -175,8 +175,8 @@ def _default_joint_pos(self): return self._default_robot_model_joint_pos @property - def finger_lengths(self): - return {self.default_arm: 0.087} + def eef_to_fingertip_lengths(self): + return {arm: {name: 0.087 for name in names} for arm, names in self.finger_link_names.items()} @cached_property def arm_link_names(self): @@ -215,12 +215,12 @@ def teleop_rotation_offset(self): return {self.default_arm: self._teleop_rotation_offset} @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return 
{self.default_arm: self._ag_start_points} @property - def assisted_grasp_end_points(self): - return {self.default_arm: self._ag_start_points} + def _assisted_grasp_end_points(self): + return {self.default_arm: self._ag_end_points} @property def disabled_collision_pairs(self): diff --git a/omnigibson/robots/behavior_robot.py b/omnigibson/robots/behavior_robot.py index 53a75d796..6dfac40cc 100644 --- a/omnigibson/robots/behavior_robot.py +++ b/omnigibson/robots/behavior_robot.py @@ -404,7 +404,7 @@ def set_position_orientation( ) @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): side_coefficients = {"left": th.tensor([1, -1, 1]), "right": th.tensor([1, 1, 1])} return { arm: [ @@ -419,7 +419,7 @@ def assisted_grasp_start_points(self): } @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): side_coefficients = {"left": th.tensor([1, -1, 1]), "right": th.tensor([1, 1, 1])} return { arm: [ diff --git a/omnigibson/robots/fetch.py b/omnigibson/robots/fetch.py index 51165d466..36bc9ea54 100644 --- a/omnigibson/robots/fetch.py +++ b/omnigibson/robots/fetch.py @@ -214,28 +214,6 @@ def wheel_radius(self): def wheel_axle_length(self): return 0.372 - @property - def finger_lengths(self): - return {self.default_arm: 0.1} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="r_gripper_finger_link", position=th.tensor([0.025, -0.012, 0.0])), - GraspingPoint(link_name="r_gripper_finger_link", position=th.tensor([-0.025, -0.012, 0.0])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="l_gripper_finger_link", position=th.tensor([0.025, 0.012, 0.0])), - GraspingPoint(link_name="l_gripper_finger_link", position=th.tensor([-0.025, 0.012, 0.0])), - ] - } - @cached_property def base_joint_names(self): return ["l_wheel_joint", "r_wheel_joint"] diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index 142d80d0e..daaaa03ac 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -238,8 +238,8 @@ def _default_joint_pos(self): return self._default_robot_model_joint_pos @property - def finger_lengths(self): - return {self.default_arm: 0.1} + def eef_to_fingertip_lengths(self): + return {arm: {name: 0.1 for name in names} for arm, names in self.finger_link_names.items()} @cached_property def arm_link_names(self): @@ -286,11 +286,11 @@ def teleop_rotation_offset(self): return {self.default_arm: self._teleop_rotation_offset} @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return {self.default_arm: self._ag_start_points} @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): return {self.default_arm: self._ag_start_points} @property diff --git a/omnigibson/robots/franka_mounted.py b/omnigibson/robots/franka_mounted.py index bf531aa1f..d449e4190 100644 --- a/omnigibson/robots/franka_mounted.py +++ b/omnigibson/robots/franka_mounted.py @@ -24,8 +24,8 @@ def _default_controllers(self): return controllers @property - def finger_lengths(self): - return {self.default_arm: 0.15} + def eef_to_fingertip_lengths(self): + return {arm: {name: 0.15 for name in names} for arm, names in self.finger_link_names.items()} @property def usd_path(self): @@ -40,7 +40,7 @@ def curobo_path(self): return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted_description_curobo.yaml") @property - def 
assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return { self.default_arm: [ GraspingPoint(link_name="panda_rightfinger", position=th.tensor([0.0, 0.001, 0.045])), @@ -48,7 +48,7 @@ def assisted_grasp_start_points(self): } @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): return { self.default_arm: [ GraspingPoint(link_name="panda_leftfinger", position=th.tensor([0.0, 0.001, 0.045])), diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 1ea69555f..5afbe6247 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -22,13 +22,19 @@ ) from omnigibson.macros import create_module_macros, gm from omnigibson.object_states import ContactBodies +from omnigibson.prims.geom_prim import VisualGeomPrim from omnigibson.robots.robot_base import BaseRobot from omnigibson.utils.backend_utils import _compute_backend as cb from omnigibson.utils.constants import JointType, PrimType from omnigibson.utils.geometry_utils import generate_points_in_volume_checker_function from omnigibson.utils.python_utils import assert_valid_key, classproperty from omnigibson.utils.sampling_utils import raytest_batch -from omnigibson.utils.usd_utils import ControllableObjectViewAPI, GripperRigidContactAPI, create_joint +from omnigibson.utils.usd_utils import ControllableObjectViewAPI, GripperRigidContactAPI, create_joint, \ + create_primitive_mesh, absolute_prim_path_to_scene_relative +from omnigibson.utils.ui_utils import create_module_logger + +# Create module logger +log = create_module_logger(module_name=__name__) # Create settings for this module m = create_module_macros(module_path=__file__) @@ -157,6 +163,9 @@ def __init__( self._grasping_direction = grasping_direction self._disable_grasp_handling = disable_grasp_handling + # Other variables filled in at runtime + self._eef_to_fingertip_lengths = None # dict mapping arm name to finger name to offset + # Initialize other variables used for assistive grasping self._ag_freeze_joint_pos = { arm: {} for arm in self.arm_names @@ -214,6 +223,14 @@ def _validate_configuration(self): def _initialize(self): super()._initialize() + # Infer relevant link properties, e.g.: fingertip location, AG grasping points + # We use a try / except to maintain backwards-compatibility with robots that do not follow our + # OG-specified convention + try: + self._infer_finger_properties() + except AssertionError as e: + log.warning(f"Could not infer relevant finger link properties because:\n\n{e}") + if gm.AG_CLOTH: for arm in self.arm_names: self._ag_check_in_volume[arm], self._ag_calculate_volume[arm] = ( @@ -222,6 +239,136 @@ def _initialize(self): ) ) + def _infer_finger_properties(self): + """ + Infers relevant finger properties based on the given finger meshes of the robot + + NOTE: This assumes the given EEF convention for parallel jaw grippers -- i.e.: + z points in the direction of the fingers, y points in the direction of grasp articulation, and x + is then inferred automatically + """ + # Calculate and cache fingertip to eef frame offsets, as well as AG grasping points + self._eef_to_fingertip_lengths = dict() + self._default_ag_start_points = dict() + self._default_ag_end_points = dict() + for arm, finger_links in self.finger_links.items(): + self._eef_to_fingertip_lengths[arm] = dict() + eef_link = self.eef_links[arm] + world_to_eef_tf = T.pose2mat(eef_link.get_position_orientation()) + eef_to_world_tf = 
T.pose_inv(world_to_eef_tf) + + # Infer parent link for this finger + finger_parent_link, finger_parent_max_z = None, None + is_parallel_jaw = len(finger_links) == 2 + if not is_parallel_jaw: + self._default_ag_start_points[arm] = None + self._default_ag_end_points[arm] = None + for i, finger_link in enumerate(finger_links): + # Find parent, and make sure one exists + parent_prim_path, parent_link = None, None + for joint in self.joints.values(): + if finger_link.prim_path == joint.body1: + parent_prim_path = joint.body0 + break + assert parent_prim_path is not None, f"Expected articulated parent joint for finger link {finger_link.name} but found none!" + for link in self.links.values(): + if parent_prim_path == link.prim_path: + parent_link = link + break + assert parent_link is not None, f"Expected parent link located at {parent_prim_path} but found none!" + # Make sure all fingers share the same parent + if finger_parent_link is None: + finger_parent_link = parent_link + finger_parent_pts = finger_parent_link.collision_boundary_points_world + assert finger_parent_pts is not None, f"Expected finger parent points to be defined for parent link {finger_parent_link.name}, but got None!" + # Convert from world frame -> eef frame + finger_parent_pts = th.concatenate([finger_parent_pts, th.ones(len(finger_parent_pts), 1)], dim=-1) + finger_parent_pts = (finger_parent_pts @ eef_to_world_tf.T)[:, :3] + finger_parent_max_z = finger_parent_pts[:, 2].max().item() + else: + assert finger_parent_link == parent_link, \ + f"Expected all fingers to have same parent link, but found multiple parents at {finger_parent_link.prim_path} and {parent_link.prim_path}" + + # Calculate this finger's collision boundary points in the world frame + finger_pts = finger_link.collision_boundary_points_world + assert finger_pts is not None, f"Expected finger points to be defined for link {finger_link.name}, but got None!" + # Convert from world frame -> eef frame + finger_pts = th.concatenate([finger_pts, th.ones(len(finger_pts), 1)], dim=-1) + finger_pts = (finger_pts @ eef_to_world_tf.T)[:, :3] + + # Since we know the EEF frame always points with z outwards towards the fingers, the outer-most point / + # fingertip is the maximum z value + finger_max_z = finger_pts[:, 2].max().item() + assert finger_max_z > 0, f"Expected positive fingertip to eef frame offset for link {finger_link.name}, but got: {finger_max_z}. Does the EEF frame z-axis point in the direction of the fingers?" + self._eef_to_fingertip_lengths[arm][finger_link.name] = finger_max_z + + # Now, only keep points that are above the parent max z by 20% for inferring y values + finger_range = finger_max_z - finger_parent_max_z + valid_idxs = th.where(finger_pts[:, 2] > (finger_parent_max_z + finger_range * 0.2))[0] + finger_pts = finger_pts[valid_idxs] + # Make sure all points lie on a single side of the EEF's y-axis + y_signs = th.sign(finger_pts[:, 1]) + assert th.all(y_signs == y_signs[0]).item(), "Expected all finger points to lie on single side of EEF y-axis!" 
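+                # y_sign (+/-1) records which side of the EEF y-axis this finger lies on, while
+                # y_abs_min approximates how far the finger's inner grasping surface sits from that axis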
+ y_sign = y_signs[0].item() + y_abs_min = th.abs(finger_pts[:, 1]).min().item() + + # Compute the default grasping points for this finger + # For now, we only have a strong heuristic defined for parallel jaw grippers, which assumes that + # there are exactly 2 fingers + # In this case, this is defined as the x2 (x,y,z) tuples where: + # z - the 20% and 80% length between the range from [finger_parent_max_z, finger_max_z] + # This is synonymous to inferring the length of the finger (lower bounded by the gripper base, + # assumed to be the parent link), and then taking the values 20% and 80% along its length + # y - the value closest to the edge of the finger surface in the direction of +/- EEF y-axis. + # This assumes that each individual finger lies completely on one side of the EEF y-axis + # x - 0. This assumes that the EEF axis evenly splits each finger symmetrically on each side + if is_parallel_jaw: + # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame + grasp_pts = th.tensor([ + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.2, 1], + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.8, 1], + ]) + finger_to_world_tf = T.pose_inv(T.pose2mat(finger_link.get_position_orientation())) + finger_to_eef_tf = finger_to_world_tf @ world_to_eef_tf + grasp_pts = (grasp_pts @ finger_to_eef_tf.T)[:, :3] + grasping_points = [GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts] + if i == 0: + # Start point + self._default_ag_start_points[arm] = grasping_points + else: + # End point + self._default_ag_end_points[arm] = grasping_points + + # For each grasping point, if we're in DEBUG mode, visualize with spheres + if gm.DEBUG: + for ag_points in (self.assisted_grasp_start_points, self.assisted_grasp_end_points): + for arm_ag_points in ag_points.values(): + # Skip if None exist + if arm_ag_points is None: + continue + # For each ag point, generate a small sphere at that point + for i, arm_ag_point in enumerate(arm_ag_points): + link = self.links[arm_ag_point.link_name] + local_pos = arm_ag_point.position + vis_mesh_prim_path = f"{link.prim_path}/ag_point_{i}" + vis_mesh = create_primitive_mesh( + prim_path=vis_mesh_prim_path, + extents=0.005, + primitive_type="Sphere", + ) + vis_geom = VisualGeomPrim( + relative_prim_path=absolute_prim_path_to_scene_relative( + scene=self.scene, + absolute_prim_path=vis_mesh_prim_path, + ), + name=f"ag_point_{i}", + ) + vis_geom.load(self.scene) + vis_geom.set_position_orientation( + position=local_pos, + frame="parent", + ) + def is_grasping(self, arm="default", candidate_obj=None): """ Returns True if the robot is grasping the target option @candidate_obj or any object if @candidate_obj is None. @@ -657,6 +804,23 @@ def finger_joints(self): """ return {arm: [self._joints[joint] for joint in self.finger_joint_names[arm]] for arm in self.arm_names} + @property + def _assisted_grasp_start_points(self): + """ + Returns: + dict: Dictionary mapping individual arm appendage names to array of GraspingPoint tuples, + composed of (link_name, position) values specifying valid grasping start points located at + cartesian (x,y,z) coordinates specified in link_name's local coordinate frame. 
+ These values will be used in conjunction with + @self.assisted_grasp_end_points to trigger assisted grasps, where objects that intersect + with any ray starting at any point in @self.assisted_grasp_start_points and terminating at any point in + @self.assisted_grasp_end_points will trigger an assisted grasp (calculated individually for each gripper + appendage). By default, each entry returns None, and must be implemented by any robot subclass that + wishes to use assisted grasping. + """ + # Should be optionally implemented by subclass + return None + @property def assisted_grasp_start_points(self): """ @@ -671,7 +835,24 @@ def assisted_grasp_start_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. """ - return {arm: None for arm in self.arm_names} + return self._assisted_grasp_start_points if self._assisted_grasp_start_points is not None else self._default_ag_start_points + + @property + def _assisted_grasp_end_points(self): + """ + Returns: + dict: Dictionary mapping individual arm appendage names to array of GraspingPoint tuples, + composed of (link_name, position) values specifying valid grasping end points located at + cartesian (x,y,z) coordinates specified in link_name's local coordinate frame. + These values will be used in conjunction with + @self.assisted_grasp_start_points to trigger assisted grasps, where objects that intersect + with any ray starting at any point in @self.assisted_grasp_start_points and terminating at any point in + @self.assisted_grasp_end_points will trigger an assisted grasp (calculated individually for each gripper + appendage). By default, each entry returns None, and must be implemented by any robot subclass that + wishes to use assisted grasping. + """ + # Should be optionally implemented by subclass + return None @property def assisted_grasp_end_points(self): @@ -687,16 +868,16 @@ def assisted_grasp_end_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. 
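+                If not explicitly overridden by a subclass, these default to the end points automatically
+                inferred from the robot's finger geometry, where possible (see @_infer_finger_properties).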
""" - return {arm: None for arm in self.arm_names} + return self._assisted_grasp_end_points if self._assisted_grasp_end_points is not None else self._default_ag_end_points @property - def finger_lengths(self): + def eef_to_fingertip_lengths(self): """ Returns: - dict: Dictionary mapping arm appendage name to corresponding length of the fingers in that - hand defined from the palm (assuming all fingers in one hand are equally long) + dict: Dictionary mapping arm appendage name to per-finger corresponding z-distance between EEF and each + respective fingertip """ - raise NotImplementedError + return self._eef_to_fingertip_lengths @property def arm_workspace_range(self): diff --git a/omnigibson/robots/r1.py b/omnigibson/robots/r1.py index bacd7a69c..cc765f3a9 100644 --- a/omnigibson/robots/r1.py +++ b/omnigibson/robots/r1.py @@ -176,30 +176,6 @@ def untucked_default_joint_pos(self): pos[self.arm_control_idx[arm]] = th.tensor([0.0, 1.906, -0.991, 1.571, 0.915, -1.571]) return pos - @property - def finger_lengths(self): - return {self.default_arm: 0.087} - - @property - def assisted_grasp_start_points(self): - return { - arm: [ - GraspingPoint(link_name=f"{arm}_gripper_link1", position=th.tensor([-0.032, 0.0, -0.01])), - GraspingPoint(link_name=f"{arm}_gripper_link1", position=th.tensor([0.025, 0.0, -0.01])), - ] - for arm in self.arm_names - } - - @property - def assisted_grasp_end_points(self): - return { - arm: [ - GraspingPoint(link_name=f"{arm}_gripper_link2", position=th.tensor([-0.032, 0.0, -0.01])), - GraspingPoint(link_name=f"{arm}_gripper_link2", position=th.tensor([0.025, 0.0, -0.01])), - ] - for arm in self.arm_names - } - @cached_property def floor_touching_base_link_names(self): return ["wheel_link1", "wheel_link2", "wheel_link3"] diff --git a/omnigibson/robots/stretch.py b/omnigibson/robots/stretch.py index 8ba67b4ad..4466e9930 100644 --- a/omnigibson/robots/stretch.py +++ b/omnigibson/robots/stretch.py @@ -55,11 +55,11 @@ def wheel_axle_length(self): return 0.330 @property - def finger_lengths(self): - return {self.default_arm: 0.04} + def eef_to_fingertip_lengths(self): + return {arm: {name: 0.04 for name in names} for arm, names in self.finger_link_names.items()} @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return { self.default_arm: [ GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), @@ -68,7 +68,7 @@ def assisted_grasp_start_points(self): } @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): return { self.default_arm: [ GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 63857518a..1c39ddd79 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -241,34 +241,6 @@ def _default_controllers(self): controllers["gripper_{}".format(arm)] = "MultiFingerGripperController" return controllers - @property - def assisted_grasp_start_points(self): - return { - arm: [ - GraspingPoint( - link_name="gripper_{}_right_finger_link".format(arm), position=th.tensor([-0.001, 0.0, -0.2]) - ), - GraspingPoint( - link_name="gripper_{}_right_finger_link".format(arm), position=th.tensor([-0.001, 0.0, -0.13]) - ), - ] - for arm in self.arm_names - } - - @property - def assisted_grasp_end_points(self): - return { - arm: [ - GraspingPoint( - link_name="gripper_{}_left_finger_link".format(arm), position=th.tensor([0.001, 0.0, -0.2]) - ), - 
GraspingPoint( - link_name="gripper_{}_left_finger_link".format(arm), position=th.tensor([0.001, 0.0, -0.13]) - ), - ] - for arm in self.arm_names - } - @property def arm_control_idx(self): # Add combined entry @@ -277,10 +249,6 @@ def arm_control_idx(self): idxs["combined"] = th.sort(th.cat([val for val in idxs.values()]))[0] return idxs - @property - def finger_lengths(self): - return {arm: 0.12 for arm in self.arm_names} - @property def disabled_collision_link_names(self): # These should NEVER have collisions in the first place (i.e.: these are poorly modeled geoms from the source diff --git a/omnigibson/robots/vx300s.py b/omnigibson/robots/vx300s.py index 04a0a19cd..683bcf5e0 100644 --- a/omnigibson/robots/vx300s.py +++ b/omnigibson/robots/vx300s.py @@ -138,10 +138,6 @@ def _default_controllers(self): def _default_joint_pos(self): return th.tensor([0.0, -0.849879, 0.258767, 0.0, 1.2831712, 0.0, 0.057, 0.057]) - @property - def finger_lengths(self): - return {self.default_arm: 0.1} - @cached_property def arm_link_names(self): return { @@ -184,19 +180,3 @@ def finger_joint_names(self): @property def teleop_rotation_offset(self): return {self.default_arm: euler2quat([-math.pi, 0, 0])} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="right_finger_link", position=th.tensor([0.0, 0.001, 0.057])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="left_finger_link", position=th.tensor([0.0, 0.001, 0.057])), - ] - } From 91d4233b9244296ece225399563a8920c22fc552 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 24 Jan 2025 00:35:30 +0000 Subject: [PATCH 14/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/robots/manipulation_robot.py | 62 ++++++++++++++++++------- 1 file changed, 45 insertions(+), 17 deletions(-) diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 5afbe6247..b83f40c40 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -29,8 +29,13 @@ from omnigibson.utils.geometry_utils import generate_points_in_volume_checker_function from omnigibson.utils.python_utils import assert_valid_key, classproperty from omnigibson.utils.sampling_utils import raytest_batch -from omnigibson.utils.usd_utils import ControllableObjectViewAPI, GripperRigidContactAPI, create_joint, \ - create_primitive_mesh, absolute_prim_path_to_scene_relative +from omnigibson.utils.usd_utils import ( + ControllableObjectViewAPI, + GripperRigidContactAPI, + create_joint, + create_primitive_mesh, + absolute_prim_path_to_scene_relative, +) from omnigibson.utils.ui_utils import create_module_logger # Create module logger @@ -164,7 +169,7 @@ def __init__( self._disable_grasp_handling = disable_grasp_handling # Other variables filled in at runtime - self._eef_to_fingertip_lengths = None # dict mapping arm name to finger name to offset + self._eef_to_fingertip_lengths = None # dict mapping arm name to finger name to offset # Initialize other variables used for assistive grasping self._ag_freeze_joint_pos = { @@ -270,7 +275,9 @@ def _infer_finger_properties(self): if finger_link.prim_path == joint.body1: parent_prim_path = joint.body0 break - assert parent_prim_path is not None, f"Expected articulated parent joint for finger link {finger_link.name} but found 
none!" + assert ( + parent_prim_path is not None + ), f"Expected articulated parent joint for finger link {finger_link.name} but found none!" for link in self.links.values(): if parent_prim_path == link.prim_path: parent_link = link @@ -280,18 +287,23 @@ def _infer_finger_properties(self): if finger_parent_link is None: finger_parent_link = parent_link finger_parent_pts = finger_parent_link.collision_boundary_points_world - assert finger_parent_pts is not None, f"Expected finger parent points to be defined for parent link {finger_parent_link.name}, but got None!" + assert ( + finger_parent_pts is not None + ), f"Expected finger parent points to be defined for parent link {finger_parent_link.name}, but got None!" # Convert from world frame -> eef frame finger_parent_pts = th.concatenate([finger_parent_pts, th.ones(len(finger_parent_pts), 1)], dim=-1) finger_parent_pts = (finger_parent_pts @ eef_to_world_tf.T)[:, :3] finger_parent_max_z = finger_parent_pts[:, 2].max().item() else: - assert finger_parent_link == parent_link, \ - f"Expected all fingers to have same parent link, but found multiple parents at {finger_parent_link.prim_path} and {parent_link.prim_path}" + assert ( + finger_parent_link == parent_link + ), f"Expected all fingers to have same parent link, but found multiple parents at {finger_parent_link.prim_path} and {parent_link.prim_path}" # Calculate this finger's collision boundary points in the world frame finger_pts = finger_link.collision_boundary_points_world - assert finger_pts is not None, f"Expected finger points to be defined for link {finger_link.name}, but got None!" + assert ( + finger_pts is not None + ), f"Expected finger points to be defined for link {finger_link.name}, but got None!" # Convert from world frame -> eef frame finger_pts = th.concatenate([finger_pts, th.ones(len(finger_pts), 1)], dim=-1) finger_pts = (finger_pts @ eef_to_world_tf.T)[:, :3] @@ -299,7 +311,9 @@ def _infer_finger_properties(self): # Since we know the EEF frame always points with z outwards towards the fingers, the outer-most point / # fingertip is the maximum z value finger_max_z = finger_pts[:, 2].max().item() - assert finger_max_z > 0, f"Expected positive fingertip to eef frame offset for link {finger_link.name}, but got: {finger_max_z}. Does the EEF frame z-axis point in the direction of the fingers?" + assert ( + finger_max_z > 0 + ), f"Expected positive fingertip to eef frame offset for link {finger_link.name}, but got: {finger_max_z}. Does the EEF frame z-axis point in the direction of the fingers?" self._eef_to_fingertip_lengths[arm][finger_link.name] = finger_max_z # Now, only keep points that are above the parent max z by 20% for inferring y values @@ -308,7 +322,9 @@ def _infer_finger_properties(self): finger_pts = finger_pts[valid_idxs] # Make sure all points lie on a single side of the EEF's y-axis y_signs = th.sign(finger_pts[:, 1]) - assert th.all(y_signs == y_signs[0]).item(), "Expected all finger points to lie on single side of EEF y-axis!" + assert th.all( + y_signs == y_signs[0] + ).item(), "Expected all finger points to lie on single side of EEF y-axis!" y_sign = y_signs[0].item() y_abs_min = th.abs(finger_pts[:, 1]).min().item() @@ -324,14 +340,18 @@ def _infer_finger_properties(self): # x - 0. 
This assumes that the EEF axis evenly splits each finger symmetrically on each side if is_parallel_jaw: # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame - grasp_pts = th.tensor([ - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.2, 1], - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.8, 1], - ]) + grasp_pts = th.tensor( + [ + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.2, 1], + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.8, 1], + ] + ) finger_to_world_tf = T.pose_inv(T.pose2mat(finger_link.get_position_orientation())) finger_to_eef_tf = finger_to_world_tf @ world_to_eef_tf grasp_pts = (grasp_pts @ finger_to_eef_tf.T)[:, :3] - grasping_points = [GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts] + grasping_points = [ + GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts + ] if i == 0: # Start point self._default_ag_start_points[arm] = grasping_points @@ -835,7 +855,11 @@ def assisted_grasp_start_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. """ - return self._assisted_grasp_start_points if self._assisted_grasp_start_points is not None else self._default_ag_start_points + return ( + self._assisted_grasp_start_points + if self._assisted_grasp_start_points is not None + else self._default_ag_start_points + ) @property def _assisted_grasp_end_points(self): @@ -868,7 +892,11 @@ def assisted_grasp_end_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. 
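                If not explicitly overridden by a subclass, these default to the end points automatically
                inferred from the robot's finger geometry, where possible (see @_infer_finger_properties).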
""" - return self._assisted_grasp_end_points if self._assisted_grasp_end_points is not None else self._default_ag_end_points + return ( + self._assisted_grasp_end_points + if self._assisted_grasp_end_points is not None + else self._default_ag_end_points + ) @property def eef_to_fingertip_lengths(self): From 2acd530711ebb5982465567ad21b25daaa9efcb1 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 15:44:53 -0800 Subject: [PATCH 15/33] prune outdated code --- omnigibson/envs/data_wrapper.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index 8d417e28e..360046958 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -498,11 +498,6 @@ def create_from_hdf5( robot_cfg["sensor_config"] = robot_sensor_config if external_sensors_config is not None: config["env"]["external_sensors"] = external_sensors_config - # config["env"]["external_sensors"] = merge_nested_dicts( - # base_dict=config["env"]["external_sensors"], - # extra_dict=external_sensors_config, - # inplace=True, - # ) # Load env env = og.Environment(configs=config) From 86aa45bb6fe824f41cc7a0a2e0de2a7755f59b04 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 15:46:00 -0800 Subject: [PATCH 16/33] use_impedances --- omnigibson/controllers/ik_controller.py | 2 +- omnigibson/examples/robots/curobo_example.py | 14 ++++----- omnigibson/robots/manipulation_robot.py | 6 ++-- tests/test_curobo.py | 30 ++++++++++---------- 4 files changed, 27 insertions(+), 25 deletions(-) diff --git a/omnigibson/controllers/ik_controller.py b/omnigibson/controllers/ik_controller.py index 5c1fc1323..d4c4395a9 100644 --- a/omnigibson/controllers/ik_controller.py +++ b/omnigibson/controllers/ik_controller.py @@ -53,7 +53,7 @@ def __init__( pos_kp=None, pos_damping_ratio=None, vel_kp=None, - use_impedances=True, + use_impedances=False, mode="pose_delta_ori", smoothing_filter_size=None, workspace_pose_limiter=None, diff --git a/omnigibson/examples/robots/curobo_example.py b/omnigibson/examples/robots/curobo_example.py index a174f0de7..cb85cd6ce 100644 --- a/omnigibson/examples/robots/curobo_example.py +++ b/omnigibson/examples/robots/curobo_example.py @@ -102,21 +102,21 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "trunk": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "arm_left": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, "pos_kp": 200.0, }, "arm_right": { @@ -124,7 +124,7 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, "pos_kp": 200.0, }, "gripper_left": { @@ -132,7 +132,7 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, "pos_kp": 1500.0, }, "gripper_right": { @@ -140,7 +140,7 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, "pos_kp": 1500.0, }, }, @@ -151,7 +151,7 @@ def test_curobo(): 
"motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, } # Create env diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 5afbe6247..09e3321ae 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -782,7 +782,9 @@ def eef_links(self): """ Returns: dict: Dictionary mapping arm appendage name to robot link corresponding to that arm's - eef link + eef link. NOTE: These links always have a canonical local orientation frame -- assuming a parallel jaw + eef morphology, it is assumed that the eef z-axis points out from the tips of the fingers, the y-axis + points from the left finger to the right finger, and the x-axis inferred programmatically """ return {arm: self._links[self.eef_link_names[arm]] for arm in self.arm_names} @@ -1204,7 +1206,7 @@ def _default_arm_joint_controller_configs(self): "command_output_limits": None, "motor_type": "position", "use_delta_commands": True, - "use_impedances": True, + "use_impedances": False, } return dic diff --git a/tests/test_curobo.py b/tests/test_curobo.py index adab8b9e9..e8626fbd1 100644 --- a/tests/test_curobo.py +++ b/tests/test_curobo.py @@ -74,14 +74,14 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "gripper_0": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, }, }, @@ -98,42 +98,42 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "trunk": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "arm_left": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "arm_right": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "gripper_left": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "gripper_right": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, }, }, @@ -150,49 +150,49 @@ def test_curobo(): "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "trunk": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "camera": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "arm_left": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "arm_right": { "name": "JointController", "motor_type": "position", 
"command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "gripper_left": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, "gripper_right": { "name": "JointController", "motor_type": "position", "command_input_limits": None, "use_delta_commands": False, - "use_impedances": True, + "use_impedances": False, }, }, }, From b1dea6e8a6b355cf9e29ed0e603d81a9f4b248fd Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 15:46:44 -0800 Subject: [PATCH 17/33] maintain original default arm link convention during curobo config generation during custom robot import --- omnigibson/examples/robots/import_custom_robot.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 99fcc32be..4837bae65 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -86,7 +86,10 @@ parent_link: left_arm_link6 offset: position: [0, 0, 0.06] - orientation: [0, 0, 0, 1] + orientation: [0, 0, 0, 1] # NOTE: Convention for these eef vis links should be tuned such that: + # z-axis points out from the tips of the fingers + # y-axis points in the direction from the left finger to the right finger + # x-axis is automatically inferred from the above two axes - link: right_eef_link # same format as @camera_links parent_link: right_arm_link6 offset: @@ -722,7 +725,9 @@ def create_curobo_cfgs(robot_prim, robot_urdf_path, curobo_cfg, root_link, save_ is_holonomic (bool): Whether the robot has a holonomic base applied or not """ robot_name = robot_prim.GetName() - ee_links = list(curobo_cfg.eef_to_gripper_info.keys()) + + # Left, then right by default if sorted alphabetically + ee_links = list(sorted(curobo_cfg.eef_to_gripper_info.keys())) # Find all joints that have a negative axis specified so we know to flip them in curobo tree = ET.parse(robot_urdf_path) From af6759713f77e925dd4a6a21612cc1755ad2036f Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 18:19:38 -0800 Subject: [PATCH 18/33] fix grasping type inference for MultiFingerGripperController --- .../controllers/multi_finger_gripper_controller.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/omnigibson/controllers/multi_finger_gripper_controller.py b/omnigibson/controllers/multi_finger_gripper_controller.py index 854609275..3f3fd9ce1 100644 --- a/omnigibson/controllers/multi_finger_gripper_controller.py +++ b/omnigibson/controllers/multi_finger_gripper_controller.py @@ -126,8 +126,15 @@ def _generate_default_command_output_limits(self): if self._mode == "binary": command_output_limits = (-1.0, 1.0) # If we're in smoothing mode, output limits should be the average of the independent limits - elif self._mode == "smoothing": - command_output_limits = cb.mean(command_output_limits[0]), cb.mean(command_output_limits[1]) + elif self._mode == "smooth": + command_output_limits = ( + cb.mean(command_output_limits[0]), + cb.mean(command_output_limits[1]), + ) + elif self._mode == "independent": + pass + else: + raise ValueError(f"Invalid mode {self._mode}") return command_output_limits From 08fc2709139bc0903e52ef87c144e7dbde84b7ac Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 
18:46:29 -0800 Subject: [PATCH 19/33] retune test_controllers.py --- tests/test_controllers.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/test_controllers.py b/tests/test_controllers.py index fbb4130c4..a83e2d91a 100644 --- a/tests/test_controllers.py +++ b/tests/test_controllers.py @@ -149,11 +149,11 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): n_steps = { "pose_delta_ori": { - "zero": 10, - "forward": 10, - "side": 10, - "up": 10, - "rotate": 10, + "zero": 40, + "forward": 40, + "side": 40, + "up": 40, + "rotate": 20, "base_move": 30, }, "absolute_pose": { @@ -184,13 +184,11 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): # Update initial state (robot should be stable and still) env.scene.update_initial_state() - # Reset the environment - env.scene.reset() - # Record initial eef pose of all robots initial_eef_pose = dict() for i, robot in enumerate(env.robots): - initial_eef_pose[robot.name] = {arm: robot.get_relative_eef_pose(arm=arm) for arm in robot.arm_names} + initial_eef_pose[robot.name] = {arm: robot.get_relative_eef_pose(arm=arm) for arm in + robot.arm_names} for controller in ["InverseKinematicsController", "OperationalSpaceController"]: for controller_mode in ["pose_delta_ori", "absolute_pose"]: @@ -233,10 +231,10 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): break start_idx += robot.controllers[c].command_dim if controller_mode == "pose_delta_ori": - forward_action[start_idx] = 0.1 - side_action[start_idx + 1] = 0.1 - up_action[start_idx + 2] = 0.1 - rot_action[start_idx + 3] = 0.1 + forward_action[start_idx] = 0.02 + side_action[start_idx + 1] = 0.02 + up_action[start_idx + 2] = 0.02 + rot_action[start_idx + 3] = 0.02 elif controller_mode == "absolute_pose": for act in [zero_action, forward_action, side_action, up_action, rot_action]: act[start_idx : start_idx + 3] = init_eef_pos.clone() From 85d4fb126ec1b239d90b649d366139ffb7ae1a59 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Mon, 27 Jan 2025 18:48:48 -0800 Subject: [PATCH 20/33] retune ground truth camera pose for test robot states --- tests/test_robot_states_flatcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_robot_states_flatcache.py b/tests/test_robot_states_flatcache.py index 3de592249..ed7caf041 100644 --- a/tests/test_robot_states_flatcache.py +++ b/tests/test_robot_states_flatcache.py @@ -63,8 +63,8 @@ def camera_pose_test(flatcache): relative_pose_transform(sensor_world_pos, sensor_world_ori, robot_world_pos, robot_world_ori) ) - sensor_world_pos_gt = th.tensor([150.5187, 149.8295, 101.0960]) - sensor_world_ori_gt = th.tensor([0.0199, -0.1330, 0.9892, -0.0580]) + sensor_world_pos_gt = th.tensor([150.5134, 149.8278, 101.0816]) + sensor_world_ori_gt = th.tensor([0.0176, -0.1205, 0.9910, -0.0549]) assert th.allclose(sensor_world_pos, sensor_world_pos_gt, atol=1e-3) assert quaternions_close(sensor_world_ori, sensor_world_ori_gt, atol=1e-3) From d13f10a5091baaba413fb16701ff274da402461e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 02:49:05 +0000 Subject: [PATCH 21/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/test_controllers.py | 3 +-- tests/test_robot_states_flatcache.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_controllers.py 
b/tests/test_controllers.py index a83e2d91a..32346ac42 100644 --- a/tests/test_controllers.py +++ b/tests/test_controllers.py @@ -187,8 +187,7 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): # Record initial eef pose of all robots initial_eef_pose = dict() for i, robot in enumerate(env.robots): - initial_eef_pose[robot.name] = {arm: robot.get_relative_eef_pose(arm=arm) for arm in - robot.arm_names} + initial_eef_pose[robot.name] = {arm: robot.get_relative_eef_pose(arm=arm) for arm in robot.arm_names} for controller in ["InverseKinematicsController", "OperationalSpaceController"]: for controller_mode in ["pose_delta_ori", "absolute_pose"]: diff --git a/tests/test_robot_states_flatcache.py b/tests/test_robot_states_flatcache.py index ed7caf041..522f801af 100644 --- a/tests/test_robot_states_flatcache.py +++ b/tests/test_robot_states_flatcache.py @@ -64,7 +64,7 @@ def camera_pose_test(flatcache): ) sensor_world_pos_gt = th.tensor([150.5134, 149.8278, 101.0816]) - sensor_world_ori_gt = th.tensor([0.0176, -0.1205, 0.9910, -0.0549]) + sensor_world_ori_gt = th.tensor([0.0176, -0.1205, 0.9910, -0.0549]) assert th.allclose(sensor_world_pos, sensor_world_pos_gt, atol=1e-3) assert quaternions_close(sensor_world_ori, sensor_world_ori_gt, atol=1e-3) From fb91b98eff7ff0b145bbb2808a99bbec60e13b27 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 14:47:20 -0800 Subject: [PATCH 22/33] make sure exclude and include sensor names are mutually exclusive --- omnigibson/robots/robot_base.py | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 4a938dc74..d0c8c1cb1 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -208,6 +208,22 @@ def _load_sensors(self): for prim in link.prim.GetChildren(): prim_type = prim.GetPrimTypeInfo().GetTypeName() if prim_type in SENSOR_PRIMS_TO_SENSOR_CLS: + # Possibly filter out the sensor based on name + prim_path = str(prim.GetPrimPath()) + not_blacklisted = self._exclude_sensor_names is None or not any( + name in prim_path for name in self._exclude_sensor_names + ) + whitelisted = self._include_sensor_names is None or any( + name in prim_path for name in self._include_sensor_names + ) + # Also make sure that the include / exclude sensor names are mutually exclusive + if self._exclude_sensor_names is not None and self._include_sensor_names is not None: + assert len(set(self._exclude_sensor_names).intersection(set(self._include_sensor_names))) == 0, \ + (f"include_sensor_names and exclude_sensor_names must be mutually exclusive! 
" + f"Got: {self._include_sensor_names} and {self._exclude_sensor_names}") + if not (not_blacklisted and whitelisted): + continue + # Infer what obs modalities to use for this sensor sensor_cls = SENSOR_PRIMS_TO_SENSOR_CLS[prim_type] sensor_kwargs = self._sensor_config[sensor_cls.__name__] @@ -221,17 +237,6 @@ def _load_sensors(self): if not sensor_kwargs["modalities"]: continue - # Possibly filter out the sensor based on name - prim_path = str(prim.GetPrimPath()) - not_blacklisted = self._exclude_sensor_names is None or not any( - name in prim_path for name in self._exclude_sensor_names - ) - whitelisted = self._include_sensor_names is None or any( - name in prim_path for name in self._include_sensor_names - ) - if not (not_blacklisted and whitelisted): - continue - obs_modalities = obs_modalities.union(sensor_kwargs["modalities"]) # Create the sensor and store it internally sensor = create_sensor( From d05a1b386560b22a80bc8c2c1f33968ff59888ac Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 18:24:34 -0800 Subject: [PATCH 23/33] update test_controllers to improve determinacy --- tests/test_controllers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_controllers.py b/tests/test_controllers.py index a83e2d91a..1d93a84d4 100644 --- a/tests/test_controllers.py +++ b/tests/test_controllers.py @@ -181,9 +181,17 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): for robot in env.robots: robot.keep_still() + # We need to explicitly reset the controllers to unify the initial state that will be seen + # during downstream action executions -- i.e.: the state seen after robot.reload_controllers() + # is called each time + for controller in robot.controllers.values(): + controller.reset() + # Update initial state (robot should be stable and still) env.scene.update_initial_state() + env.scene.reset() + # Record initial eef pose of all robots initial_eef_pose = dict() for i, robot in enumerate(env.robots): From 99f91749acde9eb6e028be70281d1af1064c8f3a Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 18:28:26 -0800 Subject: [PATCH 24/33] make waypoint tolerance more strict, always run every waypoint as an action at least once --- .../starter_semantic_action_primitives.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 7cb5a235b..8e8183b72 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -102,7 +102,7 @@ m.LOW_PRECISION_DIST_THRESHOLD = 0.1 m.LOW_PRECISION_ANGLE_THRESHOLD = 0.2 -m.JOINT_POS_DIFF_THRESHOLD = 0.01 +m.JOINT_POS_DIFF_THRESHOLD = 0.005 m.LOW_PRECISION_JOINT_POS_DIFF_THRESHOLD = 0.05 m.JOINT_CONTROL_MIN_ACTION = 0.0 m.MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION = math.radians(45) @@ -506,7 +506,8 @@ def _sample_grasp_pose(self, obj): # Identity quaternion for top-down grasping (x-forward, y-right, z-down) approach_dir = T.quat2mat(grasp_quat) @ th.tensor([0.0, 0.0, -1.0]) - pregrasp_offset = self.robot.finger_lengths[self.arm] / 2.0 + m.GRASP_APPROACH_DISTANCE + avg_finger_offset = th.mean(th.tensor([length for length in self.robot.eef_to_fingertip_lengths[self.arm].values()])) + pregrasp_offset = avg_finger_offset + m.GRASP_APPROACH_DISTANCE pregrasp_pos = grasp_pos - approach_dir * 
pregrasp_offset @@ -992,6 +993,12 @@ def _execute_motion_plan( articulation_control_idx = th.cat(articulation_control_idx) for j in range(m.MAX_STEPS_FOR_JOINT_MOTION): # indented_print(f"Step {j + 1}/{m.MAX_STEPS_FOR_JOINT_MOTION}") + + # We need to call @q_to_action for every step because as the robot base moves, the same base joint_pos will be + # converted to different actions, since HolonomicBaseJointController accepts an action in the robot local frame. + action = self.robot.q_to_action(joint_pos) + yield self._postprocess_action(action) + current_joint_pos = self.robot.get_joint_positions() joint_pos_diff = joint_pos - current_joint_pos base_joint_diff = joint_pos_diff[self.robot.base_control_idx] @@ -1017,12 +1024,6 @@ def _execute_motion_plan( indented_print(f"Collision detected at step {i + 1}/{len(q_traj)}") return - # We need to call @q_to_action for every step because as the robot base moves, the same base joint_pos will be - # converted to different actions, since HolonomicBaseJointController accepts an action in the robot local frame. - action = self.robot.q_to_action(joint_pos) - - yield self._postprocess_action(action) - if not ignore_failure: if not base_target_reached: indented_print(f"max base pos diff: {max_base_pos_diff}") @@ -1334,13 +1335,13 @@ def _move_fingers_to_limit(self, limit_type): target_joint_positions = self._get_joint_position_with_fingers_at_limit(limit_type) action = self.robot.q_to_action(target_joint_positions) for _ in range(m.MAX_STEPS_FOR_GRASP_OR_RELEASE): - current_joint_positinos = self.robot.get_joint_positions() - if th.allclose(current_joint_positinos, target_joint_positions, atol=0.005): + yield self._postprocess_action(action) + current_joint_positions = self.robot.get_joint_positions() + if th.allclose(current_joint_positions, target_joint_positions, atol=m.JOINT_POS_DIFF_THRESHOLD): break elif limit_type == "lower" and self._get_obj_in_hand() is not None: # If we are grasping an object, we should stop when object is detected in hand break - yield self._postprocess_action(action) def _execute_grasp(self): """ From 63015524dbb2e4d529e2a79f81769073c5bc0cb1 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 18:29:22 -0800 Subject: [PATCH 25/33] fix leaking tensors in control filter --- omnigibson/utils/backend_utils.py | 4 ++-- omnigibson/utils/processing_utils.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/omnigibson/utils/backend_utils.py b/omnigibson/utils/backend_utils.py index dfdf8a32d..99b162753 100644 --- a/omnigibson/utils/backend_utils.py +++ b/omnigibson/utils/backend_utils.py @@ -96,7 +96,7 @@ class _ComputeTorchBackend(_ComputeBackend): zeros = lambda *args: th.zeros(*args, dtype=th.float32) ones = lambda *args: th.ones(*args, dtype=th.float32) to_numpy = lambda x: x.numpy() - from_numpy = lambda x: th.from_numpy() + from_numpy = lambda x: th.from_numpy(x) to_torch = lambda x: x from_torch = lambda x: x from_torch_recursive = lambda dic: dic @@ -145,7 +145,7 @@ class _ComputeNumpyBackend(_ComputeBackend): abs = np.abs sqrt = np.sqrt mean = lambda val, dim=None, keepdim=False: np.mean(val, axis=dim, keepdims=keepdim) - copy = lambda arr: np.array(arr) + copy = lambda arr: arr.copy() eye = np.eye view = lambda arr, shape: arr.reshape(shape) arange = np.arange diff --git a/omnigibson/utils/processing_utils.py b/omnigibson/utils/processing_utils.py index bd81ceba8..5acc088a7 100644 --- a/omnigibson/utils/processing_utils.py +++ b/omnigibson/utils/processing_utils.py 
@@ -116,7 +116,7 @@ def _dump_state(self): state = super()._dump_state() # Add info from this filter - state["past_samples"] = cb.to_torch(self.past_samples) + state["past_samples"] = cb.to_torch(cb.copy(self.past_samples)) state["current_idx"] = self.current_idx state["fully_filled"] = self.fully_filled @@ -127,7 +127,7 @@ def _load_state(self, state): super()._load_state(state=state) # Load relevant info for this filter - self.past_samples = cb.from_torch(state["past_samples"]) + self.past_samples = cb.copy(cb.from_torch(state["past_samples"])) self.current_idx = state["current_idx"] self.fully_filled = state["fully_filled"] From a679de361c5406edf82f6b37e3df2c6f58fb5754 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 18:29:48 -0800 Subject: [PATCH 26/33] update grasping_planning_utils.py with new robot finger properties --- omnigibson/utils/grasping_planning_utils.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/omnigibson/utils/grasping_planning_utils.py b/omnigibson/utils/grasping_planning_utils.py index 726706841..de397e1b9 100644 --- a/omnigibson/utils/grasping_planning_utils.py +++ b/omnigibson/utils/grasping_planning_utils.py @@ -231,7 +231,8 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint ) # Now apply the grasp offset. - dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05 + avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) + dist_from_grasp_pos = avg_finger_offset + 0.05 offset_grasp_pose_in_bbox_frame = ( grasp_position_in_bbox_frame + canonical_push_axis * push_axis_closer_side_sign * dist_from_grasp_pos, grasp_quat_in_bbox_frame, @@ -263,14 +264,15 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint waypoint_start_offset = ( -0.05 * approach_direction_in_world_frame if should_open else 0.05 * approach_direction_in_world_frame ) + avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) waypoint_start_pose = ( grasp_pose_in_world_frame[0] - + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm] + waypoint_start_offset), + + -1 * approach_direction_in_world_frame * (avg_finger_offset + waypoint_start_offset), grasp_pose_in_world_frame[1], ) waypoint_end_pose = ( target_hand_pose_in_world_frame[0] - + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm]), + + -1 * approach_direction_in_world_frame * avg_finger_offset, target_hand_pose_in_world_frame[1], ) waypoints = interpolate_waypoints(waypoint_start_pose, waypoint_end_pose, num_waypoints=num_waypoints) @@ -409,7 +411,8 @@ def grasp_position_for_open_on_revolute_joint(robot, target_obj, relevant_joint, ) # Now apply the grasp offset. 
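    # (i.e., place the wrist roughly a fingertip length plus a small safety margin back from the sampled
    # grasp point, so that the fingertips rather than the EEF frame origin end up at the grasp location)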
- dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05 + avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) + dist_from_grasp_pos = avg_finger_offset + 0.05 offset_in_bbox_frame = canonical_open_direction * open_axis_closer_side_sign * dist_from_grasp_pos offset_grasp_pose_in_bbox_frame = (grasp_position + offset_in_bbox_frame, grasp_quat_in_bbox_frame) offset_grasp_pose_in_world_frame = T.pose_transform( From 62c1a64dacca27ab7c23f5453d268e8d25d1cb32 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 29 Jan 2025 02:30:09 +0000 Subject: [PATCH 27/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../starter_semantic_action_primitives.py | 4 +++- omnigibson/robots/robot_base.py | 9 ++++++--- omnigibson/utils/grasping_planning_utils.py | 15 ++++++++++----- 3 files changed, 19 insertions(+), 9 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 8e8183b72..b34bdf2e3 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -506,7 +506,9 @@ def _sample_grasp_pose(self, obj): # Identity quaternion for top-down grasping (x-forward, y-right, z-down) approach_dir = T.quat2mat(grasp_quat) @ th.tensor([0.0, 0.0, -1.0]) - avg_finger_offset = th.mean(th.tensor([length for length in self.robot.eef_to_fingertip_lengths[self.arm].values()])) + avg_finger_offset = th.mean( + th.tensor([length for length in self.robot.eef_to_fingertip_lengths[self.arm].values()]) + ) pregrasp_offset = avg_finger_offset + m.GRASP_APPROACH_DISTANCE pregrasp_pos = grasp_pos - approach_dir * pregrasp_offset diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index d0c8c1cb1..9e3fcc8bc 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -218,9 +218,12 @@ def _load_sensors(self): ) # Also make sure that the include / exclude sensor names are mutually exclusive if self._exclude_sensor_names is not None and self._include_sensor_names is not None: - assert len(set(self._exclude_sensor_names).intersection(set(self._include_sensor_names))) == 0, \ - (f"include_sensor_names and exclude_sensor_names must be mutually exclusive! " - f"Got: {self._include_sensor_names} and {self._exclude_sensor_names}") + assert ( + len(set(self._exclude_sensor_names).intersection(set(self._include_sensor_names))) == 0 + ), ( + f"include_sensor_names and exclude_sensor_names must be mutually exclusive! " + f"Got: {self._include_sensor_names} and {self._exclude_sensor_names}" + ) if not (not_blacklisted and whitelisted): continue diff --git a/omnigibson/utils/grasping_planning_utils.py b/omnigibson/utils/grasping_planning_utils.py index de397e1b9..db59225f0 100644 --- a/omnigibson/utils/grasping_planning_utils.py +++ b/omnigibson/utils/grasping_planning_utils.py @@ -231,7 +231,9 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint ) # Now apply the grasp offset. 
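    # (i.e., place the wrist roughly a fingertip length plus a small safety margin back from the sampled
    # grasp point, so that the fingertips rather than the EEF frame origin end up at the grasp location)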
- avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) dist_from_grasp_pos = avg_finger_offset + 0.05 offset_grasp_pose_in_bbox_frame = ( grasp_position_in_bbox_frame + canonical_push_axis * push_axis_closer_side_sign * dist_from_grasp_pos, @@ -264,15 +266,16 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint waypoint_start_offset = ( -0.05 * approach_direction_in_world_frame if should_open else 0.05 * approach_direction_in_world_frame ) - avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) waypoint_start_pose = ( grasp_pose_in_world_frame[0] + -1 * approach_direction_in_world_frame * (avg_finger_offset + waypoint_start_offset), grasp_pose_in_world_frame[1], ) waypoint_end_pose = ( - target_hand_pose_in_world_frame[0] - + -1 * approach_direction_in_world_frame * avg_finger_offset, + target_hand_pose_in_world_frame[0] + -1 * approach_direction_in_world_frame * avg_finger_offset, target_hand_pose_in_world_frame[1], ) waypoints = interpolate_waypoints(waypoint_start_pose, waypoint_end_pose, num_waypoints=num_waypoints) @@ -411,7 +414,9 @@ def grasp_position_for_open_on_revolute_joint(robot, target_obj, relevant_joint, ) # Now apply the grasp offset. - avg_finger_offset = th.mean(th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()])) + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) dist_from_grasp_pos = avg_finger_offset + 0.05 offset_in_bbox_frame = canonical_open_direction * open_axis_closer_side_sign * dist_from_grasp_pos offset_grasp_pose_in_bbox_frame = (grasp_position + offset_in_bbox_frame, grasp_quat_in_bbox_frame) From 40e9a271931cf17bfc7c709241c2c84210ab5e28 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 19:29:59 -0800 Subject: [PATCH 28/33] update docs and prune outdated robot properties --- docs/tutorials/custom_robot_import.md | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index c05c25aac..e2685138d 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -494,28 +494,6 @@ Now that we have the USD file for the robot, let's write our own robot class. 
Fo def wheel_axle_length(self): return 0.330 - @property - def finger_lengths(self): - return {self.default_arm: 0.04} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - @property def disabled_collision_pairs(self): return [ From 595ab3f04250c357ea0ef5836a0b8b99ba0f7d6a Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Tue, 28 Jan 2025 19:30:41 -0800 Subject: [PATCH 29/33] make AG default grasping point proportions a macro, prune outdated, hardcoded finger properties from robot model classes --- omnigibson/robots/a1.py | 4 -- omnigibson/robots/franka.py | 12 +++--- omnigibson/robots/franka_mounted.py | 26 ++----------- omnigibson/robots/manipulation_robot.py | 49 +++++++++++++------------ omnigibson/robots/stretch.py | 22 ----------- 5 files changed, 33 insertions(+), 80 deletions(-) diff --git a/omnigibson/robots/a1.py b/omnigibson/robots/a1.py index 773f9006b..478d1d211 100644 --- a/omnigibson/robots/a1.py +++ b/omnigibson/robots/a1.py @@ -174,10 +174,6 @@ def _default_controllers(self): def _default_joint_pos(self): return self._default_robot_model_joint_pos - @property - def eef_to_fingertip_lengths(self): - return {arm: {name: 0.087 for name in names} for arm, names in self.finger_link_names.items()} - @cached_property def arm_link_names(self): return {self.default_arm: [f"arm_seg{i+1}" for i in range(5)]} diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index daaaa03ac..93791db85 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -237,10 +237,6 @@ def _default_controllers(self): def _default_joint_pos(self): return self._default_robot_model_joint_pos - @property - def eef_to_fingertip_lengths(self): - return {arm: {name: 0.1 for name in names} for arm, names in self.finger_link_names.items()} - @cached_property def arm_link_names(self): return {self.default_arm: [f"panda_link{i}" for i in range(8)]} @@ -263,11 +259,13 @@ def finger_joint_names(self): @property def usd_path(self): - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/usd/franka_panda.usda") \ + if self.model_name == "franka_panda" else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") @property def urdf_path(self): - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/urdf/franka_panda.urdf") \ + if self.model_name == "franka_panda" else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") @property def curobo_path(self): @@ -275,7 +273,7 @@ def curobo_path(self): assert ( self._model_name == "franka_panda" ), f"Only franka_panda is currently supported for curobo. 
Got: {self._model_name}" - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}_description_curobo.yaml") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/curobo/franka_panda_description_curobo_default.yaml") @cached_property def curobo_attached_object_link_names(self): diff --git a/omnigibson/robots/franka_mounted.py b/omnigibson/robots/franka_mounted.py index d449e4190..52956386e 100644 --- a/omnigibson/robots/franka_mounted.py +++ b/omnigibson/robots/franka_mounted.py @@ -23,34 +23,14 @@ def _default_controllers(self): controllers[f"gripper_{self.default_arm}"] = "MultiFingerGripperController" return controllers - @property - def eef_to_fingertip_lengths(self): - return {arm: {name: 0.15 for name in names} for arm, names in self.finger_link_names.items()} - @property def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.usd") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/usd/franka_mounted.usda") @property def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.urdf") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/urdf/franka_mounted.urdf") @property def curobo_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted_description_curobo.yaml") - - @property - def _assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="panda_rightfinger", position=th.tensor([0.0, 0.001, 0.045])), - ] - } - - @property - def _assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="panda_leftfinger", position=th.tensor([0.0, 0.001, 0.045])), - ] - } + return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/curobo/franka_mounted_description_curobo_default.yaml") diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 617a70bc9..f128de4b7 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -50,6 +50,9 @@ m.ARTICULATED_ASSIST_FRACTION = 0.7 m.MIN_ASSIST_FORCE = 0 m.MAX_ASSIST_FORCE = 100 +m.MIN_AG_DEFAULT_GRASP_POINT_PROP = 0.2 +m.MAX_AG_DEFAULT_GRASP_POINT_PROP = 0.8 + m.CONSTRAINT_VIOLATION_THRESHOLD = 0.1 m.RELEASE_WINDOW = 1 / 30.0 # release window in seconds @@ -265,9 +268,7 @@ def _infer_finger_properties(self): # Infer parent link for this finger finger_parent_link, finger_parent_max_z = None, None is_parallel_jaw = len(finger_links) == 2 - if not is_parallel_jaw: - self._default_ag_start_points[arm] = None - self._default_ag_end_points[arm] = None + assert is_parallel_jaw, "Inferring finger link information can only be done for parallel jaw gripper robots!" 
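The loop below measures each finger against its parent link; the two macros introduced above then place the default assisted-grasp points at 20% and 80% of the inferred finger length. A worked sketch of that placement, using hypothetical measurements in the EEF frame (the real code afterwards transforms these points into the finger link frame):

```python
import torch as th

# Macro values from this patch: grasp points sit 20% and 80% of the way
# along the finger, measured upward from the gripper base (parent link).
MIN_PROP, MAX_PROP = 0.2, 0.8

# Hypothetical measurements for one finger, expressed in the EEF frame
finger_parent_max_z = 0.02     # top of the gripper base
finger_max_z = 0.12            # fingertip height
y_abs_min, y_sign = 0.01, 1.0  # inner finger-surface offset and its side of the y-axis

finger_range = finger_max_z - finger_parent_max_z
# Homogeneous (x, y, z, 1) points, ready for a 4x4 transform into the finger frame
grasp_pts = th.tensor(
    [
        [0.0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * MIN_PROP, 1.0],
        [0.0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * MAX_PROP, 1.0],
    ]
)
print(grasp_pts[:, 2])  # tensor([0.0400, 0.1000]) -- 20% and 80% along the finger
```

Promoting the hardcoded 0.2 / 0.8 constants to macros lets downstream robot classes retune the grasp-point proportions without overriding the whole inference routine.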
for i, finger_link in enumerate(finger_links): # Find parent, and make sure one exists parent_prim_path, parent_link = None, None @@ -318,7 +319,7 @@ def _infer_finger_properties(self): # Now, only keep points that are above the parent max z by 20% for inferring y values finger_range = finger_max_z - finger_parent_max_z - valid_idxs = th.where(finger_pts[:, 2] > (finger_parent_max_z + finger_range * 0.2))[0] + valid_idxs = th.where(finger_pts[:, 2] > (finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP))[0] finger_pts = finger_pts[valid_idxs] # Make sure all points lie on a single side of the EEF's y-axis y_signs = th.sign(finger_pts[:, 1]) @@ -334,30 +335,30 @@ def _infer_finger_properties(self): # In this case, this is defined as the x2 (x,y,z) tuples where: # z - the 20% and 80% length between the range from [finger_parent_max_z, finger_max_z] # This is synonymous to inferring the length of the finger (lower bounded by the gripper base, - # assumed to be the parent link), and then taking the values 20% and 80% along its length + # assumed to be the parent link), and then taking the values MIN% and MAX% along its length # y - the value closest to the edge of the finger surface in the direction of +/- EEF y-axis. # This assumes that each individual finger lies completely on one side of the EEF y-axis # x - 0. This assumes that the EEF axis evenly splits each finger symmetrically on each side - if is_parallel_jaw: - # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame - grasp_pts = th.tensor( - [ - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.2, 1], - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * 0.8, 1], - ] - ) - finger_to_world_tf = T.pose_inv(T.pose2mat(finger_link.get_position_orientation())) - finger_to_eef_tf = finger_to_world_tf @ world_to_eef_tf - grasp_pts = (grasp_pts @ finger_to_eef_tf.T)[:, :3] - grasping_points = [ - GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts + # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame + grasp_pts = th.tensor( + [ + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP, 1], + [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * m.MAX_AG_DEFAULT_GRASP_POINT_PROP, 1], ] - if i == 0: - # Start point - self._default_ag_start_points[arm] = grasping_points - else: - # End point - self._default_ag_end_points[arm] = grasping_points + ) + # Convert the grasping points from the EEF frame -> finger frame + finger_to_world_tf = T.pose_inv(T.pose2mat(finger_link.get_position_orientation())) + finger_to_eef_tf = finger_to_world_tf @ world_to_eef_tf + grasp_pts = (grasp_pts @ finger_to_eef_tf.T)[:, :3] + grasping_points = [ + GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts + ] + if i == 0: + # Start point + self._default_ag_start_points[arm] = grasping_points + else: + # End point + self._default_ag_end_points[arm] = grasping_points # For each grasping point, if we're in DEBUG mode, visualize with spheres if gm.DEBUG: diff --git a/omnigibson/robots/stretch.py b/omnigibson/robots/stretch.py index 4466e9930..4d6f32948 100644 --- a/omnigibson/robots/stretch.py +++ b/omnigibson/robots/stretch.py @@ -54,28 +54,6 @@ def wheel_radius(self): def wheel_axle_length(self): return 0.330 - @property - def eef_to_fingertip_lengths(self): - return {arm: {name: 0.04 for name in names} for arm, 
names in self.finger_link_names.items()} - - @property - def _assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - - @property - def _assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - @property def disabled_collision_pairs(self): return [ From 455298ca583b71ed51a5537eea677ba2aa6f01c6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 29 Jan 2025 03:31:18 +0000 Subject: [PATCH 30/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- omnigibson/robots/franka.py | 18 +++++++++++++----- omnigibson/robots/franka_mounted.py | 4 +++- omnigibson/robots/manipulation_robot.py | 22 ++++++++++++++++++---- 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index 93791db85..599b7bfec 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -259,13 +259,19 @@ def finger_joint_names(self): @property def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/usd/franka_panda.usda") \ - if self.model_name == "franka_panda" else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") + return ( + os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/usd/franka_panda.usda") + if self.model_name == "franka_panda" + else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") + ) @property def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/urdf/franka_panda.urdf") \ - if self.model_name == "franka_panda" else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") + return ( + os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/urdf/franka_panda.urdf") + if self.model_name == "franka_panda" + else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") + ) @property def curobo_path(self): @@ -273,7 +279,9 @@ def curobo_path(self): assert ( self._model_name == "franka_panda" ), f"Only franka_panda is currently supported for curobo. 
Got: {self._model_name}" - return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/curobo/franka_panda_description_curobo_default.yaml") + return os.path.join( + gm.ASSET_PATH, "models/franka/franka_panda/curobo/franka_panda_description_curobo_default.yaml" + ) @cached_property def curobo_attached_object_link_names(self): diff --git a/omnigibson/robots/franka_mounted.py b/omnigibson/robots/franka_mounted.py index 52956386e..75b5792ae 100644 --- a/omnigibson/robots/franka_mounted.py +++ b/omnigibson/robots/franka_mounted.py @@ -33,4 +33,6 @@ def urdf_path(self): @property def curobo_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/curobo/franka_mounted_description_curobo_default.yaml") + return os.path.join( + gm.ASSET_PATH, "models/franka/franka_mounted/curobo/franka_mounted_description_curobo_default.yaml" + ) diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index f128de4b7..117d0ca75 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -268,7 +268,9 @@ def _infer_finger_properties(self): # Infer parent link for this finger finger_parent_link, finger_parent_max_z = None, None is_parallel_jaw = len(finger_links) == 2 - assert is_parallel_jaw, "Inferring finger link information can only be done for parallel jaw gripper robots!" + assert ( + is_parallel_jaw + ), "Inferring finger link information can only be done for parallel jaw gripper robots!" for i, finger_link in enumerate(finger_links): # Find parent, and make sure one exists parent_prim_path, parent_link = None, None @@ -319,7 +321,9 @@ def _infer_finger_properties(self): # Now, only keep points that are above the parent max z by 20% for inferring y values finger_range = finger_max_z - finger_parent_max_z - valid_idxs = th.where(finger_pts[:, 2] > (finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP))[0] + valid_idxs = th.where( + finger_pts[:, 2] > (finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP) + )[0] finger_pts = finger_pts[valid_idxs] # Make sure all points lie on a single side of the EEF's y-axis y_signs = th.sign(finger_pts[:, 1]) @@ -342,8 +346,18 @@ def _infer_finger_properties(self): # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame grasp_pts = th.tensor( [ - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP, 1], - [0, (y_abs_min - 0.002) * y_sign, finger_parent_max_z + finger_range * m.MAX_AG_DEFAULT_GRASP_POINT_PROP, 1], + [ + 0, + (y_abs_min - 0.002) * y_sign, + finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP, + 1, + ], + [ + 0, + (y_abs_min - 0.002) * y_sign, + finger_parent_max_z + finger_range * m.MAX_AG_DEFAULT_GRASP_POINT_PROP, + 1, + ], ] ) # Convert the grasping points from the EEF frame -> finger frame From 29428bcd4ce4ec2b52fbed0e02f2d42dfd643250 Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Wed, 29 Jan 2025 17:29:26 -0800 Subject: [PATCH 31/33] primitive sticky grasping top down move hand use stop_on_ag, instead of stop_on_contact --- .../starter_semantic_action_primitives.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index b34bdf2e3..9b95fac58 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ 
b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -584,7 +584,7 @@ def _grasp(self, obj): # By default, it's NOT the z-axis of the world frame unless `project_pose_to_goal_frame=False` is set in curobo. # For sticky grasping, we also need to ignore the object during motion planning because the fingers are already closed. yield from self._move_hand( - grasp_pose, motion_constraint=[1, 1, 1, 1, 1, 0], stop_on_contact=True, ignore_objects=[obj] + grasp_pose, motion_constraint=[1, 1, 1, 1, 1, 0], stop_on_ag=True, ignore_objects=[obj] ) elif self.robot.grasping_mode == "assisted": indented_print("Assisted grasping: approach") @@ -856,6 +856,7 @@ def _move_hand( self, target_pose, stop_on_contact=False, + stop_on_ag=False, motion_constraint=None, low_precision=False, lock_auxiliary_arm=False, @@ -867,6 +868,7 @@ def _move_hand( Args: target_pose (Tuple[th.tensor, th.tensor]): target pose to reach for the default end-effector in the world frame stop_on_contact (bool): Whether to stop executing motion plan if contact is detected + stop_on_ag (bool): Whether to stop executing motion plan if assisted grasping is activated motion_constraint (MotionConstraint): Motion constraint for the motion low_precision (bool): Whether to use low precision for the motion lock_auxiliary_arm (bool): Whether to lock the other arm in place @@ -909,7 +911,7 @@ def _move_hand( ) indented_print(f"Plan has {len(q_traj)} steps") - yield from self._execute_motion_plan(q_traj, stop_on_contact=stop_on_contact, low_precision=low_precision) + yield from self._execute_motion_plan(q_traj, stop_on_contact=stop_on_contact, stop_on_ag=stop_on_ag, low_precision=low_precision) def _plan_joint_motion( self, @@ -975,7 +977,7 @@ def _plan_joint_motion( return q_traj def _execute_motion_plan( - self, q_traj, stop_on_contact=False, ignore_failure=False, low_precision=False, ignore_physics=False + self, q_traj, stop_on_contact=False, stop_on_ag=False, ignore_failure=False, low_precision=False, ignore_physics=False ): for i, joint_pos in enumerate(q_traj): # indented_print(f"Executing motion plan step {i + 1}/{len(q_traj)}") @@ -1023,7 +1025,9 @@ def _execute_motion_plan( collision_detected = detect_robot_collision_in_sim(self.robot) if stop_on_contact and collision_detected: - indented_print(f"Collision detected at step {i + 1}/{len(q_traj)}") + return + + if stop_on_ag and self._get_obj_in_hand() is not None: return if not ignore_failure: From b3bc5c6e062cb1d32e15417c00be92204fc4e1ea Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:30:14 +0000 Subject: [PATCH 32/33] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../starter_semantic_action_primitives.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 9b95fac58..875c9a148 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -911,7 +911,9 @@ def _move_hand( ) indented_print(f"Plan has {len(q_traj)} steps") - yield from self._execute_motion_plan(q_traj, stop_on_contact=stop_on_contact, stop_on_ag=stop_on_ag, low_precision=low_precision) + yield from self._execute_motion_plan( + q_traj, stop_on_contact=stop_on_contact, stop_on_ag=stop_on_ag, 
low_precision=low_precision + ) def _plan_joint_motion( self, @@ -977,7 +979,13 @@ def _plan_joint_motion( return q_traj def _execute_motion_plan( - self, q_traj, stop_on_contact=False, stop_on_ag=False, ignore_failure=False, low_precision=False, ignore_physics=False + self, + q_traj, + stop_on_contact=False, + stop_on_ag=False, + ignore_failure=False, + low_precision=False, + ignore_physics=False, ): for i, joint_pos in enumerate(q_traj): # indented_print(f"Executing motion plan step {i + 1}/{len(q_traj)}") From 7f57a3e1cbdeefd804d487b0769c6f4c5626519d Mon Sep 17 00:00:00 2001 From: cremebrule <84cremebrule@gmail.com> Date: Thu, 30 Jan 2025 13:08:31 -0800 Subject: [PATCH 33/33] fix outdated franka eef name, tune ik / osc keyboard control gains --- omnigibson/robots/franka.py | 9 ++++--- omnigibson/utils/ui_utils.py | 48 ++++++++++++++++++------------------ 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index 599b7bfec..b7c8a00ca 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -98,10 +98,10 @@ def __init__( if end_effector == "gripper": self._model_name = "franka_panda" self._gripper_control_idx = th.arange(7, 9) - self._eef_link_names = "panda_hand" + self._eef_link_names = "eef_link" self._finger_link_names = ["panda_leftfinger", "panda_rightfinger"] self._finger_joint_names = ["panda_finger_joint1", "panda_finger_joint2"] - self._default_robot_model_joint_pos = th.tensor([0.00, -1.3, 0.00, -2.87, 0.00, 2.00, 0.75, 0.00, 0.00]) + self._default_robot_model_joint_pos = th.tensor([0.00, -1.3, 0.00, -2.87, 0.00, 2.00, 0.75, 0.04, 0.04]) self._teleop_rotation_offset = th.tensor([-1, 0, 0, 0]) self._ag_start_points = [ GraspingPoint(link_name="panda_rightfinger", position=th.tensor([0.0, 0.001, 0.045])), @@ -285,7 +285,10 @@ def curobo_path(self): @cached_property def curobo_attached_object_link_names(self): - return {self._eef_link_names: "attached_object"} + assert ( + self._model_name == "franka_panda" + ), f"Only franka_panda is currently supported for curobo. 
Got: {self._model_name}" + return super().curobo_attached_object_link_names @property def teleop_rotation_offset(self): diff --git a/omnigibson/utils/ui_utils.py b/omnigibson/utils/ui_utils.py index 8df735478..9b96898eb 100644 --- a/omnigibson/utils/ui_utils.py +++ b/omnigibson/utils/ui_utils.py @@ -659,18 +659,18 @@ def generate_ik_keypress_mapping(self, controller_info): """ mapping = {} - mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.5} + mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.1} return mapping @@ -687,18 +687,18 @@ def generate_osc_keypress_mapping(self, controller_info): """ mapping = {} - mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.5} - 
mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.5} + mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.25} return mapping
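For reference, a condensed sketch of how these per-key deltas are generated and consumed, assuming a stubbed `controller_info` dict and plain strings in place of the `lazy.carb.input.KeyboardInput` constants:

```python
import torch as th

controller_info = {"start_idx": 6}  # hypothetical offset of this controller's action slice

def generate_ik_keypress_mapping(controller_info, delta=0.1):
    """Each key pair nudges one of the six delta-pose dims by +/- delta."""
    keys = ["UP", "DOWN", "RIGHT", "LEFT", "P", "SEMICOLON", "N", "B", "O", "U", "V", "C"]
    signs = [1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1]
    return {
        key: {"idx": controller_info["start_idx"] + i // 2, "val": sign * delta}
        for i, (key, sign) in enumerate(zip(keys, signs))
    }

action = th.zeros(12)                         # hypothetical full action vector
mapping = generate_ik_keypress_mapping(controller_info)
keybind = mapping["UP"]
action[keybind["idx"]] = keybind["val"]       # pressing UP nudges dim 0 of the slice
print(action[6])                              # tensor(0.1000)
```

Dropping the per-keypress delta from 0.5 to 0.1 for IK (and to 0.25 for OSC) presumably trades teleop speed for finer positioning: each keypress now moves the end-effector target a fraction of the previous step.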