diff --git a/docs/tutorials/custom_robot_import.md b/docs/tutorials/custom_robot_import.md index c05c25aac..e2685138d 100644 --- a/docs/tutorials/custom_robot_import.md +++ b/docs/tutorials/custom_robot_import.md @@ -494,28 +494,6 @@ Now that we have the USD file for the robot, let's write our own robot class. Fo def wheel_axle_length(self): return 0.330 - @property - def finger_lengths(self): - return {self.default_arm: 0.04} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - @property def disabled_collision_pairs(self): return [ diff --git a/docs/tutorials/demo_collection.md b/docs/tutorials/demo_collection.md index bd52e5ba9..3fe5be681 100644 --- a/docs/tutorials/demo_collection.md +++ b/docs/tutorials/demo_collection.md @@ -101,7 +101,7 @@ playback_env = DataPlaybackWrapper.create_from_hdf5( ) # Playback the entire dataset and record observations -playback_env.playback_dataset(record=True) +playback_env.playback_dataset(record_data=True) # Save the recorded playback data playback_env.save_data() diff --git a/omnigibson/action_primitives/starter_semantic_action_primitives.py b/omnigibson/action_primitives/starter_semantic_action_primitives.py index 7cb5a235b..875c9a148 100644 --- a/omnigibson/action_primitives/starter_semantic_action_primitives.py +++ b/omnigibson/action_primitives/starter_semantic_action_primitives.py @@ -102,7 +102,7 @@ m.LOW_PRECISION_DIST_THRESHOLD = 0.1 m.LOW_PRECISION_ANGLE_THRESHOLD = 0.2 -m.JOINT_POS_DIFF_THRESHOLD = 0.01 +m.JOINT_POS_DIFF_THRESHOLD = 0.005 m.LOW_PRECISION_JOINT_POS_DIFF_THRESHOLD = 0.05 m.JOINT_CONTROL_MIN_ACTION = 0.0 m.MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION = math.radians(45) @@ -506,7 +506,10 @@ def _sample_grasp_pose(self, obj): # Identity quaternion for top-down grasping (x-forward, y-right, z-down) approach_dir = T.quat2mat(grasp_quat) @ th.tensor([0.0, 0.0, -1.0]) - pregrasp_offset = self.robot.finger_lengths[self.arm] / 2.0 + m.GRASP_APPROACH_DISTANCE + avg_finger_offset = th.mean( + th.tensor([length for length in self.robot.eef_to_fingertip_lengths[self.arm].values()]) + ) + pregrasp_offset = avg_finger_offset + m.GRASP_APPROACH_DISTANCE pregrasp_pos = grasp_pos - approach_dir * pregrasp_offset @@ -581,7 +584,7 @@ def _grasp(self, obj): # By default, it's NOT the z-axis of the world frame unless `project_pose_to_goal_frame=False` is set in curobo. # For sticky grasping, we also need to ignore the object during motion planning because the fingers are already closed. 
yield from self._move_hand( - grasp_pose, motion_constraint=[1, 1, 1, 1, 1, 0], stop_on_contact=True, ignore_objects=[obj] + grasp_pose, motion_constraint=[1, 1, 1, 1, 1, 0], stop_on_ag=True, ignore_objects=[obj] ) elif self.robot.grasping_mode == "assisted": indented_print("Assisted grasping: approach") @@ -853,6 +856,7 @@ def _move_hand( self, target_pose, stop_on_contact=False, + stop_on_ag=False, motion_constraint=None, low_precision=False, lock_auxiliary_arm=False, @@ -864,6 +868,7 @@ def _move_hand( Args: target_pose (Tuple[th.tensor, th.tensor]): target pose to reach for the default end-effector in the world frame stop_on_contact (bool): Whether to stop executing motion plan if contact is detected + stop_on_ag (bool): Whether to stop executing motion plan if assisted grasping is activated motion_constraint (MotionConstraint): Motion constraint for the motion low_precision (bool): Whether to use low precision for the motion lock_auxiliary_arm (bool): Whether to lock the other arm in place @@ -906,7 +911,9 @@ def _move_hand( ) indented_print(f"Plan has {len(q_traj)} steps") - yield from self._execute_motion_plan(q_traj, stop_on_contact=stop_on_contact, low_precision=low_precision) + yield from self._execute_motion_plan( + q_traj, stop_on_contact=stop_on_contact, stop_on_ag=stop_on_ag, low_precision=low_precision + ) def _plan_joint_motion( self, @@ -972,7 +979,13 @@ def _plan_joint_motion( return q_traj def _execute_motion_plan( - self, q_traj, stop_on_contact=False, ignore_failure=False, low_precision=False, ignore_physics=False + self, + q_traj, + stop_on_contact=False, + stop_on_ag=False, + ignore_failure=False, + low_precision=False, + ignore_physics=False, ): for i, joint_pos in enumerate(q_traj): # indented_print(f"Executing motion plan step {i + 1}/{len(q_traj)}") @@ -992,6 +1005,12 @@ def _execute_motion_plan( articulation_control_idx = th.cat(articulation_control_idx) for j in range(m.MAX_STEPS_FOR_JOINT_MOTION): # indented_print(f"Step {j + 1}/{m.MAX_STEPS_FOR_JOINT_MOTION}") + + # We need to call @q_to_action for every step because as the robot base moves, the same base joint_pos will be + # converted to different actions, since HolonomicBaseJointController accepts an action in the robot local frame. + action = self.robot.q_to_action(joint_pos) + yield self._postprocess_action(action) + current_joint_pos = self.robot.get_joint_positions() joint_pos_diff = joint_pos - current_joint_pos base_joint_diff = joint_pos_diff[self.robot.base_control_idx] @@ -1014,14 +1033,10 @@ def _execute_motion_plan( collision_detected = detect_robot_collision_in_sim(self.robot) if stop_on_contact and collision_detected: - indented_print(f"Collision detected at step {i + 1}/{len(q_traj)}") return - # We need to call @q_to_action for every step because as the robot base moves, the same base joint_pos will be - # converted to different actions, since HolonomicBaseJointController accepts an action in the robot local frame. 
- action = self.robot.q_to_action(joint_pos) - - yield self._postprocess_action(action) + if stop_on_ag and self._get_obj_in_hand() is not None: + return if not ignore_failure: if not base_target_reached: @@ -1334,13 +1349,13 @@ def _move_fingers_to_limit(self, limit_type): target_joint_positions = self._get_joint_position_with_fingers_at_limit(limit_type) action = self.robot.q_to_action(target_joint_positions) for _ in range(m.MAX_STEPS_FOR_GRASP_OR_RELEASE): - current_joint_positinos = self.robot.get_joint_positions() - if th.allclose(current_joint_positinos, target_joint_positions, atol=0.005): + yield self._postprocess_action(action) + current_joint_positions = self.robot.get_joint_positions() + if th.allclose(current_joint_positions, target_joint_positions, atol=m.JOINT_POS_DIFF_THRESHOLD): break elif limit_type == "lower" and self._get_obj_in_hand() is not None: # If we are grasping an object, we should stop when object is detected in hand break - yield self._postprocess_action(action) def _execute_grasp(self): """ diff --git a/omnigibson/configs/fetch_behavior.yaml b/omnigibson/configs/fetch_behavior.yaml index cfde1f7d0..ce0e831f1 100644 --- a/omnigibson/configs/fetch_behavior.yaml +++ b/omnigibson/configs/fetch_behavior.yaml @@ -40,6 +40,8 @@ scene: robots: - type: Fetch obs_modalities: [scan, rgb, depth] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collision: false action_normalize: true diff --git a/omnigibson/configs/r1_primitives.yaml b/omnigibson/configs/r1_primitives.yaml index aed776401..147c94b52 100644 --- a/omnigibson/configs/r1_primitives.yaml +++ b/omnigibson/configs/r1_primitives.yaml @@ -32,6 +32,8 @@ scene: robots: - type: R1 obs_modalities: [rgb] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collisions: true action_normalize: false diff --git a/omnigibson/configs/robots/fetch.yaml b/omnigibson/configs/robots/fetch.yaml index bb1a53b1d..30af214a4 100644 --- a/omnigibson/configs/robots/fetch.yaml +++ b/omnigibson/configs/robots/fetch.yaml @@ -17,6 +17,8 @@ robot: self_collision: true grasping_mode: physical default_arm_pose: vertical + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/freight.yaml b/omnigibson/configs/robots/freight.yaml index 7431b464d..e4d5d6c7d 100644 --- a/omnigibson/configs/robots/freight.yaml +++ b/omnigibson/configs/robots/freight.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/husky.yaml b/omnigibson/configs/robots/husky.yaml index d35680b5b..0de969c48 100644 --- a/omnigibson/configs/robots/husky.yaml +++ b/omnigibson/configs/robots/husky.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/locobot.yaml b/omnigibson/configs/robots/locobot.yaml index c36e144f3..6a067926f 100644 --- a/omnigibson/configs/robots/locobot.yaml +++ b/omnigibson/configs/robots/locobot.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/robots/turtlebot.yaml b/omnigibson/configs/robots/turtlebot.yaml index 0431a430f..ec3b3d2ab 100644 
--- a/omnigibson/configs/robots/turtlebot.yaml +++ b/omnigibson/configs/robots/turtlebot.yaml @@ -10,6 +10,8 @@ robot: base_name: null scale: 1.0 self_collision: false + include_sensor_names: null + exclude_sensor_names: null sensor_config: VisionSensor: sensor_kwargs: diff --git a/omnigibson/configs/tiago_primitives.yaml b/omnigibson/configs/tiago_primitives.yaml index 94d050ecb..714ee6d72 100644 --- a/omnigibson/configs/tiago_primitives.yaml +++ b/omnigibson/configs/tiago_primitives.yaml @@ -33,6 +33,8 @@ scene: robots: - type: Tiago obs_modalities: [rgb] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collisions: true action_normalize: false diff --git a/omnigibson/configs/turtlebot_nav.yaml b/omnigibson/configs/turtlebot_nav.yaml index 844208e90..957ce9c5f 100644 --- a/omnigibson/configs/turtlebot_nav.yaml +++ b/omnigibson/configs/turtlebot_nav.yaml @@ -30,6 +30,8 @@ scene: robots: - type: Turtlebot obs_modalities: [scan, rgb, depth] + include_sensor_names: null + exclude_sensor_names: null scale: 1.0 self_collision: false action_normalize: true diff --git a/omnigibson/controllers/ik_controller.py b/omnigibson/controllers/ik_controller.py index 5c1fc1323..d4c4395a9 100644 --- a/omnigibson/controllers/ik_controller.py +++ b/omnigibson/controllers/ik_controller.py @@ -53,7 +53,7 @@ def __init__( pos_kp=None, pos_damping_ratio=None, vel_kp=None, - use_impedances=True, + use_impedances=False, mode="pose_delta_ori", smoothing_filter_size=None, workspace_pose_limiter=None, diff --git a/omnigibson/controllers/joint_controller.py b/omnigibson/controllers/joint_controller.py index bf2a2545e..3ec3871e4 100644 --- a/omnigibson/controllers/joint_controller.py +++ b/omnigibson/controllers/joint_controller.py @@ -13,7 +13,7 @@ ) from omnigibson.macros import create_module_macros from omnigibson.utils.backend_utils import _compute_backend as cb -from omnigibson.utils.backend_utils import _ComputeBackend, _ComputeNumpyBackend, _ComputeTorchBackend +from omnigibson.utils.backend_utils import add_compute_function from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.ui_utils import create_module_logger @@ -223,7 +223,7 @@ def compute_control(self, goal_dict, control_dict): else: # effort u = target - u = cb.compute_joint_torques(u, control_dict["mass_matrix"], self.dof_idx) + u = cb.get_custom_method("compute_joint_torques")(u, control_dict["mass_matrix"], self.dof_idx) # Add gravity compensation if self._use_gravity_compensation: @@ -341,6 +341,6 @@ def _compute_joint_torques_numpy( # Set these as part of the backend values -setattr(_ComputeBackend, "compute_joint_torques", None) -setattr(_ComputeTorchBackend, "compute_joint_torques", _compute_joint_torques_torch) -setattr(_ComputeNumpyBackend, "compute_joint_torques", _compute_joint_torques_numpy) +add_compute_function( + name="compute_joint_torques", np_function=_compute_joint_torques_numpy, th_function=_compute_joint_torques_torch +) diff --git a/omnigibson/controllers/multi_finger_gripper_controller.py b/omnigibson/controllers/multi_finger_gripper_controller.py index 854609275..3f3fd9ce1 100644 --- a/omnigibson/controllers/multi_finger_gripper_controller.py +++ b/omnigibson/controllers/multi_finger_gripper_controller.py @@ -126,8 +126,15 @@ def _generate_default_command_output_limits(self): if self._mode == "binary": command_output_limits = (-1.0, 1.0) # If we're in smoothing mode, output limits should be the average of the independent limits - elif self._mode == "smoothing": - 
command_output_limits = cb.mean(command_output_limits[0]), cb.mean(command_output_limits[1]) + elif self._mode == "smooth": + command_output_limits = ( + cb.mean(command_output_limits[0]), + cb.mean(command_output_limits[1]), + ) + elif self._mode == "independent": + pass + else: + raise ValueError(f"Invalid mode {self._mode}") return command_output_limits diff --git a/omnigibson/envs/data_wrapper.py b/omnigibson/envs/data_wrapper.py index 0202e7877..01c4006dc 100644 --- a/omnigibson/envs/data_wrapper.py +++ b/omnigibson/envs/data_wrapper.py @@ -10,12 +10,12 @@ import omnigibson as og import omnigibson.lazy as lazy -from omnigibson.envs.env_wrapper import EnvironmentWrapper +from omnigibson.envs.env_wrapper import EnvironmentWrapper, create_wrapper from omnigibson.macros import gm from omnigibson.objects.object_base import BaseObject from omnigibson.sensors.vision_sensor import VisionSensor from omnigibson.utils.config_utils import TorchEncoder -from omnigibson.utils.python_utils import create_object_from_init_info, h5py_group_to_torch +from omnigibson.utils.python_utils import create_object_from_init_info, h5py_group_to_torch, assert_valid_key from omnigibson.utils.ui_utils import create_module_logger # Create module logger @@ -38,8 +38,8 @@ def __init__(self, env, output_path, only_successes=True): """ # Make sure the wrapped environment inherits correct omnigibson format assert isinstance( - env, og.Environment - ), "Expected wrapped @env to be a subclass of OmniGibson's Environment class!" + env, (og.Environment, EnvironmentWrapper) + ), "Expected wrapped @env to be a subclass of OmniGibson's Environment class or EnvironmentWrapper!" # Only one scene is supported for now assert len(og.sim.scenes) == 1, "Only one scene is currently supported for DataWrapper env!" @@ -64,12 +64,13 @@ def __init__(self, env, output_path, only_successes=True): # Run super super().__init__(env=env) - def step(self, action): + def step(self, action, n_render_iterations=1): """ Run the environment step() function and collect data Args: action (th.Tensor): action to take in environment + n_render_iterations (int): Number of rendering iterations to use before returning observations Returns: 5-tuple: @@ -84,7 +85,7 @@ def step(self, action): if isinstance(action, dict): action = th.cat([act for act in action.values()]) - next_obs, reward, terminated, truncated, info = self.env.step(action) + next_obs, reward, terminated, truncated, info = self.env.step(action, n_render_iterations=n_render_iterations) self.step_count += 1 self._record_step_trajectory(action, next_obs, reward, terminated, truncated, info) @@ -374,6 +375,10 @@ def reset(self): # Call super first init_obs, init_info = super().reset() + # Make sure all objects are awake to begin to guarantee we save their initial states + for obj in self.scene.objects: + obj.wake() + # Store this initial state as part of the trajectory state = og.sim.dump_state(serialized=True) step_data = { @@ -465,11 +470,15 @@ def create_from_hdf5( cls, input_path, output_path, - robot_obs_modalities, + robot_obs_modalities=tuple(), robot_sensor_config=None, external_sensors_config=None, + include_sensor_names=None, + exclude_sensor_names=None, n_render_iterations=5, only_successes=False, + include_env_wrapper=False, + additional_wrapper_configs=None, ): """ Create a DataPlaybackWrapper environment instance form the recorded demonstration info @@ -489,12 +498,21 @@ def create_from_hdf5( dictionary specifying an individual external sensor's relevant parameters. 
See the example external_sensors key in fetch_behavior.yaml env config. This can be used to specify additional sensors to collect observations during playback. + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors during playback + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors during playback n_render_iterations (int): Number of rendering iterations to use when loading each stored frame from the recorded data. This is needed because the omniverse real-time raytracing always lags behind the underlying physical state by a few frames, and additionally produces transient visual artifacts when the physical state changes. Increasing this number will improve the rendered quality at the expense of speed. only_successes (bool): Whether to only save successful episodes + include_env_wrapper (bool): Whether to include environment wrapper stored in the underlying env config + additional_wrapper_configs (None or list of dict): If specified, list of wrapper config(s) specifying + environment wrappers to wrap the internal environment class in Returns: DataPlaybackWrapper: Generated playback environment @@ -524,7 +542,10 @@ def create_from_hdf5( # Set observation modalities and update sensor config for robot_cfg in config["robots"]: - robot_cfg["obs_modalities"] = robot_obs_modalities + robot_cfg["obs_modalities"] = list(robot_obs_modalities) + robot_cfg["include_sensor_names"] = include_sensor_names + robot_cfg["exclude_sensor_names"] = exclude_sensor_names + if robot_sensor_config is not None: robot_cfg["sensor_config"] = robot_sensor_config if external_sensors_config is not None: @@ -533,6 +554,14 @@ def create_from_hdf5( # Load env env = og.Environment(configs=config) + # Optionally include the desired environment wrapper specified in the config + if include_env_wrapper: + env = create_wrapper(env=env) + + if additional_wrapper_configs is not None: + for wrapper_cfg in additional_wrapper_configs: + env = create_wrapper(env=env, wrapper_cfg=wrapper_cfg) + # Wrap and return env return cls( env=env, @@ -565,31 +594,39 @@ def __init__(self, env, input_path, output_path, n_render_iterations=5, only_suc # Run super super().__init__(env=env, output_path=output_path, only_successes=only_successes) + def _process_obs(self, obs, info): + """ + Modifies @obs inplace for any relevant post-processing + + Args: + obs (dict): Keyword-mapped relevant observations from the immediate env step + info (dict): Keyword-mapped relevant information from the immediate env step + """ + # Default is a no-op + return obs + def _parse_step_data(self, action, obs, reward, terminated, truncated, info): # Store action, obs, reward, terminated, truncated, info step_data = dict() - step_data["obs"] = obs + step_data["obs"] = self._process_obs(obs=obs, info=info) step_data["action"] = action step_data["reward"] = reward step_data["terminated"] = terminated step_data["truncated"] = truncated return step_data - def playback_episode(self, episode_id, record=True, video_path=None, video_writer=None): + def playback_episode(self, episode_id, record_data=True, video_writer=None, video_rgb_key=None): """ Playback episode @episode_id, and optionally 
record observation data if @record is True Args: episode_id (int): Episode to playback. This should be a valid demo ID number from the inputted collected data hdf5 file - record (bool): Whether to record data during playback or not - video_path (None or str): If specified, path to write the playback video to - video_writer (None or str): If specified, an imageio video writer to use for writing the video (can be specified in place of @video_path) + record_data (bool): Whether to record data during playback or not + video_writer (None or imageio.Writer): If specified, writer object that RGB frames will be written to + video_rgb_key (None or str): If specified, observation key representing the RGB frames to write to video. + If @video_writer is specified, this must also be specified! """ - using_external_writer = video_writer is not None - if video_writer is None and video_path is not None: - video_writer = imageio.get_writer(video_path, fps=30) - data_grp = self.input_hdf5["data"] assert f"demo_{episode_id}" in data_grp, f"No valid episode with ID {episode_id} found!" traj_grp = data_grp[f"demo_{episode_id}"] @@ -612,9 +649,9 @@ def playback_episode(self, episode_id, record=True, video_path=None, video_write og.sim.load_state(state[0, : int(state_size[0])], serialized=True) # If record, record initial observations - if record: - init_obs, _, _, _, _ = self.env.step(action=action[0], n_render_iterations=self.n_render_iterations) - step_data = {"obs": init_obs} + if record_data: + init_obs, _, _, _, init_info = self.env.step(action=action[0], n_render_iterations=self.n_render_iterations) + step_data = {"obs": self._process_obs(obs=init_obs, info=init_info)} self.current_traj_history.append(step_data) for i, (a, s, ss, r, te, tr) in enumerate( @@ -644,7 +681,7 @@ def playback_episode(self, episode_id, record=True, video_path=None, video_write self.current_obs, _, _, _, info = self.env.step(action=a, n_render_iterations=self.n_render_iterations) # If recording, record data - if record: + if record_data: step_data = self._parse_step_data( action=a, obs=self.current_obs, @@ -655,33 +692,44 @@ def playback_episode(self, episode_id, record=True, video_path=None, video_write ) self.current_traj_history.append(step_data) - # If writing video, save the current frame + # If writing to video, write desired frame if video_writer is not None: - video_writer.append_data(og.sim.viewer_camera.get_obs()[0]["rgb"][:, :, :3].numpy()) + assert_valid_key(video_rgb_key, self.current_obs.keys(), "video_rgb_key") + video_writer.append_data(self.current_obs[video_rgb_key][:, :, :3].numpy()) self.step_count += 1 - if record: + if record_data: self.flush_current_traj() - # If we weren't using an external writer but we're still writing a video, close the writer - if video_writer is not None and not using_external_writer: - video_writer.close() - - def playback_dataset(self, record=True, video_path=None, video_writer=None): + def playback_dataset(self, record_data=False, video_writer=None, video_rgb_key=None): """ Playback all episodes from the input HDF5 file, and optionally record observation data if @record is True Args: - record (bool): Whether to record data during playback or not - video_path (None or str): If specified, path to write the playback video to - video_writer (None or str): If specified, an imageio video writer to use for writing the video (can be specified in place of @video_path) + record_data (bool): Whether to record data during playback or not + video_writer (None or imageio.Writer): If specified, 
writer object that RGB frames will be written to + video_rgb_key (None or str): If specified, observation key representing the RGB frames to write to video. + If @video_writer is specified, this must also be specified! """ - if video_writer is None and video_path is not None: - video_writer = imageio.get_writer(video_path, fps=30) for episode_id in range(self.input_hdf5["data"].attrs["n_episodes"]): - self.playback_episode(episode_id=episode_id, record=record, video_path=None, video_writer=video_writer) + self.playback_episode( + episode_id=episode_id, + record_data=record_data, + video_writer=video_writer, + video_rgb_key=video_rgb_key, + ) + + def create_video_writer(self, fpath, fps=30): + """ + Creates a video writer to write video frames to when playing back the dataset - # Close the video writer at the end if created - if video_writer is not None: - video_writer.close() + Args: + fpath (str): Absolute path that the generated video writer will write to. Should end in .mp4 + fps (int): Desired frames per second when generating video + + Returns: + imageio.Writer: Generated video writer + """ + assert fpath.endswith(".mp4"), f"Video writer fpath must end with .mp4! Got: {fpath}" + return imageio.get_writer(fpath, fps=fps) diff --git a/omnigibson/envs/env_wrapper.py b/omnigibson/envs/env_wrapper.py index 09f69d80e..ce8a9aa8e 100644 --- a/omnigibson/envs/env_wrapper.py +++ b/omnigibson/envs/env_wrapper.py @@ -10,11 +10,16 @@ log = create_module_logger(module_name=__name__) -def create_wrapper(env): +def create_wrapper(env, wrapper_cfg=None): """ - Wraps environment @env with wrapper defined by env.wrapper_config + Wraps environment @env with wrapper defined @wrapper_cfg + + Args: + env (og.Environment): environment to wrap + wrapper_cfg (None or dict): Specified, configuration to wrap environment with + If not specified, will default to env.wrapper_config """ - wrapper_cfg = deepcopy(env.wrapper_config) + wrapper_cfg = deepcopy(env.wrapper_config if wrapper_cfg is None else wrapper_cfg) wrapper_type = wrapper_cfg.pop("type") wrapper_cfg["env"] = env @@ -41,12 +46,13 @@ def __init__(self, env): # Run super super().__init__(obj=env) - def step(self, action): + def step(self, action, n_render_iterations=1): """ By default, run the normal environment step() function Args: action (th.tensor): action to take in environment + n_render_iterations (int): Number of rendering iterations to use before returning observations Returns: 4-tuple: @@ -56,7 +62,7 @@ def step(self, action): - (bool) whether the current episode is truncated - (dict) misc information """ - return self.env.step(action) + return self.env.step(action, n_render_iterations=n_render_iterations) def reset(self): """ diff --git a/omnigibson/examples/robots/import_custom_robot.py b/omnigibson/examples/robots/import_custom_robot.py index 99fcc32be..4837bae65 100644 --- a/omnigibson/examples/robots/import_custom_robot.py +++ b/omnigibson/examples/robots/import_custom_robot.py @@ -86,7 +86,10 @@ parent_link: left_arm_link6 offset: position: [0, 0, 0.06] - orientation: [0, 0, 0, 1] + orientation: [0, 0, 0, 1] # NOTE: Convention for these eef vis links should be tuned such that: + # z-axis points out from the tips of the fingers + # y-axis points in the direction from the left finger to the right finger + # x-axis is automatically inferred from the above two axes - link: right_eef_link # same format as @camera_links parent_link: right_arm_link6 offset: @@ -722,7 +725,9 @@ def create_curobo_cfgs(robot_prim, robot_urdf_path, curobo_cfg, 
root_link, save_ is_holonomic (bool): Whether the robot has a holonomic base applied or not """ robot_name = robot_prim.GetName() - ee_links = list(curobo_cfg.eef_to_gripper_info.keys()) + + # Left, then right by default if sorted alphabetically + ee_links = list(sorted(curobo_cfg.eef_to_gripper_info.keys())) # Find all joints that have a negative axis specified so we know to flip them in curobo tree = ET.parse(robot_urdf_path) diff --git a/omnigibson/prims/xform_prim.py b/omnigibson/prims/xform_prim.py index 27f71f8f0..6fea9561c 100644 --- a/omnigibson/prims/xform_prim.py +++ b/omnigibson/prims/xform_prim.py @@ -344,7 +344,7 @@ def get_local_pose(self): logger.warning( 'get_local_pose is deprecated and will be removed in a future release. Use get_position_orientation(frame="parent") instead' ) - return self.get_position_orientation(self.prim_path, frame="parent") + return self.get_position_orientation(frame="parent") def set_local_pose(self, position=None, orientation=None): """ @@ -359,7 +359,7 @@ def set_local_pose(self, position=None, orientation=None): logger.warning( 'set_local_pose is deprecated and will be removed in a future release. Use set_position_orientation(position=position, orientation=orientation, frame="parent") instead' ) - return self.set_position_orientation(self.prim_path, position, orientation, frame="parent") + return self.set_position_orientation(position, orientation, frame="parent") def get_world_scale(self): """ diff --git a/omnigibson/robots/a1.py b/omnigibson/robots/a1.py index b4cb60f14..478d1d211 100644 --- a/omnigibson/robots/a1.py +++ b/omnigibson/robots/a1.py @@ -33,6 +33,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -70,6 +72,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -128,6 +136,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -164,10 +174,6 @@ def _default_controllers(self): def _default_joint_pos(self): return self._default_robot_model_joint_pos - @property - def finger_lengths(self): - return {self.default_arm: 0.087} - @cached_property def arm_link_names(self): return {self.default_arm: [f"arm_seg{i+1}" for i in range(5)]} @@ -205,12 +211,12 @@ def teleop_rotation_offset(self): return {self.default_arm: self._teleop_rotation_offset} @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return {self.default_arm: self._ag_start_points} @property - def assisted_grasp_end_points(self): - return {self.default_arm: self._ag_start_points} + def _assisted_grasp_end_points(self): + return {self.default_arm: self._ag_end_points} @property def disabled_collision_pairs(self): diff --git a/omnigibson/robots/active_camera_robot.py b/omnigibson/robots/active_camera_robot.py index ef2a21948..3802533b0 100644 --- a/omnigibson/robots/active_camera_robot.py +++ b/omnigibson/robots/active_camera_robot.py @@ -22,17 +22,6 @@ class ActiveCameraRobot(BaseRobot): """ - def _validate_configuration(self): - # Make sure a camera controller is specified - assert ( - "camera" in self._controllers - ), "Controller 'camera' must exist in controllers! Current controllers: {}".format( - list(self._controllers.keys()) - ) - - # run super - super()._validate_configuration() - def _get_proprioception_dict(self): dic = super()._get_proprioception_dict() diff --git a/omnigibson/robots/behavior_robot.py b/omnigibson/robots/behavior_robot.py index 442358653..6dfac40cc 100644 --- a/omnigibson/robots/behavior_robot.py +++ b/omnigibson/robots/behavior_robot.py @@ -79,6 +79,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities="rgb", + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", # Unique to ManipulationRobot grasping_mode="assisted", @@ -108,6 +110,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, grasping_mode=grasping_mode, grasping_direction="upper", @@ -400,7 +404,7 @@ def set_position_orientation( ) @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): side_coefficients = {"left": th.tensor([1, -1, 1]), "right": th.tensor([1, 1, 1])} return { arm: [ @@ -415,7 +419,7 @@ def assisted_grasp_start_points(self): } @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): side_coefficients = {"left": th.tensor([1, -1, 1]), "right": th.tensor([1, 1, 1])} return { arm: [ diff --git a/omnigibson/robots/fetch.py b/omnigibson/robots/fetch.py index 8b4702cb7..36bc9ea54 100644 --- a/omnigibson/robots/fetch.py +++ b/omnigibson/robots/fetch.py @@ -41,6 +41,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -83,6 +85,12 @@ def __init__( Valid options are "all", or a list containing any subset of 
omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -121,6 +129,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -204,28 +214,6 @@ def wheel_radius(self): def wheel_axle_length(self): return 0.372 - @property - def finger_lengths(self): - return {self.default_arm: 0.1} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="r_gripper_finger_link", position=th.tensor([0.025, -0.02, 0.0])), - GraspingPoint(link_name="r_gripper_finger_link", position=th.tensor([-0.025, -0.02, 0.0])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="l_gripper_finger_link", position=th.tensor([0.025, 0.02, 0.0])), - GraspingPoint(link_name="l_gripper_finger_link", position=th.tensor([-0.025, 0.02, 0.0])), - ] - } - @cached_property def base_joint_names(self): return ["l_wheel_joint", "r_wheel_joint"] diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index ccb64e358..b7c8a00ca 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -33,6 +33,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -71,6 +73,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. 
If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -90,10 +98,10 @@ def __init__( if end_effector == "gripper": self._model_name = "franka_panda" self._gripper_control_idx = th.arange(7, 9) - self._eef_link_names = "panda_hand" + self._eef_link_names = "eef_link" self._finger_link_names = ["panda_leftfinger", "panda_rightfinger"] self._finger_joint_names = ["panda_finger_joint1", "panda_finger_joint2"] - self._default_robot_model_joint_pos = th.tensor([0.00, -1.3, 0.00, -2.87, 0.00, 2.00, 0.75, 0.00, 0.00]) + self._default_robot_model_joint_pos = th.tensor([0.00, -1.3, 0.00, -2.87, 0.00, 2.00, 0.75, 0.04, 0.04]) self._teleop_rotation_offset = th.tensor([-1, 0, 0, 0]) self._ag_start_points = [ GraspingPoint(link_name="panda_rightfinger", position=th.tensor([0.0, 0.001, 0.045])), @@ -191,6 +199,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -227,10 +237,6 @@ def _default_controllers(self): def _default_joint_pos(self): return self._default_robot_model_joint_pos - @property - def finger_lengths(self): - return {self.default_arm: 0.1} - @cached_property def arm_link_names(self): return {self.default_arm: [f"panda_link{i}" for i in range(8)]} @@ -253,11 +259,19 @@ def finger_joint_names(self): @property def usd_path(self): - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") + return ( + os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/usd/franka_panda.usda") + if self.model_name == "franka_panda" + else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.usd") + ) @property def urdf_path(self): - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") + return ( + os.path.join(gm.ASSET_PATH, "models/franka/franka_panda/urdf/franka_panda.urdf") + if self.model_name == "franka_panda" + else os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}.urdf") + ) @property def curobo_path(self): @@ -265,22 +279,27 @@ def curobo_path(self): assert ( self._model_name == "franka_panda" ), f"Only franka_panda is currently supported for curobo. Got: {self._model_name}" - return os.path.join(gm.ASSET_PATH, f"models/franka/{self.model_name}_description_curobo.yaml") + return os.path.join( + gm.ASSET_PATH, "models/franka/franka_panda/curobo/franka_panda_description_curobo_default.yaml" + ) @cached_property def curobo_attached_object_link_names(self): - return {self._eef_link_names: "attached_object"} + assert ( + self._model_name == "franka_panda" + ), f"Only franka_panda is currently supported for curobo. 
Got: {self._model_name}" + return super().curobo_attached_object_link_names @property def teleop_rotation_offset(self): return {self.default_arm: self._teleop_rotation_offset} @property - def assisted_grasp_start_points(self): + def _assisted_grasp_start_points(self): return {self.default_arm: self._ag_start_points} @property - def assisted_grasp_end_points(self): + def _assisted_grasp_end_points(self): return {self.default_arm: self._ag_start_points} @property diff --git a/omnigibson/robots/franka_mounted.py b/omnigibson/robots/franka_mounted.py index bf531aa1f..75b5792ae 100644 --- a/omnigibson/robots/franka_mounted.py +++ b/omnigibson/robots/franka_mounted.py @@ -23,34 +23,16 @@ def _default_controllers(self): controllers[f"gripper_{self.default_arm}"] = "MultiFingerGripperController" return controllers - @property - def finger_lengths(self): - return {self.default_arm: 0.15} - @property def usd_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.usd") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/usd/franka_mounted.usda") @property def urdf_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted.urdf") + return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted/urdf/franka_mounted.urdf") @property def curobo_path(self): - return os.path.join(gm.ASSET_PATH, "models/franka/franka_mounted_description_curobo.yaml") - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="panda_rightfinger", position=th.tensor([0.0, 0.001, 0.045])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="panda_leftfinger", position=th.tensor([0.0, 0.001, 0.045])), - ] - } + return os.path.join( + gm.ASSET_PATH, "models/franka/franka_mounted/curobo/franka_mounted_description_curobo_default.yaml" + ) diff --git a/omnigibson/robots/holonomic_base_robot.py b/omnigibson/robots/holonomic_base_robot.py index 3cff80ef8..14214708c 100644 --- a/omnigibson/robots/holonomic_base_robot.py +++ b/omnigibson/robots/holonomic_base_robot.py @@ -57,6 +57,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, **kwargs, @@ -91,6 +93,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -125,6 +133,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, **kwargs, diff --git a/omnigibson/robots/locomotion_robot.py b/omnigibson/robots/locomotion_robot.py index 136189bac..d1f361dfa 100644 --- a/omnigibson/robots/locomotion_robot.py +++ b/omnigibson/robots/locomotion_robot.py @@ -25,13 +25,11 @@ class LocomotionRobot(BaseRobot): """ def _validate_configuration(self): - # We make sure that our base controller exists and is a locomotion controller - assert ( - "base" in self._controllers - ), "Controller 'base' must exist in controllers! Current controllers: {}".format(list(self._controllers.keys())) - assert isinstance( - self._controllers["base"], LocomotionController - ), "Base controller must be a LocomotionController!" + # If we have a base controller, make sure it is a locomotion controller + if "base" in self._controllers: + assert isinstance( + self._controllers["base"], LocomotionController + ), "Base controller must be a LocomotionController!" # run super super()._validate_configuration() diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index e338f3bff..117d0ca75 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -22,13 +22,24 @@ ) from omnigibson.macros import create_module_macros, gm from omnigibson.object_states import ContactBodies +from omnigibson.prims.geom_prim import VisualGeomPrim from omnigibson.robots.robot_base import BaseRobot from omnigibson.utils.backend_utils import _compute_backend as cb from omnigibson.utils.constants import JointType, PrimType from omnigibson.utils.geometry_utils import generate_points_in_volume_checker_function from omnigibson.utils.python_utils import assert_valid_key, classproperty from omnigibson.utils.sampling_utils import raytest_batch -from omnigibson.utils.usd_utils import ControllableObjectViewAPI, GripperRigidContactAPI, create_joint +from omnigibson.utils.usd_utils import ( + ControllableObjectViewAPI, + GripperRigidContactAPI, + create_joint, + create_primitive_mesh, + absolute_prim_path_to_scene_relative, +) +from omnigibson.utils.ui_utils import create_module_logger + +# Create module logger +log = create_module_logger(module_name=__name__) # Create settings for this module m = create_module_macros(module_path=__file__) @@ -39,6 +50,9 @@ m.ARTICULATED_ASSIST_FRACTION = 0.7 m.MIN_ASSIST_FORCE = 0 m.MAX_ASSIST_FORCE = 100 +m.MIN_AG_DEFAULT_GRASP_POINT_PROP = 0.2 +m.MAX_AG_DEFAULT_GRASP_POINT_PROP = 0.8 + m.CONSTRAINT_VIOLATION_THRESHOLD = 0.1 m.RELEASE_WINDOW = 1 / 30.0 # release window in seconds @@ -85,6 +99,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -123,6 +139,12 @@ def __init__( Otherwise, valid options should be part of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. 
A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -149,6 +171,9 @@ def __init__( self._grasping_direction = grasping_direction self._disable_grasp_handling = disable_grasp_handling + # Other variables filled in at runtime + self._eef_to_fingertip_lengths = None # dict mapping arm name to finger name to offset + # Initialize other variables used for assistive grasping self._ag_freeze_joint_pos = { arm: {} for arm in self.arm_names @@ -178,6 +203,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, **kwargs, @@ -186,25 +213,17 @@ def __init__( def _validate_configuration(self): # Iterate over all arms for arm in self.arm_names: - # We make sure that our arm controller exists and is a manipulation controller - assert ( - "arm_{}".format(arm) in self._controllers - ), "Controller 'arm_{}' must exist in controllers! Current controllers: {}".format( - arm, list(self._controllers.keys()) - ) - assert isinstance( - self._controllers["arm_{}".format(arm)], ManipulationController - ), "Arm {} controller must be a ManipulationController!".format(arm) - - # We make sure that our gripper controller exists and is a gripper controller - assert ( - "gripper_{}".format(arm) in self._controllers - ), "Controller 'gripper_{}' must exist in controllers! 
Current controllers: {}".format( - arm, list(self._controllers.keys()) - ) - assert isinstance( - self._controllers["gripper_{}".format(arm)], GripperController - ), "Gripper {} controller must be a GripperController!".format(arm) + # If we have an arm controller, make sure it is a manipulation controller + if f"arm_{arm}" in self._controllers: + assert isinstance( + self._controllers["arm_{}".format(arm)], ManipulationController + ), "Arm {} controller must be a ManipulationController!".format(arm) + + # If we have a gripper controller, make sure it is a manipulation controller + if f"gripper_{arm}" in self._controllers: + assert isinstance( + self._controllers["gripper_{}".format(arm)], GripperController + ), "Gripper {} controller must be a GripperController!".format(arm) # run super super()._validate_configuration() @@ -212,6 +231,14 @@ def _validate_configuration(self): def _initialize(self): super()._initialize() + # Infer relevant link properties, e.g.: fingertip location, AG grasping points + # We use a try / except to maintain backwards-compatibility with robots that do not follow our + # OG-specified convention + try: + self._infer_finger_properties() + except AssertionError as e: + log.warning(f"Could not infer relevant finger link properties because:\n\n{e}") + if gm.AG_CLOTH: for arm in self.arm_names: self._ag_check_in_volume[arm], self._ag_calculate_volume[arm] = ( @@ -220,6 +247,163 @@ def _initialize(self): ) ) + def _infer_finger_properties(self): + """ + Infers relevant finger properties based on the given finger meshes of the robot + + NOTE: This assumes the given EEF convention for parallel jaw grippers -- i.e.: + z points in the direction of the fingers, y points in the direction of grasp articulation, and x + is then inferred automatically + """ + # Calculate and cache fingertip to eef frame offsets, as well as AG grasping points + self._eef_to_fingertip_lengths = dict() + self._default_ag_start_points = dict() + self._default_ag_end_points = dict() + for arm, finger_links in self.finger_links.items(): + self._eef_to_fingertip_lengths[arm] = dict() + eef_link = self.eef_links[arm] + world_to_eef_tf = T.pose2mat(eef_link.get_position_orientation()) + eef_to_world_tf = T.pose_inv(world_to_eef_tf) + + # Infer parent link for this finger + finger_parent_link, finger_parent_max_z = None, None + is_parallel_jaw = len(finger_links) == 2 + assert ( + is_parallel_jaw + ), "Inferring finger link information can only be done for parallel jaw gripper robots!" + for i, finger_link in enumerate(finger_links): + # Find parent, and make sure one exists + parent_prim_path, parent_link = None, None + for joint in self.joints.values(): + if finger_link.prim_path == joint.body1: + parent_prim_path = joint.body0 + break + assert ( + parent_prim_path is not None + ), f"Expected articulated parent joint for finger link {finger_link.name} but found none!" + for link in self.links.values(): + if parent_prim_path == link.prim_path: + parent_link = link + break + assert parent_link is not None, f"Expected parent link located at {parent_prim_path} but found none!" + # Make sure all fingers share the same parent + if finger_parent_link is None: + finger_parent_link = parent_link + finger_parent_pts = finger_parent_link.collision_boundary_points_world + assert ( + finger_parent_pts is not None + ), f"Expected finger parent points to be defined for parent link {finger_parent_link.name}, but got None!" 
+ # Convert from world frame -> eef frame + finger_parent_pts = th.concatenate([finger_parent_pts, th.ones(len(finger_parent_pts), 1)], dim=-1) + finger_parent_pts = (finger_parent_pts @ eef_to_world_tf.T)[:, :3] + finger_parent_max_z = finger_parent_pts[:, 2].max().item() + else: + assert ( + finger_parent_link == parent_link + ), f"Expected all fingers to have same parent link, but found multiple parents at {finger_parent_link.prim_path} and {parent_link.prim_path}" + + # Calculate this finger's collision boundary points in the world frame + finger_pts = finger_link.collision_boundary_points_world + assert ( + finger_pts is not None + ), f"Expected finger points to be defined for link {finger_link.name}, but got None!" + # Convert from world frame -> eef frame + finger_pts = th.concatenate([finger_pts, th.ones(len(finger_pts), 1)], dim=-1) + finger_pts = (finger_pts @ eef_to_world_tf.T)[:, :3] + + # Since we know the EEF frame always points with z outwards towards the fingers, the outer-most point / + # fingertip is the maximum z value + finger_max_z = finger_pts[:, 2].max().item() + assert ( + finger_max_z > 0 + ), f"Expected positive fingertip to eef frame offset for link {finger_link.name}, but got: {finger_max_z}. Does the EEF frame z-axis point in the direction of the fingers?" + self._eef_to_fingertip_lengths[arm][finger_link.name] = finger_max_z + + # Now, only keep points that are above the parent max z by 20% for inferring y values + finger_range = finger_max_z - finger_parent_max_z + valid_idxs = th.where( + finger_pts[:, 2] > (finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP) + )[0] + finger_pts = finger_pts[valid_idxs] + # Make sure all points lie on a single side of the EEF's y-axis + y_signs = th.sign(finger_pts[:, 1]) + assert th.all( + y_signs == y_signs[0] + ).item(), "Expected all finger points to lie on single side of EEF y-axis!" + y_sign = y_signs[0].item() + y_abs_min = th.abs(finger_pts[:, 1]).min().item() + + # Compute the default grasping points for this finger + # For now, we only have a strong heuristic defined for parallel jaw grippers, which assumes that + # there are exactly 2 fingers + # In this case, this is defined as the x2 (x,y,z) tuples where: + # z - the 20% and 80% length between the range from [finger_parent_max_z, finger_max_z] + # This is synonymous to inferring the length of the finger (lower bounded by the gripper base, + # assumed to be the parent link), and then taking the values MIN% and MAX% along its length + # y - the value closest to the edge of the finger surface in the direction of +/- EEF y-axis. + # This assumes that each individual finger lies completely on one side of the EEF y-axis + # x - 0. 
This assumes that the EEF axis evenly splits each finger symmetrically on each side + # (x,y,z,1) -- homogenous form for efficient transforming into finger link frame + grasp_pts = th.tensor( + [ + [ + 0, + (y_abs_min - 0.002) * y_sign, + finger_parent_max_z + finger_range * m.MIN_AG_DEFAULT_GRASP_POINT_PROP, + 1, + ], + [ + 0, + (y_abs_min - 0.002) * y_sign, + finger_parent_max_z + finger_range * m.MAX_AG_DEFAULT_GRASP_POINT_PROP, + 1, + ], + ] + ) + # Convert the grasping points from the EEF frame -> finger frame + finger_to_world_tf = T.pose_inv(T.pose2mat(finger_link.get_position_orientation())) + finger_to_eef_tf = finger_to_world_tf @ world_to_eef_tf + grasp_pts = (grasp_pts @ finger_to_eef_tf.T)[:, :3] + grasping_points = [ + GraspingPoint(link_name=finger_link.body_name, position=grasp_pt) for grasp_pt in grasp_pts + ] + if i == 0: + # Start point + self._default_ag_start_points[arm] = grasping_points + else: + # End point + self._default_ag_end_points[arm] = grasping_points + + # For each grasping point, if we're in DEBUG mode, visualize with spheres + if gm.DEBUG: + for ag_points in (self.assisted_grasp_start_points, self.assisted_grasp_end_points): + for arm_ag_points in ag_points.values(): + # Skip if None exist + if arm_ag_points is None: + continue + # For each ag point, generate a small sphere at that point + for i, arm_ag_point in enumerate(arm_ag_points): + link = self.links[arm_ag_point.link_name] + local_pos = arm_ag_point.position + vis_mesh_prim_path = f"{link.prim_path}/ag_point_{i}" + vis_mesh = create_primitive_mesh( + prim_path=vis_mesh_prim_path, + extents=0.005, + primitive_type="Sphere", + ) + vis_geom = VisualGeomPrim( + relative_prim_path=absolute_prim_path_to_scene_relative( + scene=self.scene, + absolute_prim_path=vis_mesh_prim_path, + ), + name=f"ag_point_{i}", + ) + vis_geom.load(self.scene) + vis_geom.set_position_orientation( + position=local_pos, + frame="parent", + ) + def is_grasping(self, arm="default", candidate_obj=None): """ Returns True if the robot is grasping the target option @candidate_obj or any object if @candidate_obj is None. @@ -632,7 +816,9 @@ def eef_links(self): """ Returns: dict: Dictionary mapping arm appendage name to robot link corresponding to that arm's - eef link + eef link. NOTE: These links always have a canonical local orientation frame -- assuming a parallel jaw + eef morphology, it is assumed that the eef z-axis points out from the tips of the fingers, the y-axis + points from the left finger to the right finger, and the x-axis inferred programmatically """ return {arm: self._links[self.eef_link_names[arm]] for arm in self.arm_names} @@ -654,6 +840,23 @@ def finger_joints(self): """ return {arm: [self._joints[joint] for joint in self.finger_joint_names[arm]] for arm in self.arm_names} + @property + def _assisted_grasp_start_points(self): + """ + Returns: + dict: Dictionary mapping individual arm appendage names to array of GraspingPoint tuples, + composed of (link_name, position) values specifying valid grasping start points located at + cartesian (x,y,z) coordinates specified in link_name's local coordinate frame. + These values will be used in conjunction with + @self.assisted_grasp_end_points to trigger assisted grasps, where objects that intersect + with any ray starting at any point in @self.assisted_grasp_start_points and terminating at any point in + @self.assisted_grasp_end_points will trigger an assisted grasp (calculated individually for each gripper + appendage). 
By default, each entry returns None, and must be implemented by any robot subclass that + wishes to use assisted grasping. + """ + # Should be optionally implemented by subclass + return None + @property def assisted_grasp_start_points(self): """ @@ -668,7 +871,28 @@ def assisted_grasp_start_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. """ - return {arm: None for arm in self.arm_names} + return ( + self._assisted_grasp_start_points + if self._assisted_grasp_start_points is not None + else self._default_ag_start_points + ) + + @property + def _assisted_grasp_end_points(self): + """ + Returns: + dict: Dictionary mapping individual arm appendage names to array of GraspingPoint tuples, + composed of (link_name, position) values specifying valid grasping end points located at + cartesian (x,y,z) coordinates specified in link_name's local coordinate frame. + These values will be used in conjunction with + @self.assisted_grasp_start_points to trigger assisted grasps, where objects that intersect + with any ray starting at any point in @self.assisted_grasp_start_points and terminating at any point in + @self.assisted_grasp_end_points will trigger an assisted grasp (calculated individually for each gripper + appendage). By default, each entry returns None, and must be implemented by any robot subclass that + wishes to use assisted grasping. + """ + # Should be optionally implemented by subclass + return None @property def assisted_grasp_end_points(self): @@ -684,16 +908,20 @@ def assisted_grasp_end_points(self): appendage). By default, each entry returns None, and must be implemented by any robot subclass that wishes to use assisted grasping. """ - return {arm: None for arm in self.arm_names} + return ( + self._assisted_grasp_end_points + if self._assisted_grasp_end_points is not None + else self._default_ag_end_points + ) @property - def finger_lengths(self): + def eef_to_fingertip_lengths(self): """ Returns: - dict: Dictionary mapping arm appendage name to corresponding length of the fingers in that - hand defined from the palm (assuming all fingers in one hand are equally long) + dict: Dictionary mapping arm appendage name to per-finger corresponding z-distance between EEF and each + respective fingertip """ - raise NotImplementedError + return self._eef_to_fingertip_lengths @property def arm_workspace_range(self): @@ -1020,7 +1248,7 @@ def _default_arm_joint_controller_configs(self): "command_output_limits": None, "motor_type": "position", "use_delta_commands": True, - "use_impedances": True, + "use_impedances": False, } return dic diff --git a/omnigibson/robots/mobile_manipulation_robot.py b/omnigibson/robots/mobile_manipulation_robot.py index 09ae8d7a2..f481d12c3 100644 --- a/omnigibson/robots/mobile_manipulation_robot.py +++ b/omnigibson/robots/mobile_manipulation_robot.py @@ -42,6 +42,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -81,6 +83,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! 
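With `finger_lengths` removed, callers that previously relied on a single per-arm scalar now average the per-finger values returned by `eef_to_fingertip_lengths`, as `_sample_grasp_pose` and the grasping planning utilities do elsewhere in this patch. A minimal sketch of that migration, with placeholder finger lengths:

```python
# Migration sketch: per-finger z-distances replace the old per-arm scalar (placeholder values).
import torch as th

eef_to_fingertip_lengths = {"left_finger_link": 0.085, "right_finger_link": 0.089}

avg_finger_offset = th.mean(th.tensor(list(eef_to_fingertip_lengths.values())))
pregrasp_offset = avg_finger_offset + 0.05  # approach-distance margin, as in the diff
print(avg_finger_offset.item(), pregrasp_offset.item())  # ~0.087, ~0.137
```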
+ include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. See self._get_proprioception_dict @@ -118,6 +126,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/r1.py b/omnigibson/robots/r1.py index b9ca8259e..0852b68d9 100644 --- a/omnigibson/robots/r1.py +++ b/omnigibson/robots/r1.py @@ -34,6 +34,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -73,6 +75,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
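For context, the new filters are plumbed through every robot constructor in this patch, so they can be set directly in a robot config entry. The sketch below assumes hypothetical camera prim-path substrings ("head", "wrist"); only sensors whose prim path contains an include substring and none of the exclude substrings are loaded.

```python
# Sketch of a robot config entry using the new kwargs; substrings are hypothetical.
robot_cfg = {
    "type": "R1",
    "obs_modalities": ["rgb", "proprio"],
    # Keep only sensors whose prim path mentions the head ...
    "include_sensor_names": ["head"],
    # ... and drop any wrist-mounted sensors
    "exclude_sensor_names": ["wrist"],
}

cfg = {"scene": {"type": "Scene"}, "robots": [robot_cfg]}
# import omnigibson as og
# env = og.Environment(configs=cfg)  # requires an Isaac Sim install
```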
See self._get_proprioception_dict @@ -106,6 +114,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -165,30 +175,6 @@ def untucked_default_joint_pos(self): pos[self.arm_control_idx[arm]] = th.tensor([0.0, 1.906, -0.991, 1.571, 0.915, -1.571]) return pos - @property - def finger_lengths(self): - return {self.default_arm: 0.087} - - @property - def assisted_grasp_start_points(self): - return { - arm: [ - GraspingPoint(link_name=f"{arm}_gripper_link1", position=th.tensor([-0.032, 0.0, -0.01])), - GraspingPoint(link_name=f"{arm}_gripper_link1", position=th.tensor([0.025, 0.0, -0.01])), - ] - for arm in self.arm_names - } - - @property - def assisted_grasp_end_points(self): - return { - arm: [ - GraspingPoint(link_name=f"{arm}_gripper_link2", position=th.tensor([-0.032, 0.0, -0.01])), - GraspingPoint(link_name=f"{arm}_gripper_link2", position=th.tensor([0.025, 0.0, -0.01])), - ] - for arm in self.arm_names - } - @cached_property def floor_touching_base_link_names(self): return ["wheel_link1", "wheel_link2", "wheel_link3"] diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 1a925b8f5..9e3fcc8bc 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -64,6 +64,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, **kwargs, @@ -99,6 +101,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -124,6 +132,8 @@ def __init__( abilities = robot_abilities if abilities is None else robot_abilities.update(abilities) # Initialize internal attributes that will be loaded later + self._include_sensor_names = None if include_sensor_names is None else set(include_sensor_names) + self._exclude_sensor_names = None if exclude_sensor_names is None else set(exclude_sensor_names) self._sensors = None # e.g.: scan sensor, vision sensor # All BaseRobots should have xform properties pre-loaded @@ -198,6 +208,25 @@ def _load_sensors(self): for prim in link.prim.GetChildren(): prim_type = prim.GetPrimTypeInfo().GetTypeName() if prim_type in SENSOR_PRIMS_TO_SENSOR_CLS: + # Possibly filter out the sensor based on name + prim_path = str(prim.GetPrimPath()) + not_blacklisted = self._exclude_sensor_names is None or not any( + name in prim_path for name in self._exclude_sensor_names + ) + whitelisted = self._include_sensor_names is None or any( + name in prim_path for name in self._include_sensor_names + ) + # Also make sure that the include / exclude sensor names are mutually exclusive + if self._exclude_sensor_names is not None and self._include_sensor_names is not None: + assert ( + len(set(self._exclude_sensor_names).intersection(set(self._include_sensor_names))) == 0 + ), ( + f"include_sensor_names and exclude_sensor_names must be mutually exclusive! " + f"Got: {self._include_sensor_names} and {self._exclude_sensor_names}" + ) + if not (not_blacklisted and whitelisted): + continue + # Infer what obs modalities to use for this sensor sensor_cls = SENSOR_PRIMS_TO_SENSOR_CLS[prim_type] sensor_kwargs = self._sensor_config[sensor_cls.__name__] @@ -210,11 +239,12 @@ def _load_sensors(self): # If the modalities list is empty, don't import the sensor. 
if not sensor_kwargs["modalities"]: continue + obs_modalities = obs_modalities.union(sensor_kwargs["modalities"]) # Create the sensor and store it internally sensor = create_sensor( sensor_type=prim_type, - relative_prim_path=absolute_prim_path_to_scene_relative(self.scene, str(prim.GetPrimPath())), + relative_prim_path=absolute_prim_path_to_scene_relative(self.scene, prim_path), name=f"{self.name}:{link_name}:{prim_type}:{sensor_counts[prim_type]}", **sensor_kwargs, ) diff --git a/omnigibson/robots/stretch.py b/omnigibson/robots/stretch.py index 8ba67b4ad..4d6f32948 100644 --- a/omnigibson/robots/stretch.py +++ b/omnigibson/robots/stretch.py @@ -54,28 +54,6 @@ def wheel_radius(self): def wheel_axle_length(self): return 0.330 - @property - def finger_lengths(self): - return {self.default_arm: 0.04} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_right", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([0.013, 0.0, 0.01])), - GraspingPoint(link_name="link_gripper_finger_left", position=th.tensor([-0.01, 0.0, 0.009])), - ] - } - @property def disabled_collision_pairs(self): return [ diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index 53a72ad07..ceadd19b9 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -36,6 +36,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -79,6 +81,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
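The substring filter added to `_load_sensors` above boils down to a whitelist/blacklist predicate over the sensor prim path. A standalone sketch with made-up prim paths:

```python
# Mirrors the filtering logic in _load_sensors; prim paths are made-up examples.
def keep_sensor(prim_path, include_names=None, exclude_names=None):
    not_blacklisted = exclude_names is None or not any(name in prim_path for name in exclude_names)
    whitelisted = include_names is None or any(name in prim_path for name in include_names)
    return not_blacklisted and whitelisted

print(keep_sensor("/World/robot0/head_link/Camera", include_names={"head"}, exclude_names={"wrist"}))   # True
print(keep_sensor("/World/robot0/wrist_link/Camera", include_names={"head"}, exclude_names={"wrist"}))  # False
```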
See self._get_proprioception_dict @@ -121,6 +129,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -230,34 +240,6 @@ def _default_controllers(self): controllers["gripper_{}".format(arm)] = "MultiFingerGripperController" return controllers - @property - def assisted_grasp_start_points(self): - return { - arm: [ - GraspingPoint( - link_name="gripper_{}_right_finger_link".format(arm), position=th.tensor([-0.001, 0.0, -0.2]) - ), - GraspingPoint( - link_name="gripper_{}_right_finger_link".format(arm), position=th.tensor([-0.001, 0.0, -0.13]) - ), - ] - for arm in self.arm_names - } - - @property - def assisted_grasp_end_points(self): - return { - arm: [ - GraspingPoint( - link_name="gripper_{}_left_finger_link".format(arm), position=th.tensor([0.001, 0.0, -0.2]) - ), - GraspingPoint( - link_name="gripper_{}_left_finger_link".format(arm), position=th.tensor([0.001, 0.0, -0.13]) - ), - ] - for arm in self.arm_names - } - @property def arm_control_idx(self): # Add combined entry @@ -266,10 +248,6 @@ def arm_control_idx(self): idxs["combined"] = th.sort(th.cat([val for val in idxs.values()]))[0] return idxs - @property - def finger_lengths(self): - return {arm: 0.12 for arm in self.arm_names} - @property def disabled_collision_link_names(self): # These should NEVER have collisions in the first place (i.e.: these are poorly modeled geoms from the source diff --git a/omnigibson/robots/untucked_arm_pose_robot.py b/omnigibson/robots/untucked_arm_pose_robot.py index 4000969ce..cf7664d9e 100644 --- a/omnigibson/robots/untucked_arm_pose_robot.py +++ b/omnigibson/robots/untucked_arm_pose_robot.py @@ -39,6 +39,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -80,6 +82,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -117,6 +125,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, diff --git a/omnigibson/robots/vx300s.py b/omnigibson/robots/vx300s.py index ac55c2b74..683bcf5e0 100644 --- a/omnigibson/robots/vx300s.py +++ b/omnigibson/robots/vx300s.py @@ -34,6 +34,8 @@ def __init__( reset_joint_pos=None, # Unique to BaseRobot obs_modalities=("rgb", "proprio"), + include_sensor_names=None, + exclude_sensor_names=None, proprio_obs="default", sensor_config=None, # Unique to ManipulationRobot @@ -70,6 +72,12 @@ def __init__( Valid options are "all", or a list containing any subset of omnigibson.sensors.ALL_SENSOR_MODALITIES. Note: If @sensor_config explicitly specifies `modalities` for a given sensor class, it will override any values specified from @obs_modalities! + include_sensor_names (None or list of str): If specified, substring(s) to check for in all raw sensor prim + paths found on the robot. A sensor must include one of the specified substrings in order to be included + in this robot's set of sensors + exclude_sensor_names (None or list of str): If specified, substring(s) to check against in all raw sensor + prim paths found on the robot. A sensor must not include any of the specified substrings in order to + be included in this robot's set of sensors proprio_obs (str or list of str): proprioception observation key(s) to use for generating proprioceptive observations. If str, should be exactly "default" -- this results in the default proprioception observations being used, as defined by self.default_proprio_obs. 
See self._get_proprioception_dict @@ -100,6 +108,8 @@ def __init__( action_normalize=action_normalize, reset_joint_pos=reset_joint_pos, obs_modalities=obs_modalities, + include_sensor_names=include_sensor_names, + exclude_sensor_names=exclude_sensor_names, proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, @@ -128,10 +138,6 @@ def _default_controllers(self): def _default_joint_pos(self): return th.tensor([0.0, -0.849879, 0.258767, 0.0, 1.2831712, 0.0, 0.057, 0.057]) - @property - def finger_lengths(self): - return {self.default_arm: 0.1} - @cached_property def arm_link_names(self): return { @@ -174,19 +180,3 @@ def finger_joint_names(self): @property def teleop_rotation_offset(self): return {self.default_arm: euler2quat([-math.pi, 0, 0])} - - @property - def assisted_grasp_start_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="right_finger_link", position=th.tensor([0.0, 0.001, 0.057])), - ] - } - - @property - def assisted_grasp_end_points(self): - return { - self.default_arm: [ - GraspingPoint(link_name="left_finger_link", position=th.tensor([0.0, 0.001, 0.057])), - ] - } diff --git a/omnigibson/utils/backend_utils.py b/omnigibson/utils/backend_utils.py index dfdf8a32d..99b162753 100644 --- a/omnigibson/utils/backend_utils.py +++ b/omnigibson/utils/backend_utils.py @@ -96,7 +96,7 @@ class _ComputeTorchBackend(_ComputeBackend): zeros = lambda *args: th.zeros(*args, dtype=th.float32) ones = lambda *args: th.ones(*args, dtype=th.float32) to_numpy = lambda x: x.numpy() - from_numpy = lambda x: th.from_numpy() + from_numpy = lambda x: th.from_numpy(x) to_torch = lambda x: x from_torch = lambda x: x from_torch_recursive = lambda dic: dic @@ -145,7 +145,7 @@ class _ComputeNumpyBackend(_ComputeBackend): abs = np.abs sqrt = np.sqrt mean = lambda val, dim=None, keepdim=False: np.mean(val, axis=dim, keepdims=keepdim) - copy = lambda arr: np.array(arr) + copy = lambda arr: arr.copy() eye = np.eye view = lambda arr, shape: arr.reshape(shape) arange = np.arange diff --git a/omnigibson/utils/grasping_planning_utils.py b/omnigibson/utils/grasping_planning_utils.py index 726706841..db59225f0 100644 --- a/omnigibson/utils/grasping_planning_utils.py +++ b/omnigibson/utils/grasping_planning_utils.py @@ -231,7 +231,10 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint ) # Now apply the grasp offset. 
- dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05 + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) + dist_from_grasp_pos = avg_finger_offset + 0.05 offset_grasp_pose_in_bbox_frame = ( grasp_position_in_bbox_frame + canonical_push_axis * push_axis_closer_side_sign * dist_from_grasp_pos, grasp_quat_in_bbox_frame, @@ -263,14 +266,16 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint waypoint_start_offset = ( -0.05 * approach_direction_in_world_frame if should_open else 0.05 * approach_direction_in_world_frame ) + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) waypoint_start_pose = ( grasp_pose_in_world_frame[0] - + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm] + waypoint_start_offset), + + -1 * approach_direction_in_world_frame * (avg_finger_offset + waypoint_start_offset), grasp_pose_in_world_frame[1], ) waypoint_end_pose = ( - target_hand_pose_in_world_frame[0] - + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm]), + target_hand_pose_in_world_frame[0] + -1 * approach_direction_in_world_frame * avg_finger_offset, target_hand_pose_in_world_frame[1], ) waypoints = interpolate_waypoints(waypoint_start_pose, waypoint_end_pose, num_waypoints=num_waypoints) @@ -409,7 +414,10 @@ def grasp_position_for_open_on_revolute_joint(robot, target_obj, relevant_joint, ) # Now apply the grasp offset. - dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05 + avg_finger_offset = th.mean( + th.tensor([length for length in robot.eef_to_fingertip_lengths[robot.default_arm].values()]) + ) + dist_from_grasp_pos = avg_finger_offset + 0.05 offset_in_bbox_frame = canonical_open_direction * open_axis_closer_side_sign * dist_from_grasp_pos offset_grasp_pose_in_bbox_frame = (grasp_position + offset_in_bbox_frame, grasp_quat_in_bbox_frame) offset_grasp_pose_in_world_frame = T.pose_transform( diff --git a/omnigibson/utils/processing_utils.py b/omnigibson/utils/processing_utils.py index bd81ceba8..5acc088a7 100644 --- a/omnigibson/utils/processing_utils.py +++ b/omnigibson/utils/processing_utils.py @@ -116,7 +116,7 @@ def _dump_state(self): state = super()._dump_state() # Add info from this filter - state["past_samples"] = cb.to_torch(self.past_samples) + state["past_samples"] = cb.to_torch(cb.copy(self.past_samples)) state["current_idx"] = self.current_idx state["fully_filled"] = self.fully_filled @@ -127,7 +127,7 @@ def _load_state(self, state): super()._load_state(state=state) # Load relevant info for this filter - self.past_samples = cb.from_torch(state["past_samples"]) + self.past_samples = cb.copy(cb.from_torch(state["past_samples"])) self.current_idx = state["current_idx"] self.fully_filled = state["fully_filled"] diff --git a/omnigibson/utils/ui_utils.py b/omnigibson/utils/ui_utils.py index 8df735478..9b96898eb 100644 --- a/omnigibson/utils/ui_utils.py +++ b/omnigibson/utils/ui_utils.py @@ -659,18 +659,18 @@ def generate_ik_keypress_mapping(self, controller_info): """ mapping = {} - mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.5} - 
mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.5} + mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.1} + mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.1} + mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.1} return mapping @@ -687,18 +687,18 @@ def generate_osc_keypress_mapping(self, controller_info): """ mapping = {} - mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.5} - mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.5} - mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.5} + mapping[lazy.carb.input.KeyboardInput.UP] = {"idx": controller_info["start_idx"] + 0, "val": 0.25} + 
mapping[lazy.carb.input.KeyboardInput.DOWN] = {"idx": controller_info["start_idx"] + 0, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.RIGHT] = {"idx": controller_info["start_idx"] + 1, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.LEFT] = {"idx": controller_info["start_idx"] + 1, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.P] = {"idx": controller_info["start_idx"] + 2, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.SEMICOLON] = {"idx": controller_info["start_idx"] + 2, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.N] = {"idx": controller_info["start_idx"] + 3, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.B] = {"idx": controller_info["start_idx"] + 3, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.O] = {"idx": controller_info["start_idx"] + 4, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.U] = {"idx": controller_info["start_idx"] + 4, "val": -0.25} + mapping[lazy.carb.input.KeyboardInput.V] = {"idx": controller_info["start_idx"] + 5, "val": 0.25} + mapping[lazy.carb.input.KeyboardInput.C] = {"idx": controller_info["start_idx"] + 5, "val": -0.25} return mapping diff --git a/tests/test_controllers.py b/tests/test_controllers.py index fbb4130c4..f0ce52e39 100644 --- a/tests/test_controllers.py +++ b/tests/test_controllers.py @@ -149,11 +149,11 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): n_steps = { "pose_delta_ori": { - "zero": 10, - "forward": 10, - "side": 10, - "up": 10, - "rotate": 10, + "zero": 40, + "forward": 40, + "side": 40, + "up": 40, + "rotate": 20, "base_move": 30, }, "absolute_pose": { @@ -181,10 +181,15 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): for robot in env.robots: robot.keep_still() + # We need to explicitly reset the controllers to unify the initial state that will be seen + # during downstream action executions -- i.e.: the state seen after robot.reload_controllers() + # is called each time + for controller in robot.controllers.values(): + controller.reset() + # Update initial state (robot should be stable and still) env.scene.update_initial_state() - # Reset the environment env.scene.reset() # Record initial eef pose of all robots @@ -233,10 +238,10 @@ def check_ori_error(curr_orientation, init_orientation, tol=0.1): break start_idx += robot.controllers[c].command_dim if controller_mode == "pose_delta_ori": - forward_action[start_idx] = 0.1 - side_action[start_idx + 1] = 0.1 - up_action[start_idx + 2] = 0.1 - rot_action[start_idx + 3] = 0.1 + forward_action[start_idx] = 0.02 + side_action[start_idx + 1] = 0.02 + up_action[start_idx + 2] = 0.02 + rot_action[start_idx + 3] = 0.02 elif controller_mode == "absolute_pose": for act in [zero_action, forward_action, side_action, up_action, rot_action]: act[start_idx : start_idx + 3] = init_eef_pos.clone() diff --git a/tests/test_data_collection.py b/tests/test_data_collection.py index 30625b3b1..fac7532c2 100644 --- a/tests/test_data_collection.py +++ b/tests/test_data_collection.py @@ -136,5 +136,5 @@ def test_data_collect_and_playback(): n_render_iterations=1, only_successes=False, ) - env.playback_dataset(record=True) + env.playback_dataset(record_data=True) env.save_data() diff --git a/tests/test_robot_states_flatcache.py b/tests/test_robot_states_flatcache.py index 3de592249..522f801af 100644 --- a/tests/test_robot_states_flatcache.py +++ b/tests/test_robot_states_flatcache.py @@ -63,8 +63,8 @@ def camera_pose_test(flatcache): relative_pose_transform(sensor_world_pos, sensor_world_ori, robot_world_pos, 
robot_world_ori) ) - sensor_world_pos_gt = th.tensor([150.5187, 149.8295, 101.0960]) - sensor_world_ori_gt = th.tensor([0.0199, -0.1330, 0.9892, -0.0580]) + sensor_world_pos_gt = th.tensor([150.5134, 149.8278, 101.0816]) + sensor_world_ori_gt = th.tensor([0.0176, -0.1205, 0.9910, -0.0549]) assert th.allclose(sensor_world_pos, sensor_world_pos_gt, atol=1e-3) assert quaternions_close(sensor_world_ori, sensor_world_ori_gt, atol=1e-3)
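The updated ground-truth check compares positions with `th.allclose` and orientations with `quaternions_close`, which has to treat `q` and `-q` as the same rotation. The helper below only illustrates that idea with the values from the test; it is not OmniGibson's actual implementation.

```python
# Illustration of the pose assertions; quats_equivalent is a stand-in, not the real helper.
import torch as th

def quats_equivalent(q1, q2, atol=1e-3):
    # A quaternion and its negation encode the same rotation
    return th.allclose(q1, q2, atol=atol) or th.allclose(q1, -q2, atol=atol)

pos = th.tensor([150.5134, 149.8278, 101.0816])
pos_gt = th.tensor([150.5134, 149.8278, 101.0816])
ori = th.tensor([0.0176, -0.1205, 0.9910, -0.0549])
ori_gt = th.tensor([-0.0176, 0.1205, -0.9910, 0.0549])

assert th.allclose(pos, pos_gt, atol=1e-3)
assert quats_equivalent(ori, ori_gt)  # passes: the quaternions differ only by sign
```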