@@ -26,21 +26,7 @@
"metadata": {
"metadata": {
"tags": []
"tags": []
},
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Defaulting to user installation because normal site-packages is not writeable\n",
"Collecting git+https://github.com/safijari/tiny_tf.git\n",
" Cloning https://github.com/safijari/tiny_tf.git to /tmp/pip-req-build-yq8q8atk\n",
" Running command git clone --filter=blob:none --quiet https://github.com/safijari/tiny_tf.git /tmp/pip-req-build-yq8q8atk\n",
" Resolved https://github.com/safijari/tiny_tf.git to commit 6d28b69dfde971f5ed63ff24de5c84b950690997\n",
" Preparing metadata (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25hRequirement already satisfied: numpy in /home/hello-robot/.local/lib/python3.8/site-packages (from tiny-tf==1.2.0) (1.23.5)\n"
]
}
],
"outputs": [],
"source": [
"source": [
"!pip install git+https://github.com/safijari/tiny_tf.git"
"!pip install git+https://github.com/safijari/tiny_tf.git"
]
]
@@ -82,7 +68,6 @@
"cell_type": "markdown",
"cell_type": "markdown",
"id": "ccc1cae3",
"id": "ccc1cae3",
"metadata": {
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
"tags": []
},
},
"source": [
"source": [
@@ -99,97 +84,11 @@
"tags": []
"tags": []
},
},
"outputs": [
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"--------- Homing Head ----\n",
"--------- Homing Lift ----\n",
"Homing Lift...\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[INFO] [robot_monitor]: Guarded contact lift\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Hardstop detected at motor position (rad) 98.10670471191406\n",
"Marking Lift position to 1.097231 (m)\n",
"Marking Lift position to 0.000000 (m)\n",
"Lift homing successful\n",
"--------- Homing Arm ----\n",
"Homing Arm...\n",
"Hardstop detected at motor position (rad) 1.6194909811019897\n",
"Marking Arm position to 0.000000 (m)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[INFO] [robot_monitor]: Guarded contact arm\n",
"[INFO] [robot_monitor]: Wrist single tap: 15\n",
"[INFO] [robot_monitor]: Wrist single tap: 16\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Arm homing successful\n",
"Moving to first hardstop...\n",
"First hardstop contact at position (ticks): -25\n",
"-----\n",
"Homing offset was 1488\n",
"Marking current position to zero ticks\n",
"Homing offset is now 1510 (ticks)\n",
"-----\n",
"Current position (ticks): 40\n",
"Moving to calibrated zero: (rad)\n",
"Moving to first hardstop...\n",
"First hardstop contact at position (ticks): -26\n",
"-----\n",
"Homing offset was 2249\n",
"Marking current position to zero ticks\n",
"Homing offset is now 2269 (ticks)\n",
"-----\n",
"Current position (ticks): 62\n",
"Moving to calibrated zero: (rad)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[INFO] [robot_monitor]: Base bump event\n"
]
},
{
{
"name": "stdout",
"name": "stdout",
"output_type": "stream",
"output_type": "stream",
"text": [
"text": [
"--------- Stowing Arm ----\n",
"--------- Stowing Arm ----\n",
"Warning: Rate of calls to Pimu:trigger_motor_sync rate of 148.350157 above maximum frequency of 20.00 Hz. Motor commands dropped: 1\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[INFO] [robot_monitor]: Base bump event\n",
"[INFO] [robot_monitor]: Wrist single tap: 17\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"--------- Stowing ToolStretchDexWrist ----\n",
"--------- Stowing ToolStretchDexWrist ----\n",
"--------- Stowing Lift ----\n"
"--------- Stowing Lift ----\n"
]
]
@@ -317,7 +216,7 @@
{
{
"data": {
"data": {
"text/plain": [
"text/plain": [
"<pyrealsense2.pyrealsense2.pipeline_profile at 0x7ff540ff563 0>"
"<pyrealsense2.pyrealsense2.pipeline_profile at 0x7f401f41e3b 0>"
]
]
},
},
"execution_count": 6,
"execution_count": 6,
@@ -362,12 +261,18 @@
"# self.tf2_buffer = None\n",
"# self.tf2_buffer = None\n",
" self.joint_state = None\n",
" self.joint_state = None\n",
"# self.point_cloud = None\n",
"# self.point_cloud = None\n",
" self.thread_shutdown_flag = threading.Event()\n",
" self.thread_shutdown_flag.clear()\n",
" self.thread = threading.Thread(target=self._spin)\n",
" self.thread = threading.Thread(target=self._spin)\n",
" self.thread.setDaemon(True)\n",
" self.thread.setDaemon(True)\n",
" self.thread.start()\n",
" self.thread.start()\n",
"\n",
"\n",
" def stop(self):\n",
" self.thread_shutdown_flag.set()\n",
" self.thread.join(1)\n",
"\n",
" def _spin(self):\n",
" def _spin(self):\n",
" while True:\n",
" while not self.thread_shutdown_flag.is_set() :\n",
" # update self.tf2_buffer\n",
" # update self.tf2_buffer\n",
" tree = TFTree()\n",
" tree = TFTree()\n",
" q_curr = get_current_configuration(tool=robot.end_of_arm.name)\n",
" q_curr = get_current_configuration(tool=robot.end_of_arm.name)\n",
@@ -404,8 +309,17 @@
" np.clip(v, 0, cw-1, out=v)\n",
" np.clip(v, 0, cw-1, out=v)\n",
" rgb = color_image[u, v] # rgb\n",
" rgb = color_image[u, v] # rgb\n",
"\n",
"\n",
" # 5. Create point cloud matrix\n",
" points_arr = np.zeros((points.size(),), dtype=[\n",
" # 5. Filter out black points\n",
" non_black_mask = ~(rgb == 0).all(axis=1)\n",
" xyz = xyz[non_black_mask]\n",
" rgb = rgb[non_black_mask]\n",
" assert xyz.shape[0] == rgb.shape[0]\n",
" num_valid_points = rgb.shape[0]\n",
" if num_valid_points == 0:\n",
" continue\n",
"\n",
" # 6. Create point cloud matrix\n",
" points_arr = np.zeros((num_valid_points,), dtype=[\n",
" ('x', np.float32),\n",
" ('x', np.float32),\n",
" ('y', np.float32),\n",
" ('y', np.float32),\n",
" ('z', np.float32),\n",
" ('z', np.float32),\n",
@@ -420,7 +334,7 @@
" points_arr['b'] = rgb[:, 0]\n",
" points_arr['b'] = rgb[:, 0]\n",
" points_arr = ros_numpy.point_cloud2.merge_rgb_fields(points_arr)\n",
" points_arr = ros_numpy.point_cloud2.merge_rgb_fields(points_arr)\n",
"\n",
"\n",
" # 6 . Create PointCloud2 message\n",
" # 7 . Create PointCloud2 message\n",
" self.point_cloud = ros_numpy.msgify(PointCloud2, points_arr, stamp=time.time(), frame_id='camera_color_optical_frame')\n",
" self.point_cloud = ros_numpy.msgify(PointCloud2, points_arr, stamp=time.time(), frame_id='camera_color_optical_frame')\n",
"\n",
"\n",
" def move_to_pose(self, pose, return_before_done=False, custom_contact_thresholds=False):\n",
" def move_to_pose(self, pose, return_before_done=False, custom_contact_thresholds=False):\n",
@@ -538,7 +452,6 @@
"cell_type": "markdown",
"cell_type": "markdown",
"id": "390a3f70-b7ed-4a08-b811-fd6819c2810a",
"id": "390a3f70-b7ed-4a08-b811-fd6819c2810a",
"metadata": {
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
"tags": []
},
},
"source": [
"source": [
@@ -1767,13 +1680,12 @@
" # points = np.array([[x1,y1,z1], [x2,y2,z2]...]). The points\n",
" # points = np.array([[x1,y1,z1], [x2,y2,z2]...]). The points\n",
" # should be specified with respect to the coordinate system\n",
" # should be specified with respect to the coordinate system\n",
" # defined by points_frame_id.\n",
" # defined by points_frame_id.\n",
" \n",
" points_to_voi_mat, timestamp = self.voi.get_points_to_voi_matrix_with_tf2(points_frame_id, tf2_buffer)\n",
" points_to_voi_mat, timestamp = self.voi.get_points_to_voi_matrix_with_tf2(points_frame_id, tf2_buffer)\n",
"\n",
"\n",
" if points_to_voi_mat is not None: \n",
" if points_to_voi_mat is not None: \n",
" self.from_rgb_points(points_to_voi_mat, rgb_points)\n",
" self.from_rgb_points(points_to_voi_mat, rgb_points)\n",
" else:\n",
" else:\n",
" rospy.logwarn('ROSMaxHeightImage.from_rgb_points_with_tf2: failed to update the image likely due to a failure to lookup the transform using TF2. points_frame_id = {0}, points_timestamp = {1}, timeout_s = {2} '.format(points_frame_id, points_timestamp, timeout_s ))\n",
" rospy.logwarn('ROSMaxHeightImage.from_rgb_points_with_tf2: failed to update the image likely due to a failure to lookup the transform using TF2. points_frame_id = {0}'.format(points_frame_id))\n",
"\n",
"\n",
" \n",
" \n",
" def to_point_cloud(self, color_map=None):\n",
" def to_point_cloud(self, color_map=None):\n",
@@ -2188,7 +2100,7 @@
" yaml.dump(data, fid)\n",
" yaml.dump(data, fid)\n",
" print('Finished saving.')\n",
" print('Finished saving.')\n",
"\n",
"\n",
" \n",
"\n",
" @classmethod\n",
" @classmethod\n",
" def from_file(self, base_filename):\n",
" def from_file(self, base_filename):\n",
" print('HeadScan.from_file: base_filename =', base_filename)\n",
" print('HeadScan.from_file: base_filename =', base_filename)\n",
@@ -2261,7 +2173,7 @@
"FakeRobot.move_to_pose: sleeping 1 second\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': 0.1, 'joint_head_tilt': -1.2}\n",
"Moving to {'joint_head_pan': 0.1, 'joint_head_tilt': -1.2}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"via rospy.loginfo: The head scan took 25.813185453414917 seconds.\n"
"via rospy.loginfo: The head scan took 24.87184500694275 seconds.\n"
]
]
}
}
],
],
@@ -2296,26 +2208,12 @@
},
},
{
{
"cell_type": "code",
"cell_type": "code",
"execution_count": 20 ,
"execution_count": 19 ,
"id": "3cd042ff-9ddb-4276-ab8c-a1210bd1f866",
"id": "3cd042ff-9ddb-4276-ab8c-a1210bd1f866",
"metadata": {},
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Exception in thread Thread-7:\n",
"Traceback (most recent call last):\n",
" File \"/usr/lib/python3.8/threading.py\", line 932, in _bootstrap_inner\n",
" self.run()\n",
" File \"/usr/lib/python3.8/threading.py\", line 870, in run\n",
" self._target(*self._args, **self._kwargs)\n",
" File \"/tmp/ipykernel_16300/3221415467.py\", line 26, in _spin\n",
"RuntimeError: wait_for_frames cannot be called before start()\n"
]
}
],
"outputs": [],
"source": [
"source": [
"node.stop()\n",
"robot.stop()\n",
"robot.stop()\n",
"pipeline.stop()"
"pipeline.stop()"
]
]