@@ -21,33 +21,23 @@
},
{
"cell_type": "code",
"execution_count": 1 ,
"execution_count": null ,
"id": "3f861f1c",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Defaulting to user installation because normal site-packages is not writeable\n",
"Collecting git+https://github.com/safijari/tiny_tf.git\n",
" Cloning https://github.com/safijari/tiny_tf.git to /tmp/pip-req-build-qaor8p3r\n",
" Running command git clone --filter=blob:none --quiet https://github.com/safijari/tiny_tf.git /tmp/pip-req-build-qaor8p3r\n",
" Resolved https://github.com/safijari/tiny_tf.git to commit 6d28b69dfde971f5ed63ff24de5c84b950690997\n",
" Preparing metadata (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25hRequirement already satisfied: numpy in /home/hello-robot/.local/lib/python3.8/site-packages (from tiny-tf==1.2.0) (1.22.4)\n"
]
}
],
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"!pip install git+https://github.com/safijari/tiny_tf.git"
]
},
{
"cell_type": "code",
"execution_count": 2 ,
"execution_count": 1,
"id": "e870dd4a",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# FUNMAP Imports\n",
@@ -65,6 +55,7 @@
"# Robot imports\n",
"import threading\n",
"from urdfpy import URDF\n",
"import pyrealsense2 as rs\n",
"import stretch_body.hello_utils as hu\n",
"from tiny_tf.tf import TFTree, Transform\n",
"from stretch_body.robot import Robot\n",
@@ -83,14 +74,16 @@
"source": [
"## Emulating ROS\n",
"\n",
"We'll set up some infrastructure to emulate some of the ROS components that FUNMAP relies on. Namely, we emulate the TF2 library's `lookup_transform()` method."
"We'll set up some infrastructure to emulate some of the ROS components that FUNMAP relies on. Namely, we emulate the TF2 library's `lookup_transform()` method and reading point clouds from the Realsense depth camera ."
]
},
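{
"cell_type": "code",
"execution_count": null,
"id": "tf-lookup-sketch",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Minimal illustrative sketch (not from FUNMAP): a TF lookup such as\n",
"# lookup_transform('base_link', 'camera_color_optical_frame') boils down to composing the\n",
"# 4x4 homogeneous transforms along the chain between the two frames. The emulation defined\n",
"# later builds that chain from Stretch's URDF; the helper and numbers below are made up.\n",
"def make_homogeneous_transform(rotation, translation):\n",
"    # Assemble a 4x4 homogeneous transform from a 3x3 rotation and a 3-vector translation\n",
"    T = np.eye(4)\n",
"    T[:3, :3] = rotation\n",
"    T[:3, 3] = translation\n",
"    return T\n",
"\n",
"T_base_to_mast = make_homogeneous_transform(np.eye(3), [0.0, 0.0, 1.3])     # made-up offsets\n",
"T_mast_to_camera = make_homogeneous_transform(np.eye(3), [0.05, 0.0, 0.1])  # made-up offsets\n",
"T_base_to_camera = T_base_to_mast @ T_mast_to_camera\n",
"T_base_to_camera"
]
},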
{
"cell_type": "code",
"execution_count": 3 ,
"execution_count": null ,
"id": "7460aa2c",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Setup the Python API to Stretch\n",
@@ -100,24 +93,19 @@
"\n",
"# Ensure robot is homed\n",
"if not robot.is_calibrated():\n",
" robot.home()"
" robot.home()\n",
"\n",
"robot.stow()"
]
},
{
"cell_type": "code",
"execution_count": 4 ,
"execution_count": null ,
"id": "033774e1",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/hello-robot/.local/lib/python3.8/site-packages/urdfpy/urdf.py:2169: RuntimeWarning: invalid value encountered in true_divide\n",
" value = value / np.linalg.norm(value)\n"
]
}
],
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Load Stretch's URDF\n",
"urdf_path = str((pathlib.Path(hu.get_fleet_directory()) / 'exported_urdf' / 'stretch.urdf').absolute())\n",
@@ -134,9 +122,11 @@
},
{
"cell_type": "code",
"execution_count": 5 ,
"execution_count": null ,
"id": "96f891c9",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"def get_current_configuration(tool):\n",
@@ -189,6 +179,225 @@
" }"
]
},
{
"cell_type": "markdown",
"id": "866c112b-86b7-4f0e-8b68-90cf8d0b3813",
"metadata": {
"tags": []
},
"source": [
"## Realsense"
]
},
{
"cell_type": "markdown",
"id": "e5782197-e4fd-48e6-be71-050e7318ed8e",
"metadata": {},
"source": [
"Next, we'll start the Realsense pipeline."
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "221818a7-11d1-440d-86d8-b71e521b5487",
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"<pyrealsense2.pyrealsense2.pipeline_profile at 0x7f1235ee51f0>"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fps_color = 30 # FPS for color-only videos and for color+depth videos\n",
"resolution_color = [1280, 720] # [1920, 1080], [1280, 720], [640, 480]\n",
"resolution_depth = [1280, 720] # [1280, 720], [640, 480]\n",
"\n",
"pipeline = rs.pipeline()\n",
"config = rs.config()\n",
"config.enable_stream(rs.stream.depth, resolution_depth[0], resolution_depth[1], rs.format.z16, fps_color)\n",
"config.enable_stream(rs.stream.color, resolution_color[0], resolution_color[1], rs.format.bgr8, fps_color)\n",
"pipeline.start(config)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "43472f59-9907-4e7f-8cc8-31ee4a129b79",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"pc = rs.pointcloud()\n",
"align = rs.align(rs.stream.depth)"
]
},
{
"cell_type": "code",
"execution_count": 37,
"id": "b881bc97-71da-4cbb-a452-ff135ff6dd63",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Get the latest frames from the camera\n",
"frames = pipeline.wait_for_frames()\n",
"aligned = align.process(frames)\n",
"depth_frame = frames.get_depth_frame()\n",
"color_aligned_to_depth_frame = aligned.first(rs.stream.color)"
]
},
{
"cell_type": "code",
"execution_count": 38,
"id": "18115b02-d2d2-4f04-a203-ea196c637862",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Convert images to numpy arrays\n",
"depth_image = np.asanyarray(depth_frame.get_data())\n",
"color_image = np.asanyarray(color_aligned_to_depth_frame.get_data())"
]
},
{
"cell_type": "code",
"execution_count": 39,
"id": "335d7679-bda4-4f0d-a086-b40e42e78804",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"points = pc.calculate(depth_frame)\n",
"pc.map_to(color_aligned_to_depth_frame)"
]
},
{
"cell_type": "code",
"execution_count": 40,
"id": "962dfa2f-1090-45a1-ace5-f7ccc58462b9",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"v = points.get_vertices()\n",
"xyz = np.asanyarray(v).view(np.float32).reshape(-1, 3) # xyz"
]
},
{
"cell_type": "code",
"execution_count": 41,
"id": "c00858fc-7067-4b58-bc0f-4282725a0f89",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"t = points.get_texture_coordinates()\n",
"texcoords = np.asanyarray(t).view(np.float32).reshape(-1, 2) # uv\n",
"cw, ch = color_image.shape[:2][::-1]\n",
"v, u = (texcoords * (cw, ch) + 0.5).astype(np.uint32).T\n",
"np.clip(u, 0, ch-1, out=u)\n",
"np.clip(v, 0, cw-1, out=v)\n",
"rgb = color_image[u, v] # rgb"
]
},
{
"cell_type": "code",
"execution_count": 42,
"id": "55cd4bc4-1ae8-48bf-a649-d0e93203daf4",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Frame #: 11631\n",
"Timestamp: 1683796286702.8328\n",
"Num points: 921600\n",
"(921600, 3)\n",
"(921600, 3)\n"
]
}
],
"source": [
"print(f'Frame #: {aligned.get_frame_number()}')\n",
"print(f'Timestamp: {aligned.timestamp}')\n",
"print(f'Num points: {points.size()}')\n",
"print(xyz.shape)\n",
"print(rgb.shape)"
]
},
{
"cell_type": "code",
"execution_count": 43,
"id": "b727f246-b611-4713-9c15-f1eeb80dff16",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"points_arr = np.zeros((points.size(),), dtype=[\n",
" ('x', np.float32),\n",
" ('y', np.float32),\n",
" ('z', np.float32),\n",
" ('r', np.uint8),\n",
" ('g', np.uint8),\n",
" ('b', np.uint8)])\n",
"points_arr['x'] = xyz[:, 0]\n",
"points_arr['y'] = xyz[:, 1]\n",
"points_arr['z'] = xyz[:, 2]\n",
"points_arr['r'] = rgb[:, 0]\n",
"points_arr['g'] = rgb[:, 1]\n",
"points_arr['b'] = rgb[:, 2]"
]
},
{
"cell_type": "code",
"execution_count": 44,
"id": "d236b294-b03c-4bb3-8fe7-a1bd745c4682",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"done\n"
]
}
],
"source": [
"pc_msg = ros_numpy.msgify(PointCloud2, points_arr)\n",
"print('done')"
]
},
{
"cell_type": "markdown",
"id": "241ea842-c238-4099-91cd-42e19ef0409e",
"metadata": {
"tags": []
},
"source": [
"## temp"
]
},
{
"cell_type": "markdown",
"id": "490e9415",
@@ -199,9 +408,11 @@
},
{
"cell_type": "code",
"execution_count": 6 ,
"execution_count": null ,
"id": "c443b5c1",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class FakeNode:\n",
@@ -219,7 +430,7 @@
" ('x', np.float32),\n",
" ('y', np.float32),\n",
" ('z', np.float32),\n",
" ('rgb', np.uin t32)])\n",
" ('rgb', np.floa t32)])\n",
" points_arr['x'] = np.random.random((npoints,))\n",
" points_arr['y'] = np.random.random((npoints,))\n",
" points_arr['z'] = np.random.random((npoints,))\n",
@@ -280,9 +491,11 @@
},
{
"cell_type": "code",
"execution_count": 7 ,
"execution_count": null ,
"id": "228b8825-320a-442b-827c-4aede6e4e644",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class HeaderMsg:\n",
@@ -368,9 +581,11 @@
},
{
"cell_type": "code",
"execution_count": 8 ,
"execution_count": null ,
"id": "e2396e2f",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"def draw_robot_mast_blind_spot_wedge(x_pix, y_pix, ang_rad, m_per_pix, image, verbose=False, value=255):\n",
@@ -511,9 +726,11 @@
},
{
"cell_type": "code",
"execution_count": 9 ,
"execution_count": null ,
"id": "1fed3d42",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"def get_p1_to_p2_matrix(p1_frame_id, p2_frame_id, tf2_buffer):\n",
@@ -532,9 +749,11 @@
},
{
"cell_type": "code",
"execution_count": 10 ,
"execution_count": null ,
"id": "e889d16e",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"@njit(fastmath=True)\n",
@@ -1008,9 +1227,11 @@
},
{
"cell_type": "code",
"execution_count": 11 ,
"execution_count": null ,
"id": "f5b4b144",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class MaxHeightImage:\n",
@@ -1422,9 +1643,11 @@
},
{
"cell_type": "code",
"execution_count": 12 ,
"execution_count": null ,
"id": "92ad0117",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class ROSMaxHeightImage(MaxHeightImage):\n",
@@ -1593,9 +1816,11 @@
},
{
"cell_type": "code",
"execution_count": 13 ,
"execution_count": null ,
"id": "37c0e390",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class VolumeOfInterest:\n",
@@ -1680,9 +1905,11 @@
},
{
"cell_type": "code",
"execution_count": 14 ,
"execution_count": null ,
"id": "d046c1d0",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class ROSVolumeOfInterest(VolumeOfInterest):\n",
@@ -1756,9 +1983,11 @@
},
{
"cell_type": "code",
"execution_count": 15 ,
"execution_count": null ,
"id": "f5610c35",
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"class HeadScan:\n",
@@ -2019,51 +2248,22 @@
{
"cell_type": "markdown",
"id": "5f5a7ef5-852d-43e6-82ce-1d21b286e5ad",
"metadata": {},
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"## Head Scanning Experiment"
]
},
{
"cell_type": "code",
"execution_count": 16 ,
"execution_count": null ,
"id": "c67281e1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"MaxHeightImage information:\n",
" image.shape = (2667, 2667)\n",
" image.dtype = uint8\n",
" m_per_pix = 0.006\n",
" m_per_height_unit = 0.0046456692913385824\n",
" voi.x_in_m = 16.0\n",
" voi.y_in_m = 16.0\n",
" voi.z_in_m = 1.18\n",
"Moving to {'joint_head_pan': -3.6, 'joint_head_tilt': -0.8}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': -3.6}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': -2.7583333333333333}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': -1.9166666666666667}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': -1.0750000000000002}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': -0.2333333333333334}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': 0.608333333333333}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': 1.45}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"Moving to {'joint_head_pan': 0.1, 'joint_head_tilt': -1.2}\n",
"FakeRobot.move_to_pose: sleeping 1 second\n",
"via rospy.loginfo: The head scan took 20.034795999526978 seconds.\n"
]
}
],
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"node = FakeNode()\n",
"head_scan = HeadScan(voi_side_m=16.0)\n",
@@ -2072,21 +2272,12 @@
},
{
"cell_type": "code",
"execution_count": 17 ,
"execution_count": null ,
"id": "7a0f7a53",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"HeadScan: Saving to base_filename = /home/hello-robot/repos/stretch_tutorials/stretch_body/jupyter/example_head_scan\n",
"MaxHeightImage saving to base_filename = /home/hello-robot/repos/stretch_tutorials/stretch_body/jupyter/example_head_scan_mhi\n",
"Finished saving.\n",
"Finished saving.\n"
]
}
],
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"head_scan.save(\"/home/hello-robot/repos/stretch_tutorials/stretch_body/jupyter/example_head_scan\")"
]
@@ -2095,6 +2286,29 @@
"cell_type": "code",
"execution_count": null,
"id": "3fa6d347-1a0b-4303-b3f4-40a1d5dd3615",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"node.point_cloud"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3cd042ff-9ddb-4276-ab8c-a1210bd1f866",
"metadata": {},
"outputs": [],
"source": [
"robot.stop()\n",
"pipeline.stop()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "edbd53d1-f2ab-43de-a995-825f2a1e3551",
"metadata": {},
"outputs": [],
"source": []