diff --git a/vz_human_state_estimation/launch/human_state_estimation.launch b/vz_human_state_estimation/launch/human_state_estimation.launch
index 641dbea..ef5cdb8 100644
--- a/vz_human_state_estimation/launch/human_state_estimation.launch
+++ b/vz_human_state_estimation/launch/human_state_estimation.launch
@@ -1,3 +1,5 @@
+
+
\ No newline at end of file
diff --git a/vz_human_state_estimation/scripts/ros_interface.py b/vz_human_state_estimation/scripts/ros_interface.py
index d193645..21453a3 100755
--- a/vz_human_state_estimation/scripts/ros_interface.py
+++ b/vz_human_state_estimation/scripts/ros_interface.py
@@ -19,6 +19,15 @@ class ROSInterface:
self.cv_rgbd_img = None
self.cv_thermal_img = None
self.cv_bounding_boxed_img = None
+
+ # Images for display and for passing to the human state estimation node;
+ # they are rotated when the /flip_images_human_state_est parameter is true
+ self.cv_rgbd_img_probably_rotated = None
+ self.cv_thermal_img_probably_rotated = None
+
+ # Whether to rotate the incoming images before display and processing
+ self.flip_images_human_state_est = rospy.get_param('flip_images_human_state_est')
+
self.MDA = mda.ConnectToROS()
# set up CV Bridge
@@ -35,16 +44,30 @@ class ROSInterface:
def ats_callback(self,rgbd_img_data,thermal_img_data):
self.cv_rgbd_img = self.bridge.imgmsg_to_cv2(rgbd_img_data)
self.cv_thermal_img = self.bridge.imgmsg_to_cv2(thermal_img_data)
- self.cv_bounding_boxed_img = self.MDA.processData(self.cv_rgbd_img,self.cv_thermal_img)
+
+ # Rotate if needed: the raw RGBD and thermal frames come off the cameras
+ # incorrectly oriented, so rotate them before displaying them and before
+ # passing them to the human state estimation node
+ if self.flip_images_human_state_est:
+ ##### NOTE: Change rotation based on what you see ########
+ self.cv_rgbd_img_probably_rotated = cv2.rotate(self.cv_rgbd_img,cv2.ROTATE_90_CLOCKWISE)
+ self.cv_thermal_img_probably_rotated = cv2.rotate(self.cv_thermal_img,cv2.ROTATE_180)
+ else:
+ self.cv_rgbd_img_probably_rotated = self.cv_rgbd_img
+ self.cv_thermal_img_probably_rotated = self.cv_thermal_img
+
+ # Call the human state estimation code via the processData interface
+ self.cv_bounding_boxed_img = self.MDA.processData(self.cv_rgbd_img_probably_rotated,self.cv_thermal_img_probably_rotated)
def run(self):
while not rospy.is_shutdown():
- if(self.cv_rgbd_img is None or self.cv_thermal_img is None or self.cv_bounding_boxed_img is None):
+ if(self.cv_rgbd_img_probably_rotated is None or self.cv_thermal_img_probably_rotated is None or self.cv_bounding_boxed_img is None):
continue
try:
# Show images
- cv2.imshow('D435i',cv2.resize(self.cv_rgbd_img,(600,800), interpolation = cv2.INTER_AREA))
- cv2.imshow('Thermal',cv2.resize(self.cv_thermal_img,(600,800), interpolation = cv2.INTER_AREA))
+ cv2.imshow('D435i',cv2.resize(self.cv_rgbd_img_probably_rotated,(600,800), interpolation = cv2.INTER_AREA))
+ cv2.imshow('Thermal',cv2.resize(self.cv_thermal_img_probably_rotated,(600,800), interpolation = cv2.INTER_AREA))
cv2.imshow('BoundingBox',cv2.resize(self.cv_bounding_boxed_img,(600,800), interpolation = cv2.INTER_AREA))
cv2.waitKey(3)
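As a standalone illustration of the pattern this hunk introduces, the sketch below reads the flip parameter and conditionally rotates a frame. It is not code from the repository: the node name is made up, and the False default passed to get_param is an assumption (ros_interface.py reads the parameter without a default, so it raises if the launch file does not set it).

# Standalone sketch, not part of the diff. Assumes a ROS master is running.
import cv2
import numpy as np
import rospy

rospy.init_node('flip_param_demo')  # hypothetical node name

# Default of False is an assumption; ros_interface.py reads it without a default.
flip = rospy.get_param('flip_images_human_state_est', False)

frame = np.zeros((480, 640, 3), dtype=np.uint8)  # stand-in for a camera frame
if flip:
    # Same rotation constant the callback applies to the RGBD image.
    frame_for_display = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)
else:
    frame_for_display = frame

print(frame_for_display.shape)  # (640, 480, 3) when rotated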
diff --git a/vz_ros_wrappers/launch/D435i_and_Lepton.launch b/vz_ros_wrappers/launch/D435i_and_Lepton.launch
index 5e3bad1..5e59c99 100644
--- a/vz_ros_wrappers/launch/D435i_and_Lepton.launch
+++ b/vz_ros_wrappers/launch/D435i_and_Lepton.launch
@@ -1,4 +1,6 @@
+
+
\ No newline at end of file
diff --git a/vz_ros_wrappers/scripts/grab_D435i.py b/vz_ros_wrappers/scripts/grab_D435i.py
index 9118c4c..ea2df24 100755
--- a/vz_ros_wrappers/scripts/grab_D435i.py
+++ b/vz_ros_wrappers/scripts/grab_D435i.py
@@ -31,11 +31,18 @@ class D435i:
def __init__(self):
# Intialize empty stores for current image
self.D435i_image = None
+ # Copy of the image, rotated for display when /flip_images_grab_data is true
+ self.D435i_image_for_display = None
+
+ # Whether to rotate captured frames before displaying them
+ self.flip_images_grab_data = rospy.get_param('flip_images_grab_data')
+
# setup subscribed topics
# initialize ros/cv2 bridge
# Note: when testing with your own video, replace with local file
self.D435i_cap = cv2.VideoCapture('/dev/video4')
+ # self.D435i_cap = cv2.VideoCapture('/home/ananya/Downloads/2022-02-09-16-20-58(2).mp4')
if not self.D435i_cap.isOpened():
raise(Exception,'Unable to open video stream')
@@ -60,12 +67,18 @@ class D435i:
# todo ananya: comment out function below later once fully ros integrated
def display_images(self, timer):
- # if self.thermal_image is None:
if self.D435i_image is None:
return
try:
# Show images
- cv2.imshow('D435i',cv2.resize(self.D435i_image,(600,800), interpolation = cv2.INTER_AREA))
+ if self.flip_images_grab_data:
+ ##### NOTE: Change rotation based on what you see ########
+ self.D435i_image_for_display = cv2.rotate(self.D435i_image,cv2.ROTATE_90_CLOCKWISE)
+ ################################################################
+ else:
+ self.D435i_image_for_display = self.D435i_image
+
+ cv2.imshow('D435i',cv2.resize(self.D435i_image_for_display,(600,800), interpolation = cv2.INTER_AREA))
cv2.waitKey(3)
except Exception as e:
@@ -75,9 +88,9 @@ class D435i:
while not rospy.is_shutdown():
ret, self.D435i_image = self.D435i_cap.read()
- ##### NOTE: Change rotation based on what you see ########
- self.D435i_image = cv2.rotate(self.D435i_image,cv2.ROTATE_90_CLOCKWISE)
- ################################################################
+ # ##### NOTE: Change rotation based on what you see ########
+ # self.D435i_image = cv2.rotate(self.D435i_image,cv2.ROTATE_90_CLOCKWISE)
+ # ################################################################
# Note : To read the input at the same frame rate as the recorded video, when using
# locally recorded video, uncomment the following and replace with (1/frame rate of video)
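The run loop above assigns the result of cap.read() without checking the returned flag, so a dropped frame would reach cv2.rotate or cv2.imshow as None. Below is a minimal guarded capture loop, a sketch only, reusing the device path, window name, size, and rotation constant from grab_D435i.py:

import cv2

cap = cv2.VideoCapture('/dev/video4')  # same device as grab_D435i.py
if not cap.isOpened():
    raise RuntimeError('Unable to open video stream')

while True:
    ret, frame = cap.read()
    if not ret or frame is None:
        break  # dropped frame or end of a recorded file
    view = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)
    cv2.imshow('D435i', cv2.resize(view, (600, 800), interpolation=cv2.INTER_AREA))
    if cv2.waitKey(3) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()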
diff --git a/vz_ros_wrappers/scripts/grab_thermal.py b/vz_ros_wrappers/scripts/grab_thermal.py
index 2813d77..9279e3b 100755
--- a/vz_ros_wrappers/scripts/grab_thermal.py
+++ b/vz_ros_wrappers/scripts/grab_thermal.py
@@ -34,6 +34,11 @@ class FLIR_LEPTON:
def __init__(self):
# Intialize empty stores for current image
self.thermal_image = None
+ # Copy of the image, rotated for display when /flip_images_grab_data is true
+ self.thermal_image_for_display = None
+
+ # Whether to rotate captured frames before displaying them
+ self.flip_images_grab_data = rospy.get_param('flip_images_grab_data')
# setup subscribed topics
# initialize ros/cv2 bridge
@@ -42,8 +47,8 @@ class FLIR_LEPTON:
# for the vz project stretch robot, the top aux usb is video6
# Note: when testing with your own video, replace with local file
- # self.thermal_cap = cv2.VideoCapture('/home/ananya/Downloads/Thermal-3(2).mp4')
self.thermal_cap = cv2.VideoCapture('/dev/video6')
+ # self.thermal_cap = cv2.VideoCapture('/home/ananya/Downloads/Thermal-3(2).mp4')
if not self.thermal_cap.isOpened():
raise(Exception,'Unable to open video stream')
@@ -71,7 +76,14 @@ class FLIR_LEPTON:
return
try:
# Show images
- cv2.imshow('Lepton',cv2.resize(self.thermal_image,(600,800), interpolation = cv2.INTER_AREA))
+ if self.flip_images_grab_data:
+ ##### NOTE: Change rotation based on what you see ########
+ self.thermal_image_for_display = cv2.rotate(self.thermal_image,cv2.ROTATE_180)
+ ################################################################
+ else:
+ self.thermal_image_for_display = self.thermal_image
+
+ cv2.imshow('Lepton',cv2.resize(self.thermal_image_for_display,(600,800), interpolation = cv2.INTER_AREA))
cv2.waitKey(3)
except Exception as e:
@@ -81,9 +93,9 @@ class FLIR_LEPTON:
while not rospy.is_shutdown():
ret, self.thermal_image = self.thermal_cap.read()
- ##### NOTE: Change rotation based on what you see ########
- self.thermal_image = cv2.rotate(self.thermal_image,cv2.ROTATE_180)
- ################################################################
+ # ##### NOTE: Change rotation based on what you see ########
+ # self.thermal_image = cv2.rotate(self.thermal_image,cv2.ROTATE_180)
+ # ################################################################
# Note : To read the input at the same frame rate as the recorded video, when using
# locally recorded video, uncomment the following and replace with (1/frame rate of video)
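The rotate-for-display branch now appears in grab_D435i.py, grab_thermal.py, and ros_interface.py with only the image and rotation constant changing. A small shared helper could express it once; the sketch below is a hypothetical refactor, and rotate_for_display is an invented name, not something in the repository:

import cv2

def rotate_for_display(image, flip_enabled, rotation=cv2.ROTATE_90_CLOCKWISE):
    """Return image rotated by the given constant when flip_enabled, else unchanged."""
    if image is None:
        return None
    return cv2.rotate(image, rotation) if flip_enabled else image

# Example usage mirroring the thermal display path:
# view = rotate_for_display(self.thermal_image, self.flip_images_grab_data, cv2.ROTATE_180)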