Merge branch 'feat/i2w_pose2D_robot_impl' of https://github.com/butia-bots/butia_vision into feat/i2w_pose2D_robot_impl
Emilly Lamotte committed Jul 4, 2023
2 parents a5c4d4a + 8ffea96 commit 4f3758a
Showing 3 changed files with 32 additions and 15 deletions.
@@ -137,8 +137,8 @@ def __detectionDescriptionProcessing(self, data, description2d, header):
  w, h, _ = array_point_cloud.shape

  bbox_limits[0] = bbox_limits[0] if bbox_limits[0] > 0 else 0
- bbox_limits[1] = bbox_limits[1] if bbox_limits[1] > 0 else 0
- bbox_limits[2] = bbox_limits[2] if bbox_limits[2] < w else w-1
+ bbox_limits[1] = bbox_limits[1] if bbox_limits[1] < w else w-1
+ bbox_limits[2] = bbox_limits[2] if bbox_limits[2] > 0 else 0
  bbox_limits[3] = bbox_limits[3] if bbox_limits[3] < h else h-1

  desc_point_cloud = array_point_cloud[bbox_limits[2]:bbox_limits[3], bbox_limits[0]:bbox_limits[1], :]
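The two replaced lines clamped the x-max limit against 0 and the y-min limit against the image width; after the change each entry of bbox_limits is clipped against its own axis. A minimal standalone sketch of the same guard, assuming the [x_min, x_max, y_min, y_max] ordering implied by the slicing above (clamp_bbox is a hypothetical helper, not part of the node):

import numpy as np

def clamp_bbox(bbox_limits, w, h):
    # Clip [x_min, x_max, y_min, y_max] pixel limits to a w x h image,
    # mirroring the corrected per-axis clamping in the hunk above.
    x_min, x_max, y_min, y_max = bbox_limits
    x_min = int(np.clip(x_min, 0, w - 1))
    x_max = int(np.clip(x_max, 0, w - 1))
    y_min = int(np.clip(y_min, 0, h - 1))
    y_max = int(np.clip(y_max, 0, h - 1))
    return [x_min, x_max, y_min, y_max]

# A detection that spills past the left and right image borders:
print(clamp_bbox([-3, 700, 40, 200], w=640, h=480))   # -> [0, 639, 40, 200]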
@@ -215,22 +215,21 @@ def __detectionDescriptionProcessing(self, data, description2d, header):
  h, w = image_depth.shape

  bbox_limits[0] = bbox_limits[0] if bbox_limits[0] > 0 else 0
- bbox_limits[1] = bbox_limits[1] if bbox_limits[1] > 0 else 0
- bbox_limits[2] = bbox_limits[2] if bbox_limits[2] < w else w-1
+ bbox_limits[1] = bbox_limits[1] if bbox_limits[1] < w else w-1
+ bbox_limits[2] = bbox_limits[2] if bbox_limits[2] > 0 else 0
  bbox_limits[3] = bbox_limits[3] if bbox_limits[3] < h else h-1

- self.__mountLutTable(camera_info)
+ self.__mountLutTable(camera_info)

  center_depth = image_depth[int(center_y), int(center_x)]

- if center_depth == 0:
+ if center_depth <= 0:
      rospy.logwarn('INVALID DEPTH VALUE')

  center_depth/= 1000.

  limits = np.asarray([(bbox_limits[0], bbox_limits[2]), (bbox_limits[1], bbox_limits[3])])
-

  vertices_3d = np.zeros((len(limits), 3))

  vertices_3d[:, :2] = self.lut_table[limits[:, 1], limits[:, 0], :]*center_depth
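Here self.lut_table stores, per pixel, the factors that turn a metric depth into metric X/Y, so indexing it with the bbox corner pixels and multiplying by center_depth back-projects those corners. __mountLutTable itself is not part of this diff; the sketch below assumes the standard pinhole convention lut[v, u] = ((u - cx)/fx, (v - cy)/fy), and every number in it is a made-up example:

import numpy as np

def mount_lut(width, height, fx, fy, cx, cy):
    # Precompute per-pixel back-projection factors for a pinhole camera:
    # X = lut[v, u, 0] * z and Y = lut[v, u, 1] * z for a depth z in metres.
    uu, vv = np.meshgrid(np.arange(width), np.arange(height))  # (height, width)
    lut = np.empty((height, width, 2))
    lut[:, :, 0] = (uu - cx) / fx
    lut[:, :, 1] = (vv - cy) / fy
    return lut

lut = mount_lut(640, 480, fx=600.0, fy=600.0, cx=320.0, cy=240.0)
center_depth = 1250 / 1000.0                    # raw depth in mm -> metres
limits = np.asarray([(120, 80), (300, 260)])    # [(x_min, y_min), (x_max, y_max)]
vertices_3d = np.zeros((len(limits), 3))
vertices_3d[:, :2] = lut[limits[:, 1], limits[:, 0], :] * center_depth
vertices_3d[:, 2] = center_depth   # z set here for the sketch; the node's z handling is outside this hunk
print(vertices_3d)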
@@ -267,7 +266,24 @@ def __detectionDescriptionProcessing(self, data, description2d, header):
  min_bound = np.min(vertices_3d, axis=0)
  max_bound = np.max(vertices_3d, axis=0)

- box = o3d.geometry.AxisAlignedBoundingBox(min_bound, max_bound)
+ if self.fit_bbox:
+     box_depths = image_depth[limits[0, 1]:limits[1, 1], limits[0, 0]:limits[1, 0]]/1000.
+     box_lut = self.lut_table[limits[0, 1]:limits[1, 1], limits[0, 0]:limits[1, 0], :]
+
+     box_points = np.zeros((box_depths.size, 3))
+
+     box_points[:, 0] = box_lut[:, :, 0].flatten()*box_depths.flatten()
+     box_points[:, 1] = box_lut[:, :, 1].flatten()*box_depths.flatten()
+     box_points[:, 2] = box_depths.flatten()
+
+     box_points = box_points[box_points[:, 2] > 0]
+     box_points = box_points[np.logical_and(np.all(box_points > min_bound, axis=1),
+                                            np.all(box_points < max_bound, axis=1))]
+
+     box = o3d.geometry.AxisAlignedBoundingBox().create_from_points(o3d.utility.Vector3dVector(box_points))
+
+ else:
+     box = o3d.geometry.AxisAlignedBoundingBox(min_bound, max_bound)

  #TODO:
  '''
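With the new ~fit_bbox parameter enabled, the box is no longer spanned only by the two back-projected corners at the centre depth: every pixel inside the 2D box is back-projected, zero-depth readings and points outside the coarse corner-derived bounds are dropped, and the axis-aligned box is fitted to the survivors. A small self-contained illustration of that filter-then-fit step on synthetic points (all values below are made up; create_from_points is the same Open3D call used in the hunk above):

import numpy as np
import open3d as o3d

# Toy cloud: one object's points plus an invalid zero-depth reading and an outlier.
rng = np.random.default_rng(0)
points = rng.uniform([-0.1, -0.1, 0.9], [0.1, 0.1, 1.1], size=(500, 3))
points = np.vstack([points, [[0.0, 0.0, 0.0], [5.0, 5.0, 5.0]]])

min_bound = np.array([-0.2, -0.2, 0.5])   # coarse bounds from the corner back-projection
max_bound = np.array([0.2, 0.2, 1.5])

# Same filtering idea as the fit_bbox branch: drop non-positive depths and
# anything outside the coarse bounds, then fit the box to what is left.
points = points[points[:, 2] > 0]
points = points[np.logical_and(np.all(points > min_bound, axis=1),
                               np.all(points < max_bound, axis=1))]

box = o3d.geometry.AxisAlignedBoundingBox.create_from_points(
    o3d.utility.Vector3dVector(points))
print(box.get_center(), box.get_extent())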
@@ -304,9 +320,11 @@ def __poseDescriptionProcessing(self, data, description2d : Description2D, header):
      return description3d


- def __createDescription3D(self, source_data, description2d, header):
+ def __createDescription3D(self, source_data, description2d : Description2D, header):
      if description2d.type in self.DESCRIPTION_PROCESSING_ALGORITHMS:
-         return self.DESCRIPTION_PROCESSING_ALGORITHMS[description2d.type](source_data, description2d, header)
+         description3D : Description3D = self.DESCRIPTION_PROCESSING_ALGORITHMS[description2d.type](source_data, description2d, header)
+         description3D.bbox2D = description2d.bbox
+         return description3D
      else:
          return None
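__createDescription3D now keeps the processed Description3D, copies the originating 2D bounding box into its bbox2D field, and only then returns it, so downstream consumers can relate each 3D description back to the image-space detection. A minimal sketch of that dispatch-and-annotate pattern; the dataclasses are stand-ins for the butia_vision_msgs types, with illustrative fields only:

from dataclasses import dataclass, field

@dataclass
class BBox2D:                        # stand-in, not the real message layout
    center_x: float = 0.0
    center_y: float = 0.0
    width: float = 0.0
    height: float = 0.0

@dataclass
class Description2D:                 # stand-in
    type: str = 'detection'
    bbox: BBox2D = field(default_factory=BBox2D)

@dataclass
class Description3D:                 # stand-in
    bbox2D: BBox2D = field(default_factory=BBox2D)

def process_detection(source_data, description2d, header):
    return Description3D()

DESCRIPTION_PROCESSING_ALGORITHMS = {'detection': process_detection}

def create_description3d(source_data, description2d, header):
    # Dispatch on the 2D description type, then attach the 2D bbox before returning.
    algorithm = DESCRIPTION_PROCESSING_ALGORITHMS.get(description2d.type)
    if algorithm is None:
        return None
    description3d = algorithm(source_data, description2d, header)
    description3d.bbox2D = description2d.bbox
    return description3d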

@@ -415,7 +433,8 @@ def __readParameters(self):
      self.publish_debug = rospy.get_param('~publish_debug', False)
      self.publish_markers = rospy.get_param('~publish_markers', True)
      self.color = rospy.get_param('~color', [255, 0, 0])
-     self.depth_mean_error = rospy.get_param('~depth_mean_error', 0.017)
+     self.depth_mean_error = rospy.get_param('~depth_mean_error', 0.05)
+     self.fit_bbox = rospy.get_param('~fit_bbox', False)

  if __name__ == '__main__':
      rospy.init_node('image2world_node', anonymous = True)
4 changes: 1 addition & 3 deletions butia_vision_launchfiles/yolo_tracker_recognition.launch
@@ -11,12 +11,10 @@
  <arg name="config_file" default="yolo_tracker_recognition.yaml"/>
  <arg name="node_name" default="yolo_tracker_recognition"/>
  <arg name="camera" default="realsense"/>
- <arg name="launch_image2world" default="true"/>
- <arg name="launch_bvb" default="true"/>

  <machine name="localhost" address="localhost" if="$(arg use_machine)"/>

- <include file="$(find butia_vision_bridge)/launch/butia_vision_bridge.launch" if="$(arg launch_bvb)">
+ <include file="$(find butia_vision_bridge)/launch/butia_vision_bridge.launch">
      <arg name="machine" value="$(arg bvb_machine)" />
      <arg name="use_machine" value="$(arg use_machine)"/>
      <arg name="output" value="$(arg output)"/>
2 changes: 1 addition & 1 deletion butia_vision_msgs
