''' File to perform simple scan-to-map alignment and map creation.'''

from __future__ import division

import sys
import copy
import cPickle

import numpy as np

import roslib; roslib.load_manifest('mrol_ros')
import rospy
import tf
import message_filters
from sensor_msgs.msg import PointCloud2, CameraInfo, Image

import mrol_mapping.poseutil as poseutil
import mrol_mapping.mrol as mrol
import mrol_mapping.util as util
import mrol_mapping.occupiedlist as occupiedlist
import mrol_mapping.mapper as mapper
import mrol_mapping.pointcloud as pointcloud

import rosutils_mrol.rosutils_mrol as rosutils
from thread import allocate_lock
from threading import Thread
CAMERA_FRAME_ID = "camera_rgb_optical_frame"

RESOLUTION_PARAM_DEFAULT = 0.10  # 0.04 # 0.05
RESOLUTION_PARAM = "~resolution"
"""Resolution for mapping using MROL.

Resolution determines how fine the map is and also how accurate the alignment
is, but a finer resolution is slower.
"""

MINIMAL_REQUIRED_OVERLAP_DEFAULT = 0.50
MINIMAL_REQUIRED_OVERLAP = "~min_overlap"
"""Minimal overlap required between the global map and a new scan so that the
new scan can be included in the global map.
"""
MROL_LEVELS = "~mrol_levels"
"""Multi-resolution levels for mapping.

If the resolution is small then this should be increased, because the overlap
is restricted to an area of pow(2, levels) * resolution. Levels determines the
region of convergence: more levels = larger region of convergence, fewer
levels = faster but a smaller region of convergence.
"""
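# Illustrative arithmetic only (the values below are hypothetical, not
# necessarily the shipped defaults): with resolution = 0.10 m and
# mrol_levels = 4, the alignment would only converge within a region of
# roughly pow(2, 4) * 0.10 = 1.6 m around the initial pose guess.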
OUT_SCAN_TOPIC_DEFAULT = "/mrol_scan"
OUT_SCAN_TOPIC = "~mrol_out_scan_topic"
"""Topic on which MROL scan should be published"""

OUT_MAP_TOPIC_DEFAULT = "/mrol"
OUT_MAP_TOPIC = "~mrol_out_map_topic"
"""Topic on which MROL map should be published"""
BEST_POSE_FRAME = "/mrol_bestpose"
BEST_POSE_GUESS_FRAME = "/bestposeguess"
"""Frame used as a static frame relative to which point clouds are published."""

MAX_RANGE_DEFAULT = 10.0
MAX_RANGE = "~max_range"
"""Data beyond this range will be discarded."""
def __init__(self, num_base_scans=2, use_ICP=False):
rospy.loginfo("Initializing")
# can use mrol's inherent alignment method, or ICP
self.use_ICP = use_ICP
import icp_mrol.icp_mrol as icp
rospy.loginfo("Using ICP for alignment")
# make an ICP instance to do the alignment
self.icp = icp.ICP(reciprocal=False, visualise=False)
self.res = rospy.get_param(RESOLUTION_PARAM, RESOLUTION_PARAM_DEFAULT)
self.levels = rospy.get_param(MROL_LEVELS, MROL_LEVELS_DEFAULT)
self.min_overlap = rospy.get_param(MINIMAL_REQUIRED_OVERLAP,
MINIMAL_REQUIRED_OVERLAP_DEFAULT)
self.max_range = rospy.get_param(MAX_RANGE, MAX_RANGE_DEFAULT)
# variable initialization
self.listener = tf.TransformListener(
rospy.Duration(20)) # max_cache_time
self.bestpose = None
self.bestpose_lock = allocate_lock()
self.bestposeguess = None
self.bestposeguess_lock = allocate_lock()
self.odom_wrt_map = None
self.odom_wrt_map_lock = allocate_lock()
self.lastodompose = None
self.pc_frame_id_lock = allocate_lock()
self.pc_frame_id = None
#self.mapper = mapper.VoxelMap(resolution=self.res, levels=self.levels)
self.lock = allocate_lock() # lock to have thread-safety
self.num_base_scans = num_base_scans
self.pointcloud_queue = util.Queue(maxsize=1)
self.bestpose_queue = util.Queue(maxsize=1)
self.bestpose_queue.put_force(poseutil.Pose3D())
self.voxel_queue = util.Queue(maxsize=1)
self.scan_queue = util.Queue(maxsize=1)
Thread(target=self.publish_map_thread).start()
# Transform broadcast should be at a higher frequency than the map
# scan building, so it's better to have a separate thread for this.
Thread(target=self.broadcast_transform_thread).start()
# Let the mapping be in another thread
Thread(target=self.mapping_thread).start()
Thread(target=self.reframing_thread).start()
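# Overview of the threads and queues set up above (a summary of the code in
# this file): PCcallback/DepthImageCallback push point clouds tagged with a
# pose guess into pointcloud_queue; mapping_thread consumes them via
# mapper.buildmap and reports results through bestpose_queue and voxel_queue;
# publish_map_thread turns voxel_queue data into PointCloud2 messages;
# broadcast_transform_thread and reframing_thread publish the corresponding
# tf frames. All queues have maxsize=1, so each consumer effectively works on
# only the most recent item.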
def reframing_thread(self):
# For debugging information only
rate = rospy.Rate(10.0)
br = tf.TransformBroadcaster()
while not rospy.is_shutdown():
with self.odom_wrt_map_lock:
if self.odom_wrt_map is None:
robot_wrt_odom = self.get_pose(CAMERA_FRAME_ID, None)
robot_wrt_map = np.dot(self.odom_wrt_map, robot_wrt_odom)
# # The /kinect_rgb_optical_frame gets updated a little later than
# # the information we get from lookupTransform with rospy.Time(0).
# # This is probably because of the PointCloud that rviz receives
# # over /kinect_rgb_optical_frame. Perhaps rviz tries to get the
# # frame at the timestamp of the PointCloud.
# translation = tuple(robot_wrt_odom[:3, 3])
# quat = tf.transformations.quaternion_from_matrix(robot_wrt_odom)
# br.sendTransform(translation,
translation = tuple(robot_wrt_map[:3, 3])
quat = tf.transformations.quaternion_from_matrix(robot_wrt_map)
br.sendTransform(translation,
def publish_map_thread(self):
topic_name = rospy.get_param(OUT_MAP_TOPIC, OUT_MAP_TOPIC_DEFAULT)
pub = rospy.Publisher(topic_name, PointCloud2)
while not rospy.is_shutdown():
rospy.loginfo("publisher: Waiting for map to be generated")
voxel_data, voxel_res = self.voxel_queue.get()
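# The first three columns of voxel_data hold voxel indices; scaling them by
# voxel_res converts them to metric xyz. Columns from index 4 onwards
# (presumably colour data) are passed through; column 3 (presumably an
# occupancy count) is dropped.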
pointsROS = np.hstack([voxel_data[:,:3]*voxel_res, voxel_data[:,4:]])
pcROS = rosutils.xyzs2pc2(pointsROS, rospy.Time.now(), counter,
frame_id=GLOBAL_FRAME)
pub.publish(pcROS)
rospy.loginfo("publisher: Map published")
def mapping_thread(self):
def timeout_func():
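# Timeout used when blocking on pointcloud_queue: 1 s while ROS is shutting
# down (so the mapping thread notices the shutdown quickly), 30 s otherwise.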
return 1 if rospy.is_shutdown() else 30
self.pointcloud_queue.set_timeout(timeout_func)
mapper.buildmap(self.pointcloud_queue,
visualise=False, # visualise using rviz
odometry_map=False, # odometry is unreliable
bestpose_queue=self.bestpose_queue,
voxel_queue=self.voxel_queue
def PCcallback(self, msg):
rospy.loginfo("callback: Received point cloud")
xyzs = pointcloud.PointCloud(
rosutils.pc22xyzs(msg, max_range=self.max_range))
# Nick: re-align x fwd, z up
# (xyzs.points[:,:3], _) = poseutil.transformPoints(xyzs.points[:,:3],
# [0,0,0,-np.pi/2.,0,-np.pi/2.])
# adjustment = poseutil.Pose3D([0,0,0,-np.pi/2.,0,-np.pi/2.]).getMatrix()
self.process_pointcloud(xyzs, msg.header.frame_id, msg.header.stamp)
def DepthImageCallback(self, msg):
rospy.loginfo("callback: Received depth image")
xyzs = rosutils.imgmsg2xyz(msg, max_range=self.max_range)
assert(len(xyzs.points) > 0)
if msg.header.frame_id != CAMERA_FRAME_ID:
if msg.header.frame_id != "kinect_rgb_optical_frame":
rospy.logerr("frame:{0}".format(msg.header.frame_id))
msg.header.frame_id = CAMERA_FRAME_ID
# Nick: re-align x fwd, z up
# Vikas: align CAMERA_FRAME_ID and kinect_rgb_optical_frame
(xyzs.points[:,:3], _) = poseutil.transformPoints(
xyzs.points[:,:3], [0,0,0,-np.pi/2., -np.pi/2.,-np.pi])
self.process_pointcloud(xyzs, msg.header.frame_id, msg.header.stamp)
def process_pointcloud(self, xyzs, frame_id, timestamp):
robot_wrt_odom = None
# Get the transformation of the PointCloud frame w.r.t. the odom global frame.
with self.pc_frame_id_lock:
self.pc_frame_id = frame_id
rospy.loginfo("Received point cloud on"
+ " frame_id:{0}".format(frame_id))
robot_wrt_odom = self.get_pose(frame_id, timestamp)
rospy.loginfo("{0}: {1}".format(CAMERA_FRAME_ID,
poseutil.posetuple(robot_wrt_odom)))
with self.odom_wrt_map_lock:
if self.odom_wrt_map is None:
self.odom_wrt_map = np.linalg.inv(robot_wrt_odom)
# Without odometry, make the map origin coincide with odom
with self.odom_wrt_map_lock:
self.odom_wrt_map = tf.transformations.identity_matrix()
with self.odom_wrt_map_lock:
odom_wrt_map = copy.deepcopy(self.odom_wrt_map)
rospy.loginfo("/odom_wrt_map : {0}".format(
poseutil.posetuple(odom_wrt_map)))
# Get the bestpose as returned by the mrol nearest neighbour algorithm
bestpose = self.bestpose_queue.get_nowait().getMatrix()
# Creating a copy for publishing frame in a different thread
with self.bestpose_lock:
rospy.loginfo("/bestpose : {0}".format(
poseutil.posetuple(bestpose)))
self.bestpose = copy.deepcopy(bestpose)
rospy.logerr("Unable to get bestpose from mrol")
# If no new result is available, use the last bestpose
with self.bestpose_lock:
if self.bestpose is not None:
bestpose = self.bestpose
bestposeguess = self.guess_pose(robot_wrt_odom, bestpose)
# Creating a copy for publishing frame in a different thread
with self.bestposeguess_lock:
rospy.loginfo("/bestposeguess: {0}".format(
poseutil.posetuple(bestposeguess)))
self.bestposeguess = copy.deepcopy(bestposeguess)
# Save this odometry pose for future use
self.lastodompose = robot_wrt_odom
xyzs.pose = poseutil.Pose3D()
xyzs.pose.setMatrix(bestposeguess)
self.pointcloud_queue.put_force(xyzs)
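# put_force on a maxsize=1 queue presumably replaces any scan the mapper has
# not consumed yet, so the mapper always works on the newest cloud rather
# than falling behind the sensor.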
rospy.loginfo("callback: Finished processing point cloud")
def guess_pose(self, robot_wrt_odom, bestpose):
# Use incremental change in odometry to correct the last bestpose, for
# a new bestposeguess
bestposeguess = self.guess_pose_incremental_odom(robot_wrt_odom,
bestpose)
# # For absolute dependence on odometry. Just project robot in map
# bestposeguess = np.dot(self.odom_wrt_map, robot_wrt_odom)
if bestpose is not None:
bestposeguess = bestpose
else:
# This is the first time we are getting a point cloud.
# bestposeguess is best left as identity
bestposeguess = tf.transformations.identity_matrix()
return bestposeguess
def guess_pose_incremental_odom(self, robot_wrt_odom, bestpose):
"""Use incremental change in odometry to correct the last bestpose, for
a new bestposeguess."""
if self.lastodompose is not None:
change_in_position = np.dot(np.linalg.inv(self.lastodompose),
robot_wrt_odom)
bestposeguess = np.dot(bestpose, change_in_position)
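# In matrix terms (restating the two lines above): with T_odom_prev and
# T_odom_now the camera poses reported by odometry, the relative motion is
# inv(T_odom_prev) . T_odom_now, and applying it to the last map pose gives
# bestposeguess = bestpose . inv(T_odom_prev) . T_odom_now. This assumes the
# odometry drift over a single scan interval is negligible.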
else:
# This is the first time we are getting a point cloud.
# bestposeguess is best left as identity
bestposeguess = tf.transformations.identity_matrix()
return bestposeguess
def get_pose(self, frame_id, timestamp):
# get transformations between world and robot at time of message
# to have a guess for the position
try:
# We want the transform at time timestamp, but that time is
# usually not available. We usually get an exception that the cache is
# empty. So we'll look for the latest transform available.
# self.listener.waitForTransform(
# ODOM_FRAME, frame_id, timestamp, rospy.Duration(1.0))
# (trans, quat) = self.listener.lookupTransform(
# ODOM_FRAME, frame_id, timestamp)
(trans, quat) = self.listener.lookupTransform(
ODOM_FRAME, frame_id, rospy.Time(0))
now = rospy.Time.now()
if timestamp and now != timestamp:
rospy.loginfo("Now:{0}; Timestamp:{1}".format(now, timestamp))
except (tf.LookupException, tf.ConnectivityException):
return quat2mat(quat, trans)
def broadcast_transform_thread(self):
"""Thread to handle transform broadcast."""
rate = rospy.Rate(10.0)
br = tf.TransformBroadcaster()
while not rospy.is_shutdown():
self.broadcast_one_transform(
self.bestposeguess_lock,
BEST_POSE_GUESS_FRAME,
self.broadcast_one_transform(
self.broadcast_one_transform(
self.odom_wrt_map_lock,
with self.pc_frame_id_lock:
ref_ros_frame = self.pc_frame_id
if ref_ros_frame is not None:
# If we publish ref_ros_frame wrt GLOBAL_FRAME then we
# usually get the following warning and rviz refuses
# to visualize the relationship b/w kinect_rgb_optical_frame
# [ WARN] [1331927940.889661623]: TF_OLD_DATA ignoring data from
# the past for frame /kinect_rgb_optical_frame at time 1.33039e+09
# according to authority /Kinect_mapper_demo
# So we publish map wrt /kinect_rgb_optical_frame
with self.bestposeguess_lock:
map_wrt_PCframe = np.linalg.inv(self.bestposeguess)
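# bestposeguess is the camera pose expressed in the map frame, so its
# inverse is the map pose expressed in the camera (point cloud) frame;
# broadcasting that inverse with ref_ros_frame as the parent encodes the
# same relative transform, just with parent and child swapped.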
self.broadcast_one_transform(
self.bestposeguess_lock,
parent_frame_id=ref_ros_frame,
# camera does not show up in rviz
self.broadcast_one_transform(
self.bestposeguess_lock,
parent_frame_id='/camera_rgb_frame',
def broadcast_one_transform(self, lock, matrix, frame_id, broadcaster,
parent_frame_id=GLOBAL_FRAME):
translation = tuple(matrix[:3, 3])
quat = tf.transformations.quaternion_from_matrix(matrix)
broadcaster.sendTransform(translation,
def quat2mat(quat, trans):
"""Converts quaternion and translation to 4x4 transformation matrix"""
return np.dot(np.dot is used here as matrix multiplication
tf.transformations.translation_matrix(trans),
tf.transformations.quaternion_matrix(quat))
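# The composition order matters: translation_matrix(trans) is applied after
# quaternion_matrix(quat), i.e. the resulting matrix rotates a point first
# and then translates it, which matches the (trans, quat) pairs returned by
# tf lookupTransform.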
##########################
# Initialize ROS interface
##########################
rospy.init_node('Kinect_mapper_demo', log_level=rospy.DEBUG)
# sleep a little - this avoids some ROS bugs
orm = RosMapper(num_base_scans=2, use_ICP=ICP)
# num_base_scans defines how many scans should be added without the minimum
# overlap check; this has to be at least 1, otherwise no map can be created
# rospy.Subscriber("/camera/rgb/points", PointCloud2, orm.PCcallback, queue_size=5)
# depth_sub = message_filters.Subscriber("/camera/depth/image", Image)
# image_sub = message_filters.Subscriber("/camera/rgb/color_image", Image)
# info_sub = message_filters.Subscriber("/camera/rgb/camera_info",
# ts = message_filters.TimeSynchronizer([depth_sub, info_sub], 10)
# ts.registerCallback(orm.DepthImageCallback)
rospy.Subscriber("/camera/depth/image_raw", Image, orm.DepthImageCallback, queue_size=5)
def profile_yappi(func):
rospy.logwarn("Yappi not installed. Not profiling")
with open("profile_Kinet_mapper_demo.yappi", "w") as f:
cPickle.dump(yappi.get_stats(yappi.SORTTYPE_TTOTAL,
yappi.SORTORDER_ASCENDING,
def run_and_catch_interrupt(ICP):
except rospy.ROSInterruptException: pass

if __name__ == '__main__':
ICP = '-ICP' in sys.argv
profile_yappi(lambda: run_and_catch_interrupt(ICP))