#!/usr/bin/env python3
#####################################################
## librealsense T265 to MAVLink ##
#####################################################
# This script assumes the pyrealsense2.*.so file is found in the same directory as this script
# Install required packages:
# pip3 install pyrealsense2
# pip3 install transformations
# pip3 install pymavlink
# pip3 install apscheduler
# pip3 install pyserial
# This code is adapted directly from
# https://github.com/thien94/vision_to_mavros/blob/master/scripts/t265_to_mavlink.py
# It is the same code with many things removed, using dronekit to communicate with the drone.
# Please make sure that code is understood, as this code may still have extraneous remnants of it.
# Set the path for IDLE
import sys
sys.path.append("/usr/local/lib/")
# Set MAVLink protocol to 2.
import os
os.environ["MAVLINK20"] = "1"
# Import the libraries
import pyrealsense2 as rs
import numpy as np
import transformations as tf
import math as m
import time
import argparse
import threading
import signal
from time import sleep
from apscheduler.schedulers.background import BackgroundScheduler
from dronekit import *
from pymavlink import mavutil
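# NOTE: the wildcard import from dronekit is what provides connect() and VehicleMode used below.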
# Replacement of the standard print() function to flush the output
def progress(string):
    print(string, file=sys.stdout)
    sys.stdout.flush()
#######################################
# Parameters
#######################################
# Transformation to convert different camera orientations to NED convention. Replace camera_orientation_default for your configuration.
# 0: Forward, USB port to the right
# 1: Downfacing, USB port to the right
# 2: Forward, 45 degree tilted down
# Important note for downfacing camera: you need to tilt the vehicle's nose up a little - not flat - before you run the script,
# otherwise the initial yaw will be randomized. Read here for more details: https://github.com/IntelRealSense/librealsense/issues/4080
# Tilt the vehicle to any other side and the yaw might not be as stable.
camera_orientation_default = 0
# Monitor the user's keyboard input; can only be used when the script is run from a terminal
enable_user_keyboard_input = True
# Default global position for EKF home/ origin
enable_auto_set_ekf_home = False
home_lat = 151269321 # Somewhere random
home_lon = 16624301 # Somewhere random
home_alt = 163000 # Somewhere random
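# NOTE: in this adapted script, enable_auto_set_ekf_home and the home_lat/lon/alt defaults above are not
# referenced anywhere else; the main code below calls set_default_global_origin() with explicit coordinates.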
# TODO: Taken care of by ArduPilot, so can be removed (once the handling on AP side is confirmed stable)
# In NED frame, offset from the IMU or the center of gravity to the camera's origin point
body_offset_enabled = 0
body_offset_x = 0 # In meters (m)
body_offset_y = 0 # In meters (m)
body_offset_z = 0 # In meters (m)
# Global scale factor, position x y z will be scaled up/down by this factor
scale_factor = 1.0
# Enable using yaw from compass to align north (zero degree is facing north)
compass_enabled = 1
# pose data confidence: 0x0 - Failed / 0x1 - Low / 0x2 - Medium / 0x3 - High
pose_data_confidence_level = ('FAILED', 'Low', 'Medium', 'High')
# lock for thread synchronization
lock = threading.Lock()
mavlink_thread_should_exit = False
# default exit code is failure - a graceful termination with a
# terminate signal is possible.
exit_code = 1
#######################################
# Global variables
#######################################
# FCU connection variables
# Camera-related variables
pipe = None
pose_sensor = None
linear_accel_cov = 0.01
angular_vel_cov = 0.01
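# These two values seed the covariance estimate built in send_pos(): cov_pose = linear_accel_cov * 10^(3 - tracker_confidence)
# and cov_twist = angular_vel_cov * 10^(1 - tracker_confidence), so lower T265 confidence yields larger covariance.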
# Data variables
data = None
prev_data = None
H_aeroRef_aeroBody = None
V_aeroRef_aeroBody = None
heading_north_yaw = None
current_confidence_level = None
current_time_us = 0
# Incremented every time pose jumping or relocalization happens
# See here: https://github.com/IntelRealSense/librealsense/blob/master/doc/t265.md#are-there-any-t265-specific-options
# For AP, a non-zero "reset_counter" would mean that we could be sure that the user's setup was using mavlink2
reset_counter = 1
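# NOTE: reset_counter is incremented on pose jumps and relocalization events, but it is not currently
# transmitted to the autopilot - the reset_counter argument in send_pos() is commented out.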
#######################################
# Parsing user's inputs
#######################################
parser = argparse.ArgumentParser(description='Sends RealSense T265 pose data to the vehicle over MAVLink')
parser.add_argument('--debug_enable', type=int,
                    help="Enable debug messages on terminal")
args = parser.parse_args()
debug_enable = args.debug_enable
if body_offset_enabled == 1:
    progress("INFO: Using camera position offset: Enabled, x y z is %s %s %s" % (body_offset_x, body_offset_y, body_offset_z))
else:
    progress("INFO: Using camera position offset: Disabled")
if compass_enabled == 1:
    progress("INFO: Using compass: Enabled. Heading will be aligned to north.")
else:
    progress("INFO: Using compass: Disabled")
# if scale_calib_enable == True:
# progress("\nINFO: SCALE CALIBRATION PROCESS. DO NOT RUN DURING FLIGHT.\nINFO: TYPE IN NEW SCALE IN FLOATING POINT FORMAT\n")
# else:
# if scale_factor == 1.0:
# progress("INFO: Using default scale factor %s" % scale_factor)
# else:
# progress("INFO: Using scale factor %s" % scale_factor)
camera_orientation = camera_orientation_default
progress("INFO: Using default camera orientation %s" % camera_orientation)
if camera_orientation == 0:     # Forward, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)
elif camera_orientation == 1:   # Downfacing, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.array([[0,1,0,0],[1,0,0,0],[0,0,-1,0],[0,0,0,1]])
elif camera_orientation == 2:   # Forward, 45 degree tilted down
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = (tf.euler_matrix(m.pi/4, 0, 0)).dot(np.linalg.inv(H_aeroRef_T265Ref))
else:                           # Default is facing forward, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)
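# Naming convention: H_A_B is a 4x4 homogeneous transform expressing frame B in frame A, so the main loop
# composes H_aeroRef_aeroBody = H_aeroRef_T265Ref * H_T265Ref_T265body * H_T265body_aeroBody to convert the
# raw T265 pose into the NED (aeronautic) convention expected by ArduPilot.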
if not debug_enable:
    debug_enable = 0
else:
    debug_enable = 1
    np.set_printoptions(precision=4, suppress=True) # Format output on terminal
    progress("INFO: Debug messages enabled.")
#######################################
# Functions - MAVLink
#######################################
# https://mavlink.io/en/messages/common.html#VISION_POSITION_ESTIMATE
def send_pos():
    global current_time_us, H_aeroRef_aeroBody, reset_counter

    if H_aeroRef_aeroBody is not None:
        #print(H_aeroRef_aeroBody[0][3], H_aeroRef_aeroBody[1][3], H_aeroRef_aeroBody[2][3])
        # Setup angle data
        rpy_rad = np.array(tf.euler_from_matrix(H_aeroRef_aeroBody, 'sxyz'))

        # Setup covariance data, which is the upper right triangle of the covariance matrix, see here: https://files.gitter.im/ArduPilot/VisionProjects/1DpU/image.png
        # Attempt #1: following this formula https://github.com/IntelRealSense/realsense-ros/blob/development/realsense2_camera/src/base_realsense_node.cpp#L1406-L1411
        cov_pose = linear_accel_cov * pow(10, 3 - int(data.tracker_confidence))
        cov_twist = angular_vel_cov * pow(10, 1 - int(data.tracker_confidence))
        covariance = np.array([cov_pose, 0, 0, 0, 0, 0,
                               cov_pose, 0, 0, 0, 0,
                               cov_pose, 0, 0, 0,
                               cov_twist, 0, 0,
                               cov_twist, 0,
                               cov_twist])

        # Send the message
        # NOTE: the covariance and reset_counter arguments are currently left out of the encode call below.
        msg = vehicle.message_factory.vision_position_estimate_encode(
            current_time_us,           # us: Timestamp (UNIX time or time since system boot)
            # H_T265Ref_T265body[0][3],
            # H_T265Ref_T265body[1][3],
            # H_T265Ref_T265body[2][3],
            H_aeroRef_aeroBody[0][3],  # Global X position
            H_aeroRef_aeroBody[1][3],  # Global Y position
            H_aeroRef_aeroBody[2][3],  # Global Z position
            rpy_rad[0],                # Roll angle
            rpy_rad[1],                # Pitch angle
            rpy_rad[2]                 # Yaw angle
            #covariance,               # covariance: upper right triangle (states: x, y, z, roll, pitch, yaw)
            #reset_counter             # reset_counter: Estimate reset counter
        )
        vehicle.send_mavlink(msg)
        vehicle.flush()
# Send a mavlink SET_GPS_GLOBAL_ORIGIN message (http://mavlink.org/messages/common#SET_GPS_GLOBAL_ORIGIN), which allows us to use local position information without a GPS.
def set_default_global_origin(lat, lon, alt):
    # Lat/lon are sent in degE7; the altitude field of the origin message is hard-coded to 0 here
    msg = vehicle.message_factory.set_gps_global_origin_encode(
        0,
        int(lat*10000000),
        int(lon*10000000),
        0*1000)
    vehicle.send_mavlink(msg)
    vehicle.flush()
    time.sleep(1)

    msg = vehicle.message_factory.command_long_encode(
        0, 0,
        mavutil.mavlink.MAV_CMD_DO_SET_HOME,
        0, 0, 0, 0, 0,
        lat, lon, alt)
    vehicle.send_mavlink(msg)
    vehicle.flush()
# Listen to attitude data to acquire heading when compass data is enabled
def att_msg_callback(self, attr_name, value):
    global heading_north_yaw
    if debug_enable:
        progress(str(time.time()) + " ardupilot %s" % value)
    if heading_north_yaw is None:
        heading_north_yaw = value.yaw
        progress("INFO: Received first ATTITUDE message with heading yaw %.2f degrees" % m.degrees(heading_north_yaw))
def mode_msg_callback(self, attr_name, value):
    progress("MODE: %s" % value)

def local_pos_msg_callback(self, attr_name, value):
    progress(str(current_time_us) + "ardupilot %s" % value)
#######################################
# Functions - T265
#######################################
# Currently not used
def increment_reset_counter():
    global reset_counter
    if reset_counter >= 255:
        reset_counter = 1
    reset_counter += 1
# List of notification events: https://github.com/IntelRealSense/librealsense/blob/development/include/librealsense2/h/rs_types.h
# List of notification API: https://github.com/IntelRealSense/librealsense/blob/development/common/notifications.cpp
def realsense_notification_callback(notif):
    progress("INFO: T265 event: " + str(notif))
    if notif.get_category() is rs.notification_category.pose_relocalization:
        increment_reset_counter()
        progress('INFO: Relocalization detected') # send_msg_to_gcs is not defined in this adapted script
def realsense_connect():
    global pipe, pose_sensor

    # Declare RealSense pipeline, encapsulating the actual device and sensors
    pipe = rs.pipeline()

    # Build config object before requesting data
    cfg = rs.config()

    # Enable the stream we are interested in
    cfg.enable_stream(rs.stream.pose) # Positional data

    # Configure callback for relocalization event
    device = cfg.resolve(pipe).get_device()
    pose_sensor = device.first_pose_sensor()
    pose_sensor.set_notifications_callback(realsense_notification_callback)

    # Start streaming with requested config
    pipe.start(cfg)
#######################################
# Functions - Miscellaneous
#######################################
# Monitor user input from the terminal and perform action accordingly
#######################################
# Main code starts here
#######################################
try:
    progress("INFO: pyrealsense2 version: %s" % str(rs.__version__))
except Exception:
    # fail silently
    pass
progress("INFO: Starting Vehicle communications")
#vehicle = connect('udp:0.0.0.0:14450', wait_ready=False)
vehicle = connect('/dev/ttyS0', wait_ready=False, baud=115200)
vehicle.wait_ready(True, timeout=300)
#vehicle.add_attribute_listener('location.local_frame', local_pos_msg_callback)
vehicle.add_attribute_listener('attitude', att_msg_callback)
vehicle.add_attribute_listener('mode', mode_msg_callback)
print("\nConnected")
print("Setting home")
time.sleep(1)
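# Hard-coded EKF origin/home (approximately 40.44 N, -79.99 W); ArduPilot needs a global origin before
# local vision position estimates can be used for navigation without a GPS.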
set_default_global_origin(40.44,-79.99,0)
time.sleep(1)
print(vehicle.location.global_frame)
vehicle.mode = VehicleMode("STABILIZE") #INITIAL MODE (MANUAL FLIGHT)
print(vehicle.battery)
def user_input_monitor():
    global scale_factor
    while True:
        # Add new action here according to the key pressed.
        # Enter: switch to LAND mode when the user presses Enter
        try:
            c = input()
            if c == "":
                vehicle.mode = VehicleMode("LAND") # HIT ENTER TO ENTER "LAND MODE"
            else:
                progress("Got keyboard input %s" % c)
        except IOError:
            pass
# connecting and configuring the camera is a little hit-and-miss.
# Start a timer and rely on a restart of the script to get it working.
# Configuring the camera appears to block all threads, so we can't do
# this internally.
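# No SIGALRM handler is installed, so if realsense_connect() hangs for more than 5 seconds the default
# signal action terminates the process; the script is expected to be restarted externally.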
# send_msg_to_gcs('Setting timer...')
signal.setitimer(signal.ITIMER_REAL, 5) # seconds...
#send_msg_to_gcs('Connecting to camera...')
realsense_connect()
#send_msg_to_gcs('Camera connected.')
signal.setitimer(signal.ITIMER_REAL, 0) # cancel alarm
# A separate thread to monitor user input
if enable_user_keyboard_input:
    user_keyboard_input_thread = threading.Thread(target=user_input_monitor)
    user_keyboard_input_thread.daemon = True
    user_keyboard_input_thread.start()
# gracefully terminate the script if an interrupt signal (e.g. ctrl-c)
# is received. This is considered to be abnormal termination.
main_loop_should_quit = False
def sigint_handler(sig, frame):
    global main_loop_should_quit
    main_loop_should_quit = True
signal.signal(signal.SIGINT, sigint_handler)
# gracefully terminate the script if a terminate signal is received
# (e.g. kill -TERM).
def sigterm_handler(sig, frame):
    global main_loop_should_quit
    main_loop_should_quit = True
    global exit_code
    exit_code = 0
signal.signal(signal.SIGTERM, sigterm_handler)
if compass_enabled == 1:
    time.sleep(1) # Wait a short while for yaw to be correctly initiated

if debug_enable:
    f = open("t265Log.txt", "w") # Position log file
counter = 0
##############################################################
# MAIN LOOP - MOST IMPORTANT
##############################################################
try:
    while not main_loop_should_quit:

        # Wait for the next set of frames from the camera
        frames = pipe.wait_for_frames()

        # Fetch pose frame
        pose = frames.get_pose_frame()

        # Process data
        if pose:
            with lock:
                # Store the timestamp for MAVLink messages
                current_time_us = int(round(time.time() * 1000000))

                # Pose data consists of translation and rotation
                data = pose.get_pose_data()

                # Confidence level value from T265: 0-3, remapped to 0 - 100: 0% - Failed / 33.3% - Low / 66.6% - Medium / 100% - High
                current_confidence_level = float(data.tracker_confidence * 100 / 3)

                # In transformations, Quaternions w+ix+jy+kz are represented as [w, x, y, z]!
                H_T265Ref_T265body = tf.quaternion_matrix([data.rotation.w, data.rotation.x, data.rotation.y, data.rotation.z])
                H_T265Ref_T265body[0][3] = data.translation.x * scale_factor
                H_T265Ref_T265body[1][3] = data.translation.y * scale_factor
                H_T265Ref_T265body[2][3] = data.translation.z * scale_factor

                # Transform to aeronautic coordinates (body AND reference frame!)
                H_aeroRef_aeroBody = H_aeroRef_T265Ref.dot(H_T265Ref_T265body.dot(H_T265body_aeroBody))

                # Calculate GLOBAL XYZ speed (speed from T265 is already GLOBAL)
                V_aeroRef_aeroBody = tf.quaternion_matrix([1, 0, 0, 0])
                V_aeroRef_aeroBody[0][3] = data.velocity.x
                V_aeroRef_aeroBody[1][3] = data.velocity.y
                V_aeroRef_aeroBody[2][3] = data.velocity.z
                V_aeroRef_aeroBody = H_aeroRef_T265Ref.dot(V_aeroRef_aeroBody)

                s = str(time.time()) + " " + str(data.translation.x) + " " + str(data.translation.y) + " " + str(data.translation.z)
                if debug_enable:
                    f.write(s + "\n")

                # Check for pose jump and increment reset_counter
                if prev_data is not None:
                    delta_translation = [data.translation.x - prev_data.translation.x, data.translation.y - prev_data.translation.y, data.translation.z - prev_data.translation.z]
                    delta_velocity = [data.velocity.x - prev_data.velocity.x, data.velocity.y - prev_data.velocity.y, data.velocity.z - prev_data.velocity.z]
                    position_displacement = np.linalg.norm(delta_translation)
                    speed_delta = np.linalg.norm(delta_velocity)

                    # Pose jump is indicated when position changes abruptly. The behavior is not well documented yet (as of librealsense 2.34.0)
                    jump_threshold = 0.1        # in meters, from trial and error; should be relative to how frequently position data is obtained (200Hz for the T265)
                    jump_speed_threshold = 20.0 # in m/s, from trial and error; should be relative to how frequently velocity data is obtained (200Hz for the T265)
                    if (position_displacement > jump_threshold) or (speed_delta > jump_speed_threshold):
                        #send_msg_to_gcs('VISO jump detected')
                        if position_displacement > jump_threshold:
                            progress("Position jumped by: %s" % position_displacement)
                        elif speed_delta > jump_speed_threshold:
                            progress("Speed jumped by: %s" % speed_delta)
                        increment_reset_counter()

                prev_data = data

                # Take offsets from body's center of gravity (or IMU) to camera's origin into account
                if body_offset_enabled == 1:
                    H_body_camera = tf.euler_matrix(0, 0, 0, 'sxyz')
                    H_body_camera[0][3] = body_offset_x
                    H_body_camera[1][3] = body_offset_y
                    H_body_camera[2][3] = body_offset_z
                    H_camera_body = np.linalg.inv(H_body_camera)
                    H_aeroRef_aeroBody = H_body_camera.dot(H_aeroRef_aeroBody.dot(H_camera_body))

                # Realign heading to face north using initial compass data
                if compass_enabled == 1:
                    H_aeroRef_aeroBody = H_aeroRef_aeroBody.dot(tf.euler_matrix(0, 0, heading_north_yaw, 'sxyz'))

            #####################################
            # Send Pos to Drone, then sleep until next loop (currently ~30 Hz)
            #####################################
            send_pos()
            time.sleep(0.03) # ~30 Hz

            # Show debug messages here
            if debug_enable == 1:
                print(vehicle.battery)
                print(s + "\n")
                #os.system('clear') # This helps in displaying the messages to be more readable
                #progress(str(time.time()) + " t265: {}".format(np.array(tf.euler_from_matrix(H_aeroRef_aeroBody, 'sxyz'))))
                # progress("DEBUG: Raw RPY[deg]: {}".format(np.array(tf.euler_from_matrix(H_T265Ref_T265body, 'sxyz')) * 180 / m.pi))
                # progress("DEBUG: NED RPY[deg]: {}".format(np.array(tf.euler_from_matrix(H_aeroRef_aeroBody, 'sxyz')) * 180 / m.pi))
                # progress("DEBUG: Raw pos xyz : {}".format(np.array([data.translation.x, data.translation.y, data.translation.z])))
                # progress("DEBUG: NED pos xyz : {}".format(np.array(tf.translation_from_matrix(H_aeroRef_aeroBody))))

            if vehicle.ekf_ok:
                if counter == 0:
                    print("POS LOCK") # drone EKF has good confidence, ok to start flying now
                    # Below is dronekit code for autonomous takeoff - must be in GUIDED mode to use (strange behaviour has been seen when doing this previously)
                    #vehicle.armed = True
                    #while not vehicle.armed:
                    #    time.sleep(0.01)
                    #vehicle.simple_takeoff(1)
                counter += 1
except Exception as e:
    progress(e)

except:
    #send_msg_to_gcs('ERROR IN SCRIPT')
    progress("Unexpected error: %s" % sys.exc_info()[0])
finally:
    progress('Closing the script...')
    vehicle.armed = False

    # start a timer in case stopping everything nicely doesn't work.
    signal.setitimer(signal.ITIMER_REAL, 5) # seconds...
    pipe.stop()
    # mavlink_thread_should_exit = True
    # mavlink_thread.join()
    #conn.close()
    progress("INFO: Realsense pipeline and vehicle object closed.")
    sys.exit(exit_code)