import asyncio
from navpy.core.navpy import lla2ned
import pygazebo
import pygazebo.msg.v11.laserscan_stamped_pb2
import numpy as np # for transposing across coordinate systems
from mavsdk import System
from mavsdk.mission import (MissionItem, MissionPlan)
import navpy
# This is the gazebo master address and port from PX4 message `[Msg] Connected to gazebo master @ http://127.0.0.1:11345`
HOST, PORT = "127.0.0.1", 11345
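
# If the connection below fails, the master address/port can be confirmed from the PX4/Gazebo
# startup log (the `Connected to gazebo master` message quoted above). The lidar topic name and
# message type used in GazeboMessageSubscriber.connect() can likewise be rediscovered with the
# gazebo-classic CLI noted there, e.g.:
#   gz topic -l            # list published topics
#   gz topic -i <topic>    # show info (including message type) for one topic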

class GazeboMessageSubscriber:
    '''
    Gazebo Message Subscriber subscribes to the lidar messages published by Gazebo.
    Explore the lidar tutorial in this repository to understand how it works.
    '''

    def __init__(self, host, port, timeout=30):
        self.host = host
        self.port = port
        self.loop = asyncio.get_event_loop()
        self.running = False
        self.timeout = timeout

    async def connect(self):
        connected = False
        for i in range(self.timeout):
            try:
                self.manager = await pygazebo.connect((self.host, self.port))
                connected = True
                break
            except Exception as e:
                print(e)
            await asyncio.sleep(1)

        if connected:
            # Topic name and message type come from `gz topic -l` and `gz topic -i <topic>`
            self.gps_subscriber = self.manager.subscribe('/gazebo/default/iris_lmlidar/lmlidar/link/lmlidar/scan', 'gazebo.msgs.LaserScanStamped', self.LaserScanStamped_callback)
            await self.gps_subscriber.wait_for_connection()
            self.running = True
            while self.running:
                await asyncio.sleep(0.1)
        else:
            raise Exception("Timeout connecting to Gazebo.")

    def LaserScanStamped_callback(self, data):
        # Which *_pb2 module you use here depends on the message type you are subscribing to
        self.LaserScanStamped = pygazebo.msg.v11.laserscan_stamped_pb2.LaserScanStamped()
        self.LaserScanStamped.ParseFromString(data)

    async def get_LaserScanStamped(self):
        for i in range(self.timeout):
            try:
                return self.LaserScanStamped
            except Exception as e:
                pass
            await asyncio.sleep(1)
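
# Illustrative usage sketch (not called anywhere in this mission): one way the subscriber above
# could be exercised on its own, assuming Gazebo is already running with the lmlidar model on
# HOST:PORT. The helper name _example_print_one_scan is made up for this sketch only.
async def _example_print_one_scan():
    gz_sub = GazeboMessageSubscriber(HOST, PORT)
    # connect() keeps looping once subscribed, so schedule it as a background task
    asyncio.ensure_future(gz_sub.connect())
    scan = await gz_sub.get_LaserScanStamped()
    # LaserScanStamped carries the scan geometry and the flattened range readings
    print(scan.scan.count, scan.scan.vertical_count, len(scan.scan.ranges))
# To try it standalone:  asyncio.get_event_loop().run_until_complete(_example_print_one_scan())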

def quat_mult(quaternion1, quaternion0):
    # Hamilton product quaternion1 * quaternion0, with quaternions ordered [w, i, j, k]
    w0 = quaternion0[0]
    x0 = quaternion0[1]
    y0 = quaternion0[2]
    z0 = quaternion0[3]
    w1 = quaternion1[0]
    x1 = quaternion1[1]
    y1 = quaternion1[2]
    z1 = quaternion1[3]
    return np.array([-x1 * x0 - y1 * y0 - z1 * z0 + w1 * w0,
                     x1 * w0 + y1 * z0 - z1 * y0 + w1 * x0,
                     -x1 * z0 + y1 * w0 + z1 * x0 + w1 * y0,
                     x1 * y0 - y1 * x0 + z1 * w0 + w1 * z0], dtype=np.float64)
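
# Minimal sanity-check sketch for quat_mult (kept commented out so it never runs): rotating the
# x unit vector by a 90-degree rotation about z, using the q * p * q_conjugate form referenced in
# get_world_obst() below, should land on the y axis. The values are illustrative only.
# q = np.array([np.cos(np.pi / 4), 0.0, 0.0, np.sin(np.pi / 4)])   # [w, i, j, k], 90 deg about z
# q_conj = q * np.array([1.0, -1.0, -1.0, -1.0])
# p = np.array([0.0, 1.0, 0.0, 0.0])                               # x axis as a pure quaternion
# print(quat_mult(quat_mult(q, p), q_conj))                        # expect ~[0, 0, 1, 0]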

def get_world_obst(lsr_val):
    # Laser returns arrive in a 1D array ordered as [v1h1 .. v20h1 v1h2 .. v20h2 v1h3 .. v20h3 ...],
    # where v is the vertical index and h is the horizontal index.
    # Rotation math: https://math.stackexchange.com/questions/40164/how-do-you-rotate-a-vector-by-a-unit-quaternion
    # All quaternions are expressed in the order [w, i, j, k]

    # Rotation quat
    R = np.array([lsr_val.scan.world_pose.orientation.w, lsr_val.scan.world_pose.orientation.x, lsr_val.scan.world_pose.orientation.y, lsr_val.scan.world_pose.orientation.z])
    # Conjugate of rotation quat
    R_prime = np.array([lsr_val.scan.world_pose.orientation.w, -lsr_val.scan.world_pose.orientation.x, -lsr_val.scan.world_pose.orientation.y, -lsr_val.scan.world_pose.orientation.z])
    # Linear displacement vector in pure quat form (the 0.1 in the i/x component is the translation from the sensor frame to the vehicle frame)
    disp = np.array([[0], [lsr_val.scan.world_pose.position.x + 0.1], [lsr_val.scan.world_pose.position.y], [lsr_val.scan.world_pose.position.z]])
    print(disp)

    # Linearly spaced vector describing the horizontal range in radians
    h_range = np.linspace((lsr_val.scan.angle_min), (lsr_val.scan.angle_max), lsr_val.scan.count, dtype=np.float64)
    # Linearly spaced vector describing the vertical range in radians
    v_range = np.linspace((lsr_val.scan.vertical_angle_min*-1.0 + np.pi/2.0), (lsr_val.scan.vertical_angle_max*-1.0 + np.pi/2.0), lsr_val.scan.vertical_count, dtype=np.float64)

    # Tile and repeat variables to allow vectorized operations
    R = np.tile(R, lsr_val.scan.count * lsr_val.scan.vertical_count)
    R_prime = np.tile(R_prime, lsr_val.scan.count * lsr_val.scan.vertical_count)
    disp = np.tile(disp, lsr_val.scan.count * lsr_val.scan.vertical_count)
    h_range = np.tile(h_range, lsr_val.scan.vertical_count)
    v_range = np.repeat(v_range, lsr_val.scan.count)

    # Get x, y, z components of the range readings in the sensor frame (spherical -> Cartesian)
    # https://tutorial.math.lamar.edu/classes/calciii/SphericalCoords.aspx
    P = np.array([lsr_val.scan.ranges * np.sin(v_range) * np.cos(h_range), lsr_val.scan.ranges * np.sin(v_range) * np.sin(h_range), lsr_val.scan.ranges * np.cos(v_range)])
    # Double check that the equality still holds
    # print(np.equal(np.round(np.square(lsr_val.scan.ranges), 4), np.round(np.square(P[0]) + np.square(P[1]) + np.square(P[2]), 4)))
    # Pad with a zero w component to put the vectors in pure quaternion form
    P = np.concatenate((np.array([np.zeros(P.shape[1])]), P), axis=0)

    # Multiply out the rotation (R P R') and add vehicle + sensor offsets
    P_prime = quat_mult(quat_mult(R, P), R_prime) + disp
    # Remove the pad and transpose for easy reading
    result = P_prime[1:4].T
    # Access using the index of the return you'd like (in the format [v1h1 .. v20h1 v1h2 .. v20h2 v1h3 .. v20h3 ...])
    # and the index of the axis: x=0, y=1, z=2.
    # e.g. result[-60:-40, 0:2] gives the x-y values of 20 returns, starting 60 from the end and ending 40 from the end.
    return result
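
# Illustrative helper (not called anywhere): one way to pull a quick obstacle statistic out of the
# registered scan, here the height of the tallest finite return currently in view. The helper name
# and the isfinite filtering of inf/no-return rays are assumptions made for this sketch only.
def _example_max_obstacle_height(lsr_val):
    registered = get_world_obst(lsr_val)                        # N x 3 array of world-frame [x, y, z]
    finite = registered[np.isfinite(registered).all(axis=1)]    # drop rays that came back inf/nan
    if finite.size == 0:
        return None
    return np.max(finite[:, 2])                                 # z column holds the return heights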

async def observe_is_in_air(drone, running_tasks):
    """ Monitors whether the drone is flying or not and
    returns after landing """
    print("Observing in air")
    was_in_air = False
    async for is_in_air in drone.telemetry.in_air():
        if is_in_air:
            was_in_air = is_in_air
        if was_in_air and not is_in_air:
            for task in running_tasks:
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass
            await asyncio.get_event_loop().shutdown_asyncgens()
            return

def get_next_wp(lsr_val, lla_ref_off):
    '''
    Use this function to implement your waypoint-finding algorithm.
    When you meet your condition for being ready to go to the last waypoint,
    set go_to_endpoint = True.
    NOTE: You will be moving along the x axis.
    '''
    max_speed = 2  # m/s

    ### DO NOT CHANGE ###
    own_x = lsr_val.scan.world_pose.position.x + lla_ref_off[1]
    if(own_x > 35):
        go_to_endpoint = True
    else:
        go_to_endpoint = False

    if(not go_to_endpoint):
        # These are the obstacle coordinates in the world frame.
        # Use them to determine where the obstacles are.
        # Access using the index of the return you'd like (in the format [v1h1 .. v20h1 v1h2 .. v20h2 v1h3 .. v20h3 ...])
        # and the index of the axis: x=0, y=1, z=2.
        # e.g. registered_scans[-60:-40, 0:2] gives the x-y values of 20 returns, starting 60 from the end and ending 40 from the end.
        registered_scans = get_world_obst(lsr_val)
        # Moving along the x axis, so y should stay constant
        y = 0
        ### DO NOT CHANGE ###

        # TODO: set x, z using your algorithm to determine where to go next.
        # This is saying that you want the drone to be at height z, at point x.
        # (See the illustrative _example_naive_next_wp sketch after this function for one naive approach.)
        #### YOUR CODE GOES HERE ####

        #### YOUR CODE GOES HERE ####
    else:
        x = float("NaN")
        y = float("NaN")
        z = float("NaN")

    speed = max_speed
    return [x, y, z, speed]
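
# Illustrative sketch only (not called anywhere, and not the intended solution): one naive way the
# TODO block in get_next_wp() could be filled in - step a fixed distance forward along x and ask
# for a height that clears the tallest finite return in view. The 2 m step and 1 m clearance
# margin are arbitrary assumptions made for this example.
def _example_naive_next_wp(lsr_val, lla_ref_off):
    own_x = lsr_val.scan.world_pose.position.x + lla_ref_off[1]
    registered_scans = get_world_obst(lsr_val)
    finite = registered_scans[np.isfinite(registered_scans).all(axis=1)]
    x = own_x + 2.0                                  # move 2 m further along the x axis
    if finite.size > 0:
        z = np.max(finite[:, 2]) + 1.0               # clear the tallest visible return by 1 m
    else:
        z = 1.0                                      # nothing in view: hold 1 m above the reference
    return x, z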

async def run_mission(drone, mission_items, lla_ref, gz_sub):
    done = False  # Flag to signal when we're done with our mission

    # The ned values are slightly skewed because the lla reference is at the start location, not the world origin
    # lla_ref_off = navpy.lla2ned(0, 0, 0, lla_ref[0], lla_ref[1], lla_ref[2])
    lla_ref_off = [0, 0, 0]

    [lat, lon, alt] = lla_ref  # Set default mission item to start location
    mission_item_idx = 0  # Keep track of which mission item we're currently on

    async for mission_progress in drone.mission.mission_progress():
        if(not mission_progress.current == -1):
            print(f"Mission progress: "
                  f"{mission_progress.current}/"
                  f"{mission_progress.total}")

        # Processing one point at a time, when we have reached the end of the current set of waypoints
        if(mission_progress.current == mission_progress.total - 1 and not done):
            print("-- Pause and clear mission")
            # Pause mission while calculating
            await drone.mission.pause_mission()
            await drone.mission.clear_mission()

            # Get current mission item index
            mission_item_idx = mission_progress.current

            print("-- Get Lidar Readings")
            # Get Lidar readings
            lsr_val = await gz_sub.get_LaserScanStamped()

            # Get next wp and determine if done
            print("-- Getting next wp")
            [x, y, z, speed] = get_next_wp(lsr_val, lla_ref_off)
            if(not np.isnan(x)):
                # ned2lla expects [north, east, down]; here x is treated as east and z as height above the reference
                [lat, lon, alt] = navpy.ned2lla([y, x, -z], lla_ref[0], lla_ref[1], lla_ref[2])
            else:
                print("-- Mission is done")
                done = True

            # Insert the mission item created into the overall mission
            print("-- Making mission plan")
            mission_items.insert(mission_item_idx, MissionItem(lat,
                                                               lon,
                                                               alt,
                                                               speed,
                                                               True,
                                                               float('nan'),
                                                               float('nan'),
                                                               MissionItem.CameraAction.NONE,
                                                               float('nan'),
                                                               float('nan')))
            mission_plan = MissionPlan(mission_items)

            # Upload updated mission to the drone
            print("-- Uploading updated mission")
            await drone.mission.upload_mission(mission_plan)
            # Wait for the upload to finish, otherwise it can't resume
            await asyncio.sleep(1)

            # Start the mission from the item that we just added
            print("-- Resuming mission")
            await drone.mission.set_current_mission_item(mission_item_idx)
            await drone.mission.start_mission()

        # Land the drone at the end location
        if(mission_progress.current == mission_progress.total and done):
            print("-- Landing")
            await drone.action.land()

async def run():
    # Some of the range values are inf and numpy complains when doing math with them, so turn off the warnings
    np.seterr(all='ignore')
    np.set_printoptions(suppress=True)

    # Make the Gazebo subscriber object
    gz_sub = GazeboMessageSubscriber(HOST, PORT)
    # Actually do the subscription
    gz_task = asyncio.ensure_future(gz_sub.connect())
    # Get a test message - this will speed up all of the subsequent requests
    await gz_sub.get_LaserScanStamped()

    # Initialize drone and connect
    drone = System()
    await drone.connect(system_address="udp://:14540")

    print("Waiting for drone to connect...")
    async for state in drone.core.connection_state():
        if state.is_connected:
            print(f"Drone discovered with UUID: {state.uuid}")
            break

    # Make sure telemetry is good so that we know we can trust the position and orientation values
    print("Waiting for drone to have a global position estimate...")
    async for health in drone.telemetry.health():
        if health.is_global_position_ok:
            print("Global position estimate ok")
            break

    # Get our starting position and use it as the reference for conversions between lla and ned
    print("Fetching amsl altitude at home location....")
    lla_ref = []
    async for terrain_info in drone.telemetry.position():
        lla_ref = [terrain_info.latitude_deg, terrain_info.longitude_deg, terrain_info.absolute_altitude_m]
        print(lla_ref)
        break

    # First mission item is to take off to just above our starting location
    mission_items = []
    mission_items.append(MissionItem(lla_ref[0],
                                     lla_ref[1],
                                     lla_ref[2] + 1,
                                     1,
                                     True,
                                     float('nan'),
                                     float('nan'),
                                     MissionItem.CameraAction.NONE,
                                     float('nan'),
                                     float('nan')))

    # Last mission item is at the end of the terrain, just above the terrain.
    # All of your mission items go between the start point above and the end point below.
    [lat, lon, alt] = navpy.ned2lla([0, 40, -1], lla_ref[0], lla_ref[1], lla_ref[2])
    mission_items.append(MissionItem(lat,
                                     lon,
                                     alt,
                                     1,
                                     True,
                                     float('nan'),
                                     float('nan'),
                                     MissionItem.CameraAction.NONE,
                                     float('nan'),
                                     float('nan')))
    mission_plan = MissionPlan(mission_items)

    # Spool up the mission and mission monitor
    mission_task = asyncio.ensure_future(run_mission(drone, mission_items, lla_ref, gz_sub))
    termination_task = asyncio.ensure_future(observe_is_in_air(drone, [mission_task, gz_task]))

    # Make the drone dangerous (arm it)
    print("-- Arming")
    await drone.action.arm()
    await asyncio.sleep(1)

    # Upload mission with our first item (more will be added during the mission)
    print("-- Uploading mission")
    await drone.mission.upload_mission(mission_plan)
    await asyncio.sleep(1)

    # Start running through the mission.
    # The mission task will watch the progression and add new points as we reach the old ones.
    # Once the end condition is met, the drone will land.
    print("-- Starting mission")
    await drone.mission.start_mission()

    await termination_task

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())