# api_timings.py
import os, time
from typing import Optional, List
from pydantic import BaseModel
from fastapi.responses import FileResponse
from fastapi import HTTPException, Header, Path, Request
import pandas as pd
import jellyfish as jf # for fuzzy search
from payanam_launch import app
import commonfuncs as cf
import dbconnect

class loadTimings_payload(BaseModel):
    pattern_id: str
    page: Optional[int] = 1

@app.post("/API/loadTimings", tags=["timings"])
def loadTimings(req: loadTimings_payload):
cf.logmessage("loadTimings api call")
space_id = int(os.environ.get('SPACE_ID',1))
pattern_id = req.pattern_id
returnD = { 'message': "success", "page":req.page, "stops":[], "trips":[] }
# stops
s1 = f"""select t1.stop_sequence, t1.stop_id, t2.name
from pattern_stops as t1
left join stops_master as t2
on t1.stop_id = t2.id
where t1.space_id={space_id}
and t1.pattern_id = '{pattern_id}'
order by t1.stop_sequence
"""
df1 = dbconnect.makeQuery(s1, output='df')
if len(df1):
returnD['stops'] = df1.to_dict(orient='records')
else:
returnD['stops'] = []
# trips
s2 = f"""select * from trips
where space_id={space_id}
and pattern_id = '{pattern_id}'
order by start_time
offset {(req.page-1)*10}
fetch first 10 rows only
"""
# TO DO: Pagination of trips if too many
# offset {(req.page-1)*10}
# fetch first 10 rows only
df2 = dbconnect.makeQuery(s2, output='df')
if len(df2):
df2['start_time'] = df2['start_time'].apply(lambda x: str(x)[:5])
returnD['trips'] = df2.to_dict(orient='records')
else:
returnD['trips'] = []
# fetch full count of trips if first page request.
# but if we got under 10 in pg1 itself then no need to query DB again.
if req.page == 1:
if len(df2) < 10:
returnD['num_trips'] = len(df2)
else:
s4 = f"""select count(id) as tripcount from trips
where space_id={space_id}
and pattern_id = '{pattern_id}'
"""
totalTrips = dbconnect.makeQuery(s4, output='oneValue')
returnD['num_trips'] = totalTrips
# timings
if len(df2):
trip_idSQL = cf.quoteNcomma(df2['id'].tolist())
s3 = f"""select trip_id, stop_sequence, arrival_time from stop_times
where space_id={space_id}
and trip_id in ({trip_idSQL})
order by trip_id, stop_sequence
"""
df3 = dbconnect.makeQuery(s3, output='df', fillna=True)
# df3['trip_id'] = df3['trip_id'].apply(lambda x: f"trip_{x}")
if len(df3):
df3['arrival_time'] = df3['arrival_time'].apply(lambda x: str(x)[:5])
else:
df3 = pd.DataFrame(columns=['trip_id', 'stop_sequence', 'arrival_time'])
if len(df1):
if len(df3):
# pivot by trip_id
df4 = df3.pivot(index='stop_sequence', columns='trip_id', values='arrival_time').fillna('').reset_index()
# merge in stop ids, names
df5 = pd.merge(df1, df4, on='stop_sequence', how='left')
else:
df5 = df1
# sort by start timings
allCols = list(df5.columns)
tripCols = [x for x in allCols if x not in ('stop_sequence', 'stop_id', 'name')]
newCols = ['stop_sequence', 'stop_id', 'name'] + sorted(tripCols)
returnD['stop_times'] = df5[newCols].to_dict(orient='records')
else:
returnD['stop_times'] = []
# TO DO: calc stop times offset from first trip or so
cf.logmessage(f"Got {len(df2)} trips, {len(df3)} timings total")
return returnD
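
# The TO DO above asks for stop-time offsets. A minimal sketch of one reading of it
# (each stop's offset, in minutes, from that trip's first timed stop), not part of the
# original file: it assumes the pivoted df5 built in loadTimings (stop_sequence /
# stop_id / name plus one 'HH:MM' string column per trip), and the helper name
# computeOffsets is hypothetical.
def computeOffsets(df5, tripCols):
    def toMinutes(t):
        # 'HH:MM' string -> minutes since midnight; blanks stay None
        if not t:
            return None
        h, m = str(t).split(':')[:2]
        return int(h) * 60 + int(m)
    offsets = {}
    for col in tripCols:
        mins = [toMinutes(x) for x in df5[col].tolist()]
        first = next((m for m in mins if m is not None), None)
        # offset of each stop's arrival from the trip's first timed stop
        offsets[col] = [(m - first) if (m is not None and first is not None) else None for m in mins]
    return offsets
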
############
@app.post("/API/saveTimings", tags=["timings"])
async def saveTimings(req: Request):
cf.logmessage("saveTimings api call")
space_id = int(os.environ.get('SPACE_ID',1))
returnD = { 'message': "success"}
# making the api take a custom json array
# from https://stackoverflow.com/a/64379772/4355695 (that and other answers)
reqD = await req.json()
# print(reqD)
# validation
if (not len(reqD.get('edits',[]))) or (not isinstance(reqD.get('edits',[]),list)) :
raise HTTPException(status_code=400, detail="No edits data")
if (not len(reqD.get('pattern_id'))) or (not isinstance(reqD.get('pattern_id',False),str)):
raise HTTPException(status_code=400, detail="No pattern_id")
## 2022-04-12 : New flow : receiving only edited values from backend, so chun chun ke edit kar
returnD['timings_updated'] = 0
for e in reqD['edits']:
u1 = f"""update stop_times
set arrival_time = '{e['arrival_time']}', departure_time='{e['arrival_time']}'
where space_id = {space_id}
and trip_id = '{e['trip_id']}'
and stop_sequence = {e['stop_sequence']}
"""
u1Count = dbconnect.execSQL(u1)
if not u1Count:
cf.logmessage(f"Warning: No stop_times edit happened from trip_id: {e['trip_id']}, stop_sequence: {e['stop_sequence']}. But skipping")
else:
returnD['timings_updated'] += u1Count
# update trips table in case a changed timing was the first one.
returnD['trips_updated'] = 0
if e['stop_sequence'] == 1:
u2 = f"""update trips
set start_time = '{e['arrival_time']}',
name = '{reqD['pattern_id']}_{e['arrival_time']}',
last_updated = CURRENT_TIMESTAMP,
modified_by = 'admin'
where space_id = {space_id}
and id = '{e['trip_id']}'
"""
u2Count = dbconnect.execSQL(u2)
if not u2Count:
cf.logmessage(f"Warning: No trips edit happened for trip_id: {e['trip_id']}. Skipping")
else:
returnD['trips_updated'] += u2Count
# TO DO: cover last sequence / trip end time change also
# for this, better to bring the sequence length from frontend?
return returnD
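
# The TO DO above asks to also handle an edit to the last stop (the trip's end time) and
# wonders whether the sequence length should come from the frontend. A small sketch, not
# part of the original file: the length could instead be looked up from pattern_stops
# (tables and columns as used elsewhere in this file); the helper name is hypothetical.
def getPatternLength(space_id, pattern_id):
    # the highest stop_sequence of the pattern = position of its last stop
    s_len = f"""select max(stop_sequence) from pattern_stops
        where space_id = {space_id}
        and pattern_id = '{pattern_id}'
    """
    return dbconnect.makeQuery(s_len, output='oneValue')
# Inside the edits loop, `if e['stop_sequence'] == getPatternLength(space_id, reqD['pattern_id']):`
# could then mirror the stop_sequence == 1 branch above.
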
################

class deleteTrip_payload(BaseModel):
    pattern_id: str
    trip_id: str

@app.post("/API/deleteTrip", tags=["timings"])
async def deleteTrip(req: deleteTrip_payload):
cf.logmessage("deleteTrip api call")
space_id = int(os.environ.get('SPACE_ID',1))
pattern_id = req.pattern_id
trip_id = req.trip_id
# check if its there in trips table and stop_times table
s1 = f"""select count(id) from trips
where space_id = {space_id}
and pattern_id = '{pattern_id}'
and id = '{trip_id}'
"""
c1 = dbconnect.makeQuery(s1, output='oneValue')
s2 = f"""select count(id) from stop_times
where space_id = {space_id}
and trip_id = '{trip_id}'
"""
c2 = dbconnect.makeQuery(s2, output='oneValue')
returnD = { "message": "success", "trips_deleted":0, "stop_times_deleted":0 }
if c1:
d1 = f"""delete from trips
where space_id = {space_id}
and pattern_id = '{pattern_id}'
and id = '{trip_id}'
"""
d1Count = dbconnect.execSQL(d1)
returnD['trips_deleted'] = d1Count
if c2:
d2 = f"""delete from stop_times
where space_id = {space_id}
and trip_id = '{trip_id}'
"""
d2Count = dbconnect.execSQL(d2)
returnD['stop_times_deleted'] = d2Count
return returnD
################

class addTrip_payload(BaseModel):
    pattern_id: str
    start_time: str
    autoFill: Optional[bool] = False

@app.post("/API/addTrip", tags=["timings"])
async def deleteTrip(req: addTrip_payload):
cf.logmessage("addTrip api call")
space_id = int(os.environ.get('SPACE_ID',1))
pattern_id = req.pattern_id
start_time = req.start_time
trip_id = cf.makeUID()
i1 = f"""insert into trips
(space_id, id, pattern_id, start_time, name) values
({space_id}, '{trip_id}', '{pattern_id}', '{start_time}', '{trip_id}_{start_time}')
"""
s1 = f"""select stop_sequence, stop_id from pattern_stops
where space_id = {space_id}
and pattern_id = '{pattern_id}'
order by stop_sequence
"""
df1 = dbconnect.makeQuery(s1, output='df')
df2 = df1[['stop_sequence']].copy()
df2['space_id'] = space_id
df2['trip_id'] = trip_id
df2['id'] = cf.assignUID(df1, length=7)
# set all timings except first one as null
df2['arrival_time'] = None
df2.at[0,'arrival_time'] = start_time
# TO DO: if requested, populate remaining arrival times also, taking a default speed
# and calculating lat-long distance / routed distance
status1 = dbconnect.execSQL(i1)
status2 = dbconnect.addTable(df2, 'stop_times')
returnD = { "message": "success"}
returnD['trip_id'] = trip_id
returnD['added_stop_times'] = len(df2)
return returnD
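
# The autoFill flag in addTrip_payload and the TO DO above suggest filling the remaining
# arrival times from a default speed and straight-line distances. A rough sketch, not part
# of the original file: it assumes the caller supplies each stop's (lat, lon) in sequence
# order (fetching those from stops_master is left out here), and the 20 km/h default and
# the helper name are illustrative assumptions only.
def autoFillArrivalTimes(start_time, latlons, speed_kmph=20):
    import math
    def haversine_km(a, b):
        # great-circle distance in km between two (lat, lon) pairs
        R = 6371.0
        la1, lo1, la2, lo2 = map(math.radians, [a[0], a[1], b[0], b[1]])
        h = math.sin((la2 - la1) / 2) ** 2 + math.cos(la1) * math.cos(la2) * math.sin((lo2 - lo1) / 2) ** 2
        return 2 * R * math.asin(math.sqrt(h))
    h0, m0 = map(int, str(start_time).split(':')[:2])
    elapsedMin = 0.0
    times = [f"{h0:02d}:{m0:02d}"]
    for i in range(1, len(latlons)):
        elapsedMin += haversine_km(latlons[i - 1], latlons[i]) / speed_kmph * 60
        total = h0 * 60 + m0 + int(round(elapsedMin))
        times.append(f"{(total // 60) % 24:02d}:{total % 60:02d}")
    return times
# When req.autoFill is set, df2['arrival_time'] = autoFillArrivalTimes(start_time, latlons)
# could replace the None-filling above.
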
################
def updateTimingsForPattern(pattern_id, pattern_length):
    # if a pattern's length has changed, update the stop_times entries of its trips to match
    space_id = int(os.environ.get('SPACE_ID', 1))
    totalAdded = totalRemoved = 0

    # find all trips for the pattern
    s1 = f"""select t1.id as trip_id, t2.id, t2.stop_sequence
        from trips as t1
        left join stop_times as t2
        on t1.id = t2.trip_id
        where t1.space_id = {space_id}
        and t1.pattern_id = '{pattern_id}'
        and t2.space_id = {space_id}
        order by t2.trip_id, t2.stop_sequence
    """
    df_exist_all = dbconnect.makeQuery(s1, output='df')
    # tripsList = dbconnect.makeQuery(s1, output='column')
    if not len(df_exist_all):
        return 0, 0, 0
    tripsList = df_exist_all['trip_id'].unique().tolist()
    # if not len(tripsList):
    #     return len(tripsList), totalAdded, totalRemoved

    all_delIds = []
    all_df_new = []
    for trip_id in tripsList:
        # get existing
        cf.logmessage(f"trip_id: {trip_id}")
        df_exist = df_exist_all[df_exist_all['trip_id'] == trip_id].copy().reset_index(drop=True)
        # space_id = int(os.environ.get('SPACE_ID',1))
        # s1 = f"""select id, stop_sequence from stop_times
        #     where space_id = {space_id}
        #     and trip_id = '{trip_id}'
        #     order by stop_sequence
        # """
        # df_exist = dbconnect.makeQuery(s1, output='df')
        if len(df_exist) == pattern_length:
            # no change needed!
            continue
        elif len(df_exist) > pattern_length:
            # delete flow
            delIds = df_exist[pattern_length:]['id'].tolist()
            if len(delIds): all_delIds += delIds
        else:
            # add flow
            newSeq = list(range(len(df_exist)+1, pattern_length+1))
            df_new = pd.DataFrame({'stop_sequence': newSeq})
            df_new['id'] = cf.assignUID(df_new, length=7)
            df_new['space_id'] = space_id
            df_new['trip_id'] = trip_id
            all_df_new.append(df_new)

    # delete at once
    if len(all_delIds):
        delIdsSQL = cf.quoteNcomma(all_delIds)
        cf.logmessage(f"ids to delete: {all_delIds}")
        d1 = f"""delete from stop_times
            where id in ({delIdsSQL})
        """
        totalRemoved = dbconnect.execSQL(d1)

    # add at once
    if len(all_df_new):
        add_df = pd.concat(all_df_new, sort=False, ignore_index=True)
        print("add_df:")
        print(add_df)
        totalAdded = dbconnect.addTable(add_df, 'stop_times')

    return len(tripsList), totalAdded, totalRemoved
# TO DO: Download full route's timings as excel, and let user edit offline and upload it again
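
# A possible shape for the download half of that TO DO, not part of the original file:
# reuse loadTimings for the pivoted table and send it back as an .xlsx via the
# already-imported FileResponse. The route path, the filename, and the reliance on an
# Excel writer being installed for DataFrame.to_excel (e.g. openpyxl) are assumptions.
# @app.get("/API/downloadTimings", tags=["timings"])
# def downloadTimings(pattern_id: str):
#     data = loadTimings(loadTimings_payload(pattern_id=pattern_id))
#     df = pd.DataFrame(data['stop_times'])
#     filename = f"timings_{pattern_id}.xlsx"
#     df.to_excel(filename, index=False)
#     return FileResponse(filename, filename=filename,
#         media_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')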