Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Log and resolve errors in uploading data due to Firebase API changes #1008

Merged
merged 2 commits into from
Jan 4, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 32 additions & 10 deletions emission/net/api/usercache.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,19 @@ def sync_server_to_phone(uuid):
# logging.debug("retrievedData = %s" % retrievedData)
return retrievedData

def _remove_dots(entry_doc):
for key in entry_doc:
# print(f"Checking {key=}")
if isinstance(entry_doc[key], dict):
# print(f"Found dict for {key=}, recursing")
_remove_dots(entry_doc[key])
if '.' in key:
munged_key = key.replace(".", "_")
logging.info(f"Found {key=} with dot, munged to {munged_key=}")
# Get and delete in one swoop
# https://stackoverflow.com/a/11277439
entry_doc[munged_key] = entry_doc.pop(key, None)

def sync_phone_to_server(uuid, data_from_phone):
"""
Puts the blob from the phone into the cache
Expand All @@ -44,21 +57,30 @@ def sync_phone_to_server(uuid, data_from_phone):

if "ts" in data["data"] and ecc.isMillisecs(data["data"]["ts"]):
data["data"]["ts"] = old_div(float(data["data"]["ts"]), 1000)

# mongodb/documentDB don't support field names with `.`
# let's convert them all to `_`
_remove_dots(data)

# logging.debug("After updating with UUId, we get %s" % data)
document = {'$set': data}
update_query = {'user_id': uuid,
'metadata.type': data["metadata"]["type"],
'metadata.write_ts': data["metadata"]["write_ts"],
'metadata.key': data["metadata"]["key"]}
result = usercache_db.update_one(update_query,
document,
upsert=True)
logging.debug("Updated result for user = %s, key = %s, write_ts = %s = %s" %
(uuid, data["metadata"]["key"], data["metadata"]["write_ts"], result.raw_result))
try:
result = usercache_db.update_one(update_query,
document,
upsert=True)
logging.debug("Updated result for user = %s, key = %s, write_ts = %s = %s" %
(uuid, data["metadata"]["key"], data["metadata"]["write_ts"], result.raw_result))

# I am not sure how to trigger a writer error to test this
# and whether this is the format expected from the server in the rawResult
if 'ok' in result.raw_result and result.raw_result['ok'] != 1.0:
logging.error("In sync_phone_to_server, err = %s" % result.raw_result['writeError'])
raise Exception()
# I am not sure how to trigger a writer error to test this
# and whether this is the format expected from the server in the rawResult
if 'ok' in result.raw_result and result.raw_result['ok'] != 1.0:
logging.error("In sync_phone_to_server, err = %s" % result.raw_result['writeError'])
raise Exception()
except pymongo.errors.PyMongoError as e:
logging.error(f"In sync_phone_to_server, while executing {update_query=} on {document=}")
logging.exception(e)
raise
72 changes: 72 additions & 0 deletions emission/tests/netTests/TestBuiltinUserCacheHandlerInput.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import uuid
import attrdict as ad
import time
import copy
import geojson as gj
# This change should be removed in the next server update, by which time hopefully the new geojson version will incorporate the long-term fix for their default precision
# See - jazzband/geojson#177
Expand Down Expand Up @@ -273,6 +274,77 @@ def testTwoLongTermCalls(self):
self.assertEqual(edb.get_timeseries_db().estimated_document_count(), 120)
self.assertEqual(edb.get_timeseries_error_db().estimated_document_count(), 0)

def testRemoteDots(self):
    """Verify that dotted keys are munged and survive the full sync pipeline.

    First checks `_remove_dots` directly on a copy of a realistic Firebase
    notification payload, then pushes three entries containing dotted keys
    through sync_phone_to_server -> moveToLongTerm and asserts they land in
    the timeseries without errors.
    """
    test_template = {"ts":1735934360.256,
        "client_app_version":"1.9.6",
        "name":"open_notification",
        "client_os_version":"15.5",
        "reading":{
            "additionalData":{
                "google.c.sender.id":"FAKE_SENDER_ID",
                "coldstart":False,
                "notId":"1122334455667788",
                "payload":1122334455667788,
                "content-available":1,
                "foreground":False,
                "google.c.fid":"FAKE_FID",
                "gcm.message_id":"FAKE_MESSAGE_ID"}}}
    # deepcopy, not copy: _remove_dots mutates nested dicts in place, and a
    # shallow copy would share (and munge) the template's "additionalData",
    # leaving the entries built from the template below without any dots to
    # exercise the sync-path munging.
    test_1 = copy.deepcopy(test_template)
    self.assertEqual(len(test_1["reading"]["additionalData"]), 8)
    self.assertIn("google.c.sender.id",
        test_1["reading"]["additionalData"])
    self.assertIn("google.c.fid",
        test_1["reading"]["additionalData"])
    self.assertIn("gcm.message_id",
        test_1["reading"]["additionalData"])
    mauc._remove_dots(test_1)
    # same number of keys: every dotted key was renamed, none dropped
    self.assertEqual(len(test_1["reading"]["additionalData"]), 8)
    self.assertIn("google_c_sender_id",
        test_1["reading"]["additionalData"])
    self.assertIn("google_c_fid",
        test_1["reading"]["additionalData"])
    self.assertIn("gcm_message_id",
        test_1["reading"]["additionalData"])
    self.assertNotIn("google.c.sender.id",
        test_1["reading"]["additionalData"])
    self.assertNotIn("google.c.fid",
        test_1["reading"]["additionalData"])
    self.assertNotIn("gcm.message_id",
        test_1["reading"]["additionalData"])

    metadata_template = {'plugin': 'none',
        'write_ts': self.curr_ts - 25,
        'time_zone': u'America/Los_Angeles',
        'platform': u'ios',
        'key': u'stats/client_time',
        'read_ts': self.curr_ts - 27,
        'type': u'message'}

    # there are 30 entries in the setup function
    self.assertEqual(len(self.uc1.getMessage()), 30)

    three_entries_with_dots = []
    for i in range(3):
        # metadata_template is flat, so a shallow copy is safe here
        curr_md = copy.copy(metadata_template)
        curr_md['write_ts'] = self.curr_ts - 25 + i
        three_entries_with_dots.append({
            'user_id': self.testUserUUID1,
            # deepcopy so each entry carries its own un-munged nested dicts
            'data': copy.deepcopy(test_template),
            'metadata': curr_md})

    mauc.sync_phone_to_server(self.testUserUUID1, three_entries_with_dots)
    # we have munged, so these new entries should also be saved
    # and we should have 33 entries in the usercache
    self.assertEqual(len(self.uc1.getMessage()), 33)
    self.assertEqual(len(list(self.ts1.find_entries())), 0)
    enuah.UserCacheHandler.getUserCacheHandler(self.testUserUUID1).moveToLongTerm()
    # since they were munged before saving into the usercache,
    # there should be no errors while copying to the timeseries
    self.assertEqual(len(self.uc1.getMessage()), 0)
    self.assertEqual(len(list(self.ts1.find_entries())), 33)

if __name__ == '__main__':
import emission.tests.common as etc

Expand Down
Loading