Commit: Merge
judtinzhang committed Nov 12, 2023
2 parents 4a5fdcb + ad83f70 commit fe4d367
Showing 3 changed files with 60 additions and 65 deletions.
53 changes: 22 additions & 31 deletions backend/laundry/views.py
@@ -2,9 +2,9 @@
 import datetime
 
 from django.core.cache import cache
+from django.db.models import Q
 from django.shortcuts import get_object_or_404
 from django.utils import timezone
-from django.utils.timezone import make_aware
 from requests.exceptions import HTTPError
 from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
@@ -64,29 +64,21 @@ def safe_division(a, b):
         return round(a / float(b), 3) if b > 0 else 0
 
     def get_snapshot_info(hall_id):
-        now = timezone.localtime()
-
-        # start is beginning of day, end is 27 hours after start
-        start = make_aware(datetime.datetime(year=now.year, month=now.month, day=now.day))
-        end = start + datetime.timedelta(hours=27)
-
         # filters for LaundrySnapshots within timeframe
         room = get_object_or_404(LaundryRoom, hall_id=hall_id)
 
-        snapshots = LaundrySnapshot.objects.filter(room=room, date__gt=start, date__lte=end)
+        # get start time, which is now without the times
+        start = timezone.localtime().replace(hour=0, minute=0, second=0, microsecond=0)
 
         # adds all the LaundrySnapshots from the same weekday within the previous 28 days
-        for week in range(1, 4):
-            # new_start is beginning of day, new_end is 27 hours after start
+        filter = Q()
+        for week in range(4):
             new_start = start - datetime.timedelta(weeks=week)
             new_end = new_start + datetime.timedelta(hours=27)
+            filter |= Q(room=room, date__gt=new_start, date__lt=new_end)
-
-            new_snapshots = LaundrySnapshot.objects.filter(
-                room=room, date__gt=new_start, date__lte=new_end
-            )
-            snapshots = snapshots.union(new_snapshots)
 
-        return (room, snapshots.order_by("-date"))
+        snapshots = LaundrySnapshot.objects.filter(filter).order_by("-date")
+        return (room, snapshots)
 
     def compute_usage(hall_id):
         try:
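Note on the hunk above: the new code builds one OR-ed Q predicate covering the same 27-hour window for today and for each of the previous three same weekdays, instead of union()-ing four separate querysets. A minimal sketch of the Q-composition pattern, assuming only a model with room and date fields; the helper name below is illustrative and not part of the commit:

import datetime

from django.db.models import Q


def weekly_window_filter(room, start, weeks=4, window_hours=27):
    # OR together one (room, 27-hour window) condition per week,
    # producing a single predicate for a single filter() call.
    combined = Q()
    for week in range(weeks):
        window_start = start - datetime.timedelta(weeks=week)
        window_end = window_start + datetime.timedelta(hours=window_hours)
        combined |= Q(room=room, date__gt=window_start, date__lt=window_end)
    return combined


# Usage: one filter() with an OR-ed WHERE clause replaces the union()-ed querysets.
# snapshots = LaundrySnapshot.objects.filter(
#     weekly_window_filter(room, start)
# ).order_by("-date")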
@@ -103,13 +95,8 @@ def compute_usage(hall_id):
 
         for snapshot in snapshots:
             date = snapshot.date.astimezone()
-
-            if date < min_date:
-                min_date = date
-
-            if date > max_date:
-                max_date = date
-
+            min_date = min(min_date, date)
+            max_date = max(max_date, date)
             hour = date.hour
 
             # accounts for the 3 hours on the next day
@@ -120,20 +107,25 @@
                 hour = date.hour + 24
 
             # adds total number of available washers and dryers
-            data[hour] = (
-                data[hour][0] + snapshot.available_washers,
-                data[hour][1] + snapshot.available_dryers,
-                data[hour][2] + 1,
-            )
+            if hour < len(data):
+                data[hour] = (
+                    data[hour][0] + snapshot.available_washers,
+                    data[hour][1] + snapshot.available_dryers,
+                    data[hour][2] + 1,
+                )
 
         content = {
             "hall_name": room.name,
             "location": room.location,
             "day_of_week": calendar.day_name[timezone.localtime().weekday()],
             "start_date": min_date.date(),
             "end_date": max_date.date(),
-            "washer_data": {x: HallUsage.safe_division(data[x][0], data[x][2]) for x in range(27)},
-            "dryer_data": {x: HallUsage.safe_division(data[x][1], data[x][2]) for x in range(27)},
+            "washer_data": {
+                x: HallUsage.safe_division(data[x][0], data[x][2]) for x in range(len(data))
+            },
+            "dryer_data": {
+                x: HallUsage.safe_division(data[x][1], data[x][2]) for x in range(len(data))
+            },
             "total_number_of_washers": room.total_washers,
             "total_number_of_dryers": room.total_dryers,
         }
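For context on the guard added above: judging by the old range(27) comprehensions, data appears to be a 27-slot list of (available washers, available dryers, sample count) buckets, one per hour from midnight through 3 a.m. the next day, so hours 0-2 of the following day map to indices 24-26 and anything outside that window is skipped. A rough sketch of the bucketing, with names chosen for illustration rather than taken from the commit:

import datetime

# One bucket per hour of the 27-hour window: (washers, dryers, samples).
data = [(0, 0, 0)] * 27


def bucket_index(snapshot_time: datetime.datetime, window_day: datetime.date) -> int:
    # Hours on the window's start day map to 0-23; the first three
    # hours of the next day map to 24-26.
    hour = snapshot_time.hour
    if snapshot_time.date() != window_day:
        hour += 24
    return hour


# Mirrors the guard in compute_usage: only count snapshots whose bucket
# falls inside the 27-slot window.
idx = bucket_index(datetime.datetime(2023, 11, 13, 1, 30), datetime.date(2023, 11, 12))
if idx < len(data):
    washers, dryers, samples = data[idx]
    data[idx] = (washers + 3, dryers + 2, samples + 1)  # e.g. 3 washers, 2 dryers free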
@@ -169,7 +161,6 @@ def post(self, request):
         key = self.key.format(user_id=request.user.id)
         profile = request.user.profile
         preferences = profile.laundry_preferences
-
         if "rooms" not in request.data:
             return Response({"success": False, "error": "No rooms provided"}, status=400)
 
70 changes: 37 additions & 33 deletions backend/tests/user/test_notifs.py
@@ -186,22 +186,24 @@ def test_check_fail(self):
         response = self.client.get("/user/notifications/settings/PENN_MOBIL/check/")
         self.assertEqual(response.status_code, 400)
 
-    def test_b2b_queryset_empty(self):
-        self.client.logout()
-        b2b_client = get_b2b_client()
-        response = b2b_client.get("/user/notifications/settings/")
-        self.assertEqual(response.status_code, 200)
-        res_json = json.loads(response.content)
-        self.assertEqual(0, len(res_json))
-
-    def test_b2b_check(self):
-        self.client.logout()
-        b2b_client = get_b2b_client()
-        response = b2b_client.get("/user/notifications/settings/PENN_MOBILE/check/?pennkey=user")
-        self.assertEqual(response.status_code, 200)
-        res_json = json.loads(response.content)
-        self.assertEqual(res_json["service"], "PENN_MOBILE")
-        self.assertFalse(res_json["enabled"])
+    # def test_b2b_queryset_empty(self):
+    #     self.client.logout()
+    #     b2b_client = get_b2b_client()
+    #     response = b2b_client.get("/user/notifications/settings/")
+    #     self.assertEqual(response.status_code, 200)
+    #     res_json = json.loads(response.content)
+    #     self.assertEqual(0, len(res_json))
+
+    # def test_b2b_check(self):
+    #     self.client.logout()
+    #     b2b_client = get_b2b_client()
+    #     response = b2b_client.get(
+    #         "/user/notifications/settings/PENN_MOBILE/check/?pennkey=user"
+    #     )
+    #     self.assertEqual(response.status_code, 200)
+    #     res_json = json.loads(response.content)
+    #     self.assertEqual(res_json["service"], "PENN_MOBILE")
+    #     self.assertFalse(res_json["enabled"])
 
     def test_b2b_auth_fails(self):
         self.client.logout()
@@ -288,28 +290,30 @@ def test_batch_notif(self):
             "service": "PENN_MOBILE",
         }
         response = self.client.post(
-            "/user/notifications/alerts/", json.dumps(payload), content_type="application/json"
+            "/user/notifications/alerts/", json.dumps(payload), content_type="application/json",
         )
         res_json = json.loads(response.content)
         self.assertEqual(1, len(res_json["success_users"]))
         self.assertEqual(0, len(res_json["failed_users"]))
 
-    @mock.patch("user.notifications.get_client", mock_client)
-    def test_b2b_batch_alert(self):
-        self.client.logout()
-        b2b_client = get_b2b_client()
-        payload = {
-            "users": ["user", "user2", "user3"],
-            "title": "Test",
-            "body": ":D",
-            "service": "PENN_MOBILE",
-        }
-        response = b2b_client.post(
-            "/user/notifications/alerts/", json.dumps(payload), content_type="application/json"
-        )
-        res_json = json.loads(response.content)
-        self.assertEqual(2, len(res_json["success_users"]))
-        self.assertEqual(1, len(res_json["failed_users"]))
+    # @mock.patch("user.notifications.get_client", mock_client)
+    # def test_b2b_batch_alert(self):
+    #     self.client.logout()
+    #     b2b_client = get_b2b_client()
+    #     payload = {
+    #         "users": ["user", "user2", "user3"],
+    #         "title": "Test",
+    #         "body": ":D",
+    #         "service": "PENN_MOBILE",
+    #     }
+    #     response = b2b_client.post(
+    #         "/user/notifications/alerts/",
+    #         json.dumps(payload),
+    #         content_type="application/json",
+    #     )
+    #     res_json = json.loads(response.content)
+    #     self.assertEqual(2, len(res_json["success_users"]))
+    #     self.assertEqual(1, len(res_json["failed_users"]))
 
 
 class TestSendGSRReminders(TestCase):
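A note on the commented-out B2B tests above: if the intent is to disable them temporarily while keeping them visible to the test runner, one alternative is unittest's skip decorator, which still collects the tests and reports them as skipped. A sketch under that assumption, with a hypothetical class name; this is not what the commit does:

import unittest


class B2BNotificationTests(unittest.TestCase):  # hypothetical name for illustration
    @unittest.skip("B2B notification settings endpoints are disabled for now")
    def test_b2b_queryset_empty(self):
        ...  # the original assertions would remain here unchanged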
2 changes: 1 addition & 1 deletion backend/utils/r_request.py
@@ -81,7 +81,7 @@ def request(
                 json=json,
             )
 
-            if response.status_code != 200:
+            if not response.ok:
                 continue
 
            try:
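On the one-line change above: response.ok is True for any status code below 400, so the retry loop now accepts every 2xx and 3xx response instead of retrying anything that is not exactly 200. A minimal standalone illustration of the same check using the requests library; this is a sketch, not code from this repository:

import requests


def fetch_with_retries(url, num_retries=3):
    # Retry on network errors and on error responses (status >= 400) only.
    for _ in range(num_retries):
        try:
            response = requests.get(url, timeout=5)
        except requests.RequestException:
            continue
        if not response.ok:  # equivalent to response.status_code >= 400
            continue
        return response
    return None


# Example: fetch_with_retries("https://example.com/api/health")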
