Implement a Rate Limiter (#216)

### Summary

Resolves #207 

Sibling PR: gt-scheduler/firebase-conf#4

We want to be able to rate limit the `/fetchFriendSchedules` Firebase cloud
function endpoint to prevent DoS attacks and increased server costs. I
implemented a client-side Leaky Bucket solution using a request count stored
in local storage. Whenever a new request is made, the count is refilled based
on the time elapsed since the last request. The rate limiter does not restrict
the background calls to the endpoint that keep friend schedules in sync. The
request count refills at 1 per second and is capped at 10.

Note: This rate limiter does not sync the request count across devices because
it uses local storage. Doing that would likely require an extra collection,
which might increase costs.
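
For reference, the refill step works roughly like the sketch below (a simplified, non-React version of the logic in the new `useRateLimiter` hook; the names here are illustrative):

```ts
// Simplified sketch of the bucket refill described above (illustrative only).
interface Bucket {
  remainingCount: number; // requests still allowed right now
  lastRefreshTime: number; // ms since epoch of the last refill
}

function refillBucket(bucket: Bucket, capacity: number, intervalMs: number): Bucket {
  const now = Date.now();
  // Tokens regained since the last refill: `capacity` tokens per `intervalMs`.
  const added = Math.floor(((now - bucket.lastRefreshTime) / intervalMs) * capacity);
  if (added <= 0) return bucket;
  return {
    remainingCount: Math.min(capacity, bucket.remainingCount + added),
    lastRefreshTime: now,
  };
}
```

With the committed values (capacity 10, interval 10 seconds), this refills one request per second, matching the behavior described above.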

### Checklist

- [x] `/fetchFriendSchedules` is rate limited.
- [x] Throttled requests return an error, and that error is displayed in the UI.


### How to Test
- Change the rate limiter capacity and interval (`RATE_LIMITER_CAPACITY` and
`RATE_LIMITER_INTERVAL_SEC`) in
`src/data/hooks/useRawFriendScheduleDataFromFirebaseFunction.ts` to values low
enough to hit with normal page reloads (see the example below).
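
For example, values like these (hypothetical test values only; the committed defaults are `10` and `10`) make the limit easy to hit with a handful of reloads:

```ts
// Hypothetical test values — not the committed defaults (10 / 10).
const RATE_LIMITER_CAPACITY = 3; // at most 3 requests before throttling
const RATE_LIMITER_INTERVAL_SEC = 60; // refills 3 requests per 60 s (one every 20 s)
```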

---------

Co-authored-by: nathangong <[email protected]>
Co-authored-by: Nghi Ho <[email protected]>
Co-authored-by: Hailey Ho <[email protected]>
4 people authored Oct 20, 2023
1 parent a225009 commit ef99fd6
Showing 2 changed files with 139 additions and 14 deletions.
73 changes: 59 additions & 14 deletions src/data/hooks/useRawFriendScheduleDataFromFirebaseFunction.ts
@@ -3,6 +3,7 @@ import { useState, useRef } from 'react';
import { Immutable } from 'immer';

import { auth } from '../firebase';
import useRateLimiter from '../../hooks/useRateLimiter';
import { ErrorWithFields, softError } from '../../log';
import { LoadingState } from '../../types';
import {
@@ -22,6 +23,13 @@ interface HookResult {

const url = `${CLOUD_FUNCTION_BASE_URL}/fetchFriendSchedules`;

export const RATE_LIMITER_BUCKET_STORAGE_KEY =
  process.env.NODE_ENV === 'production' && !process.env['REACT_APP_PREVIEW']
    ? 'rate-limiter-bucket'
    : 'rate-limiter-bucket-dev';
const RATE_LIMITER_CAPACITY = 10;
const RATE_LIMITER_INTERVAL_SEC = 10;

// Number of minutes between re-fetches of the friend schedules
const REFRESH_INTERVAL_MIN = 5;

@@ -45,6 +53,13 @@ export default function useRawFriendScheduleDataFromFirebaseFunction({
    type: 'loading',
  });

  const { hasReachedLimit, refreshBucket, decrementBucketCount } =
    useRateLimiter(
      RATE_LIMITER_BUCKET_STORAGE_KEY,
      RATE_LIMITER_CAPACITY,
      RATE_LIMITER_INTERVAL_SEC
    );

  // Keep a ref of the latest loaded schedules
  // to check if it is any newer than the current one.
  const loadedFriendScheduleRef = useRef<HookResult | null>(null);
@@ -190,25 +205,55 @@ export default function useRawFriendScheduleDataFromFirebaseFunction({
      }
    }

    loadAndRefresh().catch((err) => {
      softError(
        new ErrorWithFields({
          message: 'error loading and refreshing friend schedules',
          source: err,
          fields: {
            url,
            term: currentTerm,
            termFriendData,
          },
        })
      );
    });
    refreshBucket();
    if (hasReachedLimit) {
      const err = new ErrorWithFields({
        message: 'error loading and refreshing friend schedules',
        source: new Error('Exceeded rate limit'),
        fields: {
          url,
          term: currentTerm,
          termFriendData,
          hasReachedLimit,
        },
      });
      softError(err);
      setState({
        type: 'error',
        error: err,
        stillLoading: false,
        overview: String(err),
      });
    } else {
      decrementBucketCount();
      loadAndRefresh().catch((err) => {
        softError(
          new ErrorWithFields({
            message: 'error loading and refreshing friend schedules',
            source: err,
            fields: {
              url,
              term: currentTerm,
              termFriendData,
              hasReachedLimit,
            },
          })
        );
      });
    }

    // Cancel the background load when this cleans up
    return (): void => {
      loadOperation.cancel();
    };
  }, [currentTerm, termFriendData, setState]);
  }, [
    currentTerm,
    termFriendData,
    setState,
    hasReachedLimit,
    refreshBucket,
    decrementBucketCount,
  ]);

  // If we are about to start a new background load
  // after the term changed, then don't return the already fetched
80 changes: 80 additions & 0 deletions src/hooks/useRateLimiter.ts
@@ -0,0 +1,80 @@
import { useMemo, useCallback } from 'react';
import useLocalStorageState from 'use-local-storage-state';

interface RateLimiterBucket {
  remainingCount: number;
  lastRefreshTime: string | Date;
}

export default function useRateLimiter(
  bucketName: string,
  capacity: number,
  interval: number
): {
  hasReachedLimit: boolean;
  refreshBucket: () => void;
  decrementBucketCount: () => void;
} {
  const [bucket, setBucket] = useLocalStorageState<RateLimiterBucket>(
    bucketName,
    {
      defaultValue: {
        remainingCount: capacity,
        lastRefreshTime: new Date(),
      },
      storageSync: true,
    }
  );

  const intervalMs = useMemo(() => interval * 1000, [interval]);

  const hasReachedLimit = useMemo(() => {
    return bucket.remainingCount < 0;
  }, [bucket.remainingCount]);

  const refreshBucket = useCallback(() => {
    setBucket((currBucket) => {
      const oldDate = new Date(currBucket.lastRefreshTime);
      const newDate = new Date();
      const isOldDateInvalid = Number.isNaN(oldDate.valueOf());
      if (!isOldDateInvalid) {
        const bucketCountAdded = Math.floor(
          ((newDate.valueOf() - oldDate.valueOf()) / intervalMs) * capacity
        );
        if (bucketCountAdded > 0) {
          return {
            remainingCount: Math.min(
              capacity,
              currBucket.remainingCount +
                bucketCountAdded +
                (currBucket.remainingCount < 0 ? 1 : 0)
            ),
            lastRefreshTime: newDate,
          };
        }
      }

      return {
        remainingCount: Math.min(capacity, currBucket.remainingCount),
        lastRefreshTime: isOldDateInvalid
          ? new Date()
          : currBucket.lastRefreshTime,
      };
    });
  }, [capacity, intervalMs, setBucket]);

  const decrementBucketCount = useCallback(() => {
    setBucket((currBucket) => {
      return {
        ...currBucket,
        remainingCount: currBucket.remainingCount - 1,
      };
    });
  }, [setBucket]);

  return {
    hasReachedLimit,
    refreshBucket,
    decrementBucketCount,
  };
}
