Merge pull request #38 from dangeredwolf/cache-guest-token

Implement Cloudflare guest token caching
This commit is contained in:
dangered wolf 2022-08-18 11:12:56 -04:00 committed by GitHub
commit eea9b82622
Signed by: DevComp
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 63 additions and 22 deletions

View file

@@ -138,9 +138,10 @@ const populateTweetProperties = async (
available for free using api.fxtwitter.com. */
export const statusAPI = async (
status: string,
language: string | undefined
language: string | undefined,
event: FetchEvent
): Promise<APIResponse> => {
const conversation = await fetchUsingGuest(status);
const conversation = await fetchUsingGuest(status, event);
const tweet = conversation?.globalObjects?.tweets?.[status] || {};
/* Fallback for if Tweet did not load */

View file

@@ -1,8 +1,11 @@
import { Constants } from './constants';
export const fetchUsingGuest = async (status: string): Promise<TimelineBlobPartial> => {
export const fetchUsingGuest = async (
status: string,
event: FetchEvent
): Promise<TimelineBlobPartial> => {
let apiAttempts = 0;
let cachedTokenFailed = false;
let newTokenGenerated = false;
const tokenHeaders: { [header: string]: string } = {
Authorization: Constants.GUEST_BEARER_TOKEN,
@@ -16,12 +19,25 @@ export const fetchUsingGuest = async (status: string): Promise<TimelineBlobParti
headers: tokenHeaders,
cf: {
cacheEverything: true,
cacheTtl: 600
cacheTtl: 300
},
body: ''
}
);
/* A dummy version of the request only used for Cloudflare caching purposes.
The reason it exists at all is because Cloudflare won't cache POST requests. */
const guestTokenRequestCacheDummy = new Request(
`${Constants.TWITTER_API_ROOT}/1.1/guest/activate.json`,
{
method: 'GET',
cf: {
cacheEverything: true,
cacheTtl: 300
}
}
);
const cache = caches.default;
while (apiAttempts < 10) {
@@ -41,19 +57,19 @@ export const fetchUsingGuest = async (status: string): Promise<TimelineBlobParti
let activate: Response | null = null;
if (!cachedTokenFailed) {
const cachedResponse = await cache.match(guestTokenRequest);
if (!newTokenGenerated) {
const cachedResponse = await cache.match(guestTokenRequestCacheDummy);
if (cachedResponse) {
console.log('Token cache hit');
activate = cachedResponse;
} else {
console.log('Token cache miss');
newTokenGenerated = true;
}
console.log('Token cache miss');
cachedTokenFailed = true;
}
if (cachedTokenFailed || activate === null) {
if (newTokenGenerated || activate === null) {
/* If all goes according to plan, we have a guest token we can use to call API
AFAIK there is no limit to how many guest tokens you can request.
@@ -66,14 +82,14 @@ export const fetchUsingGuest = async (status: string): Promise<TimelineBlobParti
let activateJson: { guest_token: string };
try {
activateJson = (await activate.json()) as { guest_token: string };
activateJson = (await activate.clone().json()) as { guest_token: string };
} catch (e: unknown) {
continue;
}
const guestToken = activateJson.guest_token;
console.log('Activated guest:', activateJson);
console.log(newTokenGenerated ? 'Activated guest:' : 'Using guest:', activateJson);
console.log('Guest token:', guestToken);
/* Just some cookies to mimick what the Twitter Web App would send */
@@ -107,10 +123,21 @@ export const fetchUsingGuest = async (status: string): Promise<TimelineBlobParti
/* We'll usually only hit this if we get an invalid response from Twitter.
It's uncommon, but it happens */
console.error('Unknown error while fetching conversation from API');
cachedTokenFailed = true;
event && event.waitUntil(cache.delete(guestTokenRequestCacheDummy));
newTokenGenerated = true;
continue;
}
const remainingRateLimit = parseInt(
apiRequest.headers.get('x-rate-limit-remaining') || '0'
);
console.log(`Remaining rate limit: ${remainingRateLimit} requests`);
/* Running out of requests within our rate limit, let's purge the cache */
if (remainingRateLimit < 20) {
console.log(`Purging token on this edge due to low rate limit remaining`);
event && event.waitUntil(cache.delete(guestTokenRequestCacheDummy));
}
if (
typeof conversation.globalObjects === 'undefined' &&
(typeof conversation.errors === 'undefined' ||
@@ -118,12 +145,20 @@ export const fetchUsingGuest = async (status: string): Promise<TimelineBlobParti
239) /* TODO: i forgot what code 239 actually is lol */
) {
console.log('Failed to fetch conversation, got', conversation);
cachedTokenFailed = true;
newTokenGenerated = true;
continue;
}
/* Once we've confirmed we have a working guest token, let's cache it! */
// event.waitUntil(cache.put(guestTokenRequest, activate.clone()));
/* If we've generated a new token, we'll cache it */
if (event && newTokenGenerated) {
const cachingResponse = new Response(await activate.clone().text(), {
headers: {
...tokenHeaders,
'cache-control': 'max-age=300'
}
});
console.log('Caching guest token');
event.waitUntil(cache.put(guestTokenRequestCacheDummy, cachingResponse));
}
conversation.guestToken = guestToken;
return conversation;
}

View file

@@ -90,7 +90,8 @@ const statusRequest = async (
mediaNumber ? parseInt(mediaNumber) : undefined,
userAgent,
flags,
language
language,
event
);
/* Complete responses are normally sent just by errors. Normal embeds send a `text` value. */

View file

@@ -24,12 +24,13 @@ export const handleStatus = async (
mediaNumber?: number,
userAgent?: string,
flags?: InputFlags,
language?: string
language?: string,
event?: FetchEvent
// eslint-disable-next-line sonarjs/cognitive-complexity
): Promise<StatusResponse> => {
console.log('Direct?', flags?.direct);
const api = await statusAPI(status, language);
const api = await statusAPI(status, language, event as FetchEvent);
const tweet = api?.tweet as APITweet;
/* Catch this request if it's an API response */

View file

@@ -5,4 +5,7 @@ compatibility_date = "2022-08-17"
send_metrics = false
[build]
command = "npm run build"
command = "npm run build"
[miniflare.globals]
TEST = "true" # Will have unicode character errors in headers if not set to true