From ebb5e5e60d16dde68e23adeed044369ba08dae7c Mon Sep 17 00:00:00 2001
From: dangered wolf
Date: Fri, 10 Nov 2023 16:24:47 -0500
Subject: [PATCH] Fix said crash safety

---
 src/fetch.ts | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/src/fetch.ts b/src/fetch.ts
index 0f3e4d0..4056aab 100644
--- a/src/fetch.ts
+++ b/src/fetch.ts
@@ -223,20 +223,20 @@ export const twitterFetch = async (
         newTokenGenerated = true;
         continue;
       }
-      /* If we've generated a new token, we'll cache it */
-      if (c.executionCtx && newTokenGenerated && activate) {
-        const cachingResponse = new Response(await activate.clone().text(), {
-          headers: {
-            ...tokenHeaders,
-            'cache-control': `max-age=${Constants.GUEST_TOKEN_MAX_AGE}`
-          }
-        });
-        console.log('Caching guest token');
-        try {
+      try {
+        /* If we've generated a new token, we'll cache it */
+        if (c.executionCtx && newTokenGenerated && activate) {
+          const cachingResponse = new Response(await activate.clone().text(), {
+            headers: {
+              ...tokenHeaders,
+              'cache-control': `max-age=${Constants.GUEST_TOKEN_MAX_AGE}`
+            }
+          });
+          console.log('Caching guest token');
           c.executionCtx.waitUntil(cache.put(guestTokenRequestCacheDummy.clone(), cachingResponse));
-        } catch (error) {
-          console.error((error as Error).stack);
         }
+      } catch (error) {
+        console.error((error as Error).stack);
       }
 
       // @ts-expect-error - We'll pin the guest token to whatever response we have