Mirror of https://github.com/CompeyDev/fxtwitter-docker.git (synced 2025-04-06 02:50:54 +01:00)
Add crash safety around executionCtx
parent 1c8c71b0cd
commit 7dac72df79

2 changed files with 23 additions and 10 deletions
@@ -56,6 +56,7 @@ export const cacheMiddleware = (): MiddlewareHandler => async (c, next) => {
      Use waitUntil so you can return the response without blocking on
      writing to cache */
   try {
+    c.executionCtx &&
       c.executionCtx.waitUntil(cache.put(cacheKey, response.clone()));
   } catch (error) {
     console.error((error as Error).stack);
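The hunk above (in the cache middleware) and the src/fetch.ts hunks below apply the same fix: every access to c.executionCtx is wrapped in try/catch. Assuming c is a Hono Context, as the MiddlewareHandler (c, next) signature suggests, the executionCtx getter itself can throw on runtimes that provide no ExecutionContext (for example when the worker runs outside Cloudflare Workers), so guarding only the waitUntil call would not be enough. Below is a minimal sketch of how that pattern could be factored into one helper; the helper and the name safeWaitUntil are illustrative and not part of this commit.

import type { Context } from 'hono';

// Hypothetical helper: schedule background work via waitUntil when an
// execution context exists, and degrade to a logged error instead of a
// crashed request when it does not.
export const safeWaitUntil = (c: Context, task: Promise<unknown>): void => {
  try {
    // Accessing c.executionCtx can throw when no ExecutionContext is
    // available, so the access itself sits inside the try block.
    c.executionCtx && c.executionCtx.waitUntil(task);
  } catch (error) {
    // Same fallback as the commit: log the stack and keep serving.
    console.error((error as Error).stack);
  }
};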
src/fetch.ts (32 changed lines)
@@ -163,11 +163,15 @@ export const twitterFetch = async (
       console.log('Tweet was not found');
       return {};
     }
-    !useElongator &&
-      c.executionCtx &&
-      c.executionCtx.waitUntil(
-        cache.delete(guestTokenRequestCacheDummy.clone(), { ignoreMethod: true })
-      );
+    try{
+      !useElongator &&
+        c.executionCtx &&
+        c.executionCtx.waitUntil(
+          cache.delete(guestTokenRequestCacheDummy.clone(), { ignoreMethod: true })
+        );
+    } catch (error) {
+      console.error((error as Error).stack);
+    }
     if (useElongator) {
       console.log('Elongator request failed, trying again without it');
       wasElongatorDisabled = true;
@@ -195,10 +199,14 @@ export const twitterFetch = async (
     /* Running out of requests within our rate limit, let's purge the cache */
     if (!useElongator && remainingRateLimit < 10) {
       console.log(`Purging token on this edge due to low rate limit remaining`);
-      c.executionCtx &&
-        c.executionCtx.waitUntil(
-          cache.delete(guestTokenRequestCacheDummy.clone(), { ignoreMethod: true })
-        );
+      try {
+        c.executionCtx &&
+          c.executionCtx.waitUntil(
+            cache.delete(guestTokenRequestCacheDummy.clone(), { ignoreMethod: true })
+          );
+      } catch (error) {
+        console.error((error as Error).stack);
+      }
     }

     if (!validateFunction(response)) {
@@ -224,7 +232,11 @@ export const twitterFetch = async (
         }
       });
       console.log('Caching guest token');
-      c.executionCtx.waitUntil(cache.put(guestTokenRequestCacheDummy.clone(), cachingResponse));
+      try {
+        c.executionCtx.waitUntil(cache.put(guestTokenRequestCacheDummy.clone(), cachingResponse));
+      } catch (error) {
+        console.error((error as Error).stack);
+      }
     }

       // @ts-expect-error - We'll pin the guest token to whatever response we have
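For illustration, the rate-limit purge above could then collapse to a single call to the hypothetical safeWaitUntil helper sketched earlier, with cache and guestTokenRequestCacheDummy being the values already in scope in src/fetch.ts:

// Purge the cached guest token without risking a crash when
// c.executionCtx is unavailable (hypothetical refactor, not this commit).
safeWaitUntil(
  c,
  cache.delete(guestTokenRequestCacheDummy.clone(), { ignoreMethod: true })
);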