Mirror: the highly customizable and versatile GraphQL client, to which you can add features like normalized caching as you grow.
at main 236 lines 7.9 kB view raw
1import { 2 makeSubject, 3 pipe, 4 merge, 5 filter, 6 fromValue, 7 debounce, 8 mergeMap, 9 takeUntil, 10} from 'wonka'; 11 12import type { Exchange, Operation, CombinedError } from '@urql/core'; 13import { makeOperation } from '@urql/core'; 14 15/** Input parameters for the {@link retryExchange}. */ 16export interface RetryExchangeOptions { 17 /** Specify the minimum time to wait until retrying. 18 * 19 * @remarks 20 * `initialDelayMs` specifies the minimum time (in milliseconds) to wait 21 * until a failed operation is retried. 22 * 23 * @defaultValue `1_000` - one second 24 */ 25 initialDelayMs?: number; 26 /** Specifies the maximum time to wait until retrying. 27 * 28 * @remarks 29 * `maxDelayMs` specifies the maximum time (in milliseconds) to wait 30 * until a failed operation is retried. While `initialDelayMs` 31 * specifies the minimum amount of time, `randomDelay` may cause 32 * the delay to increase over multiple attempts. 33 * 34 * @defaultValue `15_000` - 15 seconds 35 */ 36 maxDelayMs?: number; 37 /** Enables a random exponential backoff to increase the delay over multiple retries. 38 * 39 * @remarks 40 * `randomDelay`, unless disabled, increases the time until a failed 41 * operation is retried over multiple attempts. It increases the time 42 * starting at `initialDelayMs` by 1.5x with an added factor of 0–1, 43 * until `maxDelayMs` is reached. 44 * 45 * @defaultValue `true` - enables random exponential backoff 46 */ 47 randomDelay?: boolean; 48 /** Specifies the maximum number of attempts, including the initial request. 49 * 50 * @remarks 51 * `maxNumberAttempts` defines the total number of attempts before it's 52 * considered failed. 53 * 54 * @defaultValue `2` - Retry once, i.e. two attempts 55 */ 56 maxNumberAttempts?: number; 57 /** Predicate allowing you to selectively not retry `Operation`s. 58 * 59 * @remarks 60 * `retryIf` is called with a {@link CombinedError} and the {@link Operation} that 61 * failed. 
If this function returns false the failed `Operation` is not retried. 62 * 63 * @defaultValue `(error) => !!error.networkError` - retries only on network errors. 64 */ 65 retryIf?(error: CombinedError, operation: Operation): boolean; 66 /** Transform function allowing you to selectively replace a retried `Operation` or return nullish value. 67 * 68 * @remarks 69 * `retryWhen` is called with a {@link CombinedError} and the {@link Operation} that 70 * failed. If this function returns an `Operation`, `retryExchange` will replace the 71 * failed `Operation` and retry. It won't retry the `Operation` if a nullish value 72 * is returned. 73 * 74 * The `retryIf` function, if defined, takes precedence and overrides this option. 75 */ 76 retryWith?( 77 error: CombinedError, 78 operation: Operation 79 ): Operation | null | undefined; 80} 81 82interface RetryState { 83 count: number; 84 delay: number | null; 85} 86 87/** Exchange factory that retries failed operations. 88 * 89 * @param options - A {@link RetriesExchangeOptions} configuration object. 90 * @returns the created retry {@link Exchange}. 91 * 92 * @remarks 93 * The `retryExchange` retries failed operations with specified delays 94 * and exponential backoff. 95 * 96 * You may define a {@link RetryExchangeOptions.retryIf} or 97 * {@link RetryExchangeOptions.retryWhen} function to only retry 98 * certain kinds of operations, e.g. only queries. 99 * 100 * @example 101 * ```ts 102 * retryExchange({ 103 * initialDelayMs: 1000, 104 * maxDelayMs: 15000, 105 * randomDelay: true, 106 * maxNumberAttempts: 2, 107 * retryIf: err => err && err.networkError, 108 * }); 109 * ``` 110 */ 111export const retryExchange = (options: RetryExchangeOptions = {}): Exchange => { 112 const { retryIf, retryWith } = options; 113 const MIN_DELAY = options.initialDelayMs || 1000; 114 const MAX_DELAY = options.maxDelayMs || 15_000; 115 const MAX_ATTEMPTS = options.maxNumberAttempts || 2; 116 const RANDOM_DELAY = 117 options.randomDelay != null ? 
!!options.randomDelay : true; 118 119 return ({ forward, dispatchDebug }) => 120 operations$ => { 121 const { source: retry$, next: nextRetryOperation } = 122 makeSubject<Operation>(); 123 124 const retryWithBackoff$ = pipe( 125 retry$, 126 mergeMap((operation: Operation) => { 127 const retry: RetryState = operation.context.retry || { 128 count: 0, 129 delay: null, 130 }; 131 132 const retryCount = ++retry.count; 133 let delayAmount = retry.delay || MIN_DELAY; 134 135 const backoffFactor = Math.random() + 1.5; 136 if (RANDOM_DELAY) { 137 // if randomDelay is enabled and it won't exceed the max delay, apply a random 138 // amount to the delay to avoid thundering herd problem 139 if (delayAmount * backoffFactor < MAX_DELAY) { 140 delayAmount *= backoffFactor; 141 } else { 142 delayAmount = MAX_DELAY; 143 } 144 } else { 145 // otherwise, increase the delay proportionately by the initial delay 146 delayAmount = Math.min(retryCount * MIN_DELAY, MAX_DELAY); 147 } 148 149 // ensure the delay is carried over to the next context 150 retry.delay = delayAmount; 151 152 // We stop the retries if a teardown event for this operation comes in 153 // But if this event comes through regularly we also stop the retries, since it's 154 // basically the query retrying itself, no backoff should be added! 
155 const teardown$ = pipe( 156 operations$, 157 filter(op => { 158 return ( 159 (op.kind === 'query' || op.kind === 'teardown') && 160 op.key === operation.key 161 ); 162 }) 163 ); 164 165 dispatchDebug({ 166 type: 'retryAttempt', 167 message: `The operation has failed and a retry has been triggered (${retryCount} / ${MAX_ATTEMPTS})`, 168 operation, 169 data: { 170 retryCount, 171 delayAmount, 172 }, 173 }); 174 175 // Add new retryDelay and retryCount to operation 176 return pipe( 177 fromValue( 178 makeOperation(operation.kind, operation, { 179 ...operation.context, 180 retry, 181 }) 182 ), 183 debounce(() => delayAmount), 184 // Stop retry if a teardown comes in 185 takeUntil(teardown$) 186 ); 187 }) 188 ); 189 190 return pipe( 191 merge([operations$, retryWithBackoff$]), 192 forward, 193 filter(res => { 194 const retry = res.operation.context.retry as RetryState | undefined; 195 // Only retry if the error passes the conditional retryIf function (if passed) 196 // or if the error contains a networkError 197 if ( 198 !res.error || 199 (retryIf 200 ? !retryIf(res.error, res.operation) 201 : !retryWith && !res.error.networkError) 202 ) { 203 // Reset the delay state for a successful operation 204 if (retry) { 205 retry.count = 0; 206 retry.delay = null; 207 } 208 return true; 209 } 210 211 const maxNumberAttemptsExceeded = 212 ((retry && retry.count) || 0) >= MAX_ATTEMPTS - 1; 213 if (!maxNumberAttemptsExceeded) { 214 const operation = retryWith 215 ? retryWith(res.error, res.operation) 216 : res.operation; 217 if (!operation) return true; 218 219 // Send failed responses to be retried by calling next on the retry$ subject 220 // Exclude operations that have been retried more than the specified max 221 nextRetryOperation(operation); 222 return false; 223 } 224 225 dispatchDebug({ 226 type: 'retryExhausted', 227 message: 228 'Maximum number of retries has been reached. 
No further retries will be performed.', 229 operation: res.operation, 230 }); 231 232 return true; 233 }) 234 ); 235 }; 236};