From 6bd384b01a5aa1754dea5b82cc56d8d321933eca Mon Sep 17 00:00:00 2001
From: infinite-persistence
Date: Mon, 10 Jan 2022 16:28:25 +0800
Subject: [PATCH] TUS: retry on 423_locked to try to address "failed to upload chunk"

## Background

Per the developer of `tus-js-client`, it is normal to occasionally encounter
upload errors, and the auto-retry mechanism is meant to address this.

While implementing the tab-lock to prevent multiple uploads of the same file,
423_locked was used to detect that scenario. But 423_locked can also mean
"the server is busy writing the chunk" (per discussion with Randy), so we
accidentally disabled the auto-retry mechanism for those errors.

Meanwhile, from a prior discussion with Randy, one chunk write took 3 minutes.
Our current maximum retry delay of 15s wouldn't help in that case.

## Change

1. Tab-locking was improved recently and no longer relies on server error
   messages (we use secure storage to mark a file as locked), so the
   "skip retry on 409/423" change is reverted. This restores the normal
   recommended retry behavior.
2. `tus-js-client` currently does not support variable retry delays;
   otherwise we could lengthen the delay when the error is 423. Since we know
   a chunk write could take up to 3 minutes, and we don't know whether that is
   file-size dependent, just add another 30s retry and show a friendlier
   message asking the user to retry manually after waiting a bit.
---
 web/setup/publish-v2.js | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/web/setup/publish-v2.js b/web/setup/publish-v2.js
index 0e29fa2e8..b8cf56cc7 100644
--- a/web/setup/publish-v2.js
+++ b/web/setup/publish-v2.js
@@ -60,7 +60,7 @@ export function makeResumableUploadRequest(
     const uploader = new tus.Upload(file, {
       ...urlOptions,
       chunkSize: UPLOAD_CHUNK_SIZE_BYTE,
-      retryDelays: [0, 5000, 10000, 15000],
+      retryDelays: [0, 5000, 10000, 15000, 30000],
       parallelUploads: 1,
       storeFingerprintForResuming: false,
       removeFingerprintOnSuccess: true,
@@ -73,21 +73,25 @@ export function makeResumableUploadRequest(
         window.store.dispatch(doUpdateUploadProgress({ guid, status: 'retry' }));
         const status = err.originalResponse ? err.originalResponse.getStatus() : 0;
         analytics.error(`tus: retry=${uploader._retryAttempt}, status=${status}`);
-        return !inStatusCategory(status, 400);
+        return !inStatusCategory(status, 400) || status === STATUS_CONFLICT || status === STATUS_LOCKED;
       },
       onError: (err) => {
         const status = err.originalResponse ? err.originalResponse.getStatus() : 0;
         const errMsg = typeof err === 'string' ? err : err.message;
-        if (status === STATUS_CONFLICT || status === STATUS_LOCKED || errMsg === 'file currently locked') {
+        if (status === STATUS_CONFLICT) {
           window.store.dispatch(doUpdateUploadProgress({ guid, status: 'conflict' }));
-          // prettier-ignore
-          reject(new Error(`${status}: concurrent upload detected. Uploading the same file from multiple tabs or windows is not allowed.`));
+          reject(new Error(`${status}: concurrent upload detected.`));
         } else {
+          const errToLog =
+            status === STATUS_LOCKED || errMsg === 'file currently locked'
+              ? 'File is locked. Try resuming after waiting a few minutes'
+              : err;
+
           window.store.dispatch(doUpdateUploadProgress({ guid, status: 'error' }));
           reject(
             // $FlowFixMe - flow's constructor for Error is incorrect.
-            new Error(err, {
+            new Error(errToLog, {
               cause: {
                 url: uploader.url,
                 status,
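
Reviewer note (not part of the patch): a minimal standalone sketch of the new retry decision, for reference. `STATUS_CONFLICT`, `STATUS_LOCKED`, and `inStatusCategory` below are stand-ins for the real imports in `web/setup/publish-v2.js`; the category helper is assumed to test whether a status falls in the given hundred-range. In the actual code this predicate is the return value of the `onShouldRetry` option of `tus.Upload`, and with the extended `retryDelays: [0, 5000, 10000, 15000, 30000]` it can now allow up to five retries.

```js
// Stand-ins for the constants imported by publish-v2.js.
const STATUS_CONFLICT = 409;
const STATUS_LOCKED = 423;

// Assumed behavior of the helper: true when `status` is within the given
// hundred-range, e.g. inStatusCategory(404, 400) === true.
function inStatusCategory(status, category) {
  return status >= category && status < category + 100;
}

// Mirrors the new onShouldRetry return value: retry anything that is not a
// 4xx client error, but also retry 409/423, since the server may return
// those while it is still busy writing a chunk.
function shouldRetry(status) {
  return !inStatusCategory(status, 400) || status === STATUS_CONFLICT || status === STATUS_LOCKED;
}

console.log(shouldRetry(500)); // true  -- server error, keep retrying
console.log(shouldRetry(423)); // true  -- locked: retried again instead of failing immediately
console.log(shouldRetry(404)); // false -- other 4xx errors still abort the upload
```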