diff --git a/Changelog.md b/Changelog.md index eea6cb5340..79fe813aa7 100644 --- a/Changelog.md +++ b/Changelog.md @@ -28,6 +28,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - FIO-6370: Fixes issues with PasswordStrength Addon settings - FIO-7146: formiojs-circleci-to-ghactions - FIO-6859: update-s3-to-accept-headers-from-signer-presign + - FIO-7239: support for AWS S3 Multipart Upload + - FIO-7239: add polyfill and include token in abort and complete requests for multipart upload ## 5.0.0-rc.26 ### Changed diff --git a/package.json b/package.json index 44d0f40782..9de818ead2 100644 --- a/package.json +++ b/package.json @@ -83,6 +83,7 @@ "@formio/core": "1.3.0-rc.16", "@formio/text-mask-addons": "^3.8.0-formio.2", "@formio/vanilla-text-mask": "^5.1.1-formio.1", + "abortcontroller-polyfill": "^1.7.5", "autocompleter": "^8.0.4", "bootstrap": "^5.3.0", "browser-cookies": "^1.2.0", diff --git a/src/Formio.js b/src/Formio.js index e7a9613080..339b8a1715 100644 --- a/src/Formio.js +++ b/src/Formio.js @@ -6,7 +6,7 @@ Formio.Providers = Providers; Formio.version = 'FORMIO_VERSION'; const isNil = (val) => val === null || val === undefined; -Formio.prototype.uploadFile = function(storage, file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, uploadStartCallback, abortCallback) { +Formio.prototype.uploadFile = function(storage, file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, uploadStartCallback, abortCallback, multipartOptions) { const requestArgs = { provider: storage, method: 'upload', @@ -26,7 +26,7 @@ Formio.prototype.uploadFile = function(storage, file, fileName, dir, progressCal if (uploadStartCallback) { uploadStartCallback(); } - return provider.uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback); + return provider.uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, 
groupPermissions, groupId, abortCallback, multipartOptions); } else { throw ('Storage provider not found'); diff --git a/src/Wizard.js b/src/Wizard.js index da8bd99609..b514d19121 100644 --- a/src/Wizard.js +++ b/src/Wizard.js @@ -897,30 +897,27 @@ export default class Wizard extends Webform { } setValue(submission, flags = {}, ignoreEstablishment) { - const changed = this.getPages({ all: true }).reduce((changed, page) => { - return this.setNestedValue(page, submission.data, flags, changed) || changed; - }, false); - - if (!flags.sanitize || + this._submission = submission; + if ( + (flags && flags.fromSubmission && (this.options.readOnly || this.editMode) && !this.isHtmlRenderMode()) || (flags && flags.fromSubmission && (this.prefixComps.length || this.suffixComps.length) && submission._id) || (this.options.server && (this.prefixComps.length || this.suffixComps.length)) ) { - this.mergeData(this.data, submission.data); + this._data = submission.data; } + if (!ignoreEstablishment) { + this.establishPages(submission.data); + } + const changed = this.getPages({ all: true }).reduce((changed, page) => { + return this.setNestedValue(page, submission.data, flags, changed) || changed; + }, false); + if (changed) { this.pageFieldLogic(this.page); } - this.setEditMode(submission); - submission.data = this.data; - this._submission = submission; - - if (!ignoreEstablishment) { - this.establishPages(submission.data); - } - return changed; } diff --git a/src/Wizard.unit.js b/src/Wizard.unit.js index 1956a789d1..549dfcaf10 100644 --- a/src/Wizard.unit.js +++ b/src/Wizard.unit.js @@ -161,7 +161,7 @@ describe('Wizard tests', () => { }, 'Should contain correct submission data'); done(); - }, 500); + }, 200); }, 200); }, 200); }, 200); diff --git a/src/components/file/editForm/File.edit.file.js b/src/components/file/editForm/File.edit.file.js index cc369f0754..e76dba3a0d 100644 --- a/src/components/file/editForm/File.edit.file.js +++ b/src/components/file/editForm/File.edit.file.js 
@@ -21,6 +21,46 @@ export default [ } } }, + { + type: 'checkbox', + input: true, + key: 'useMultipartUpload', + label: 'Use the S3 Multipart Upload API', + tooltip: "The S3 Multipart Upload API is designed to improve the upload experience for larger objects (> 5GB).", + conditional: { + json: { '===': [{ var: 'data.storage' }, 's3'] } + }, + }, + { + label: 'Multipart Upload', + tableView: false, + key: 'multipart', + type: 'container', + input: true, + components: [ + { + label: 'Part Size (MB)', + applyMaskOn: 'change', + mask: false, + tableView: false, + delimiter: false, + requireDecimal: false, + inputFormat: 'plain', + truncateMultipleSpaces: false, + validate: { + min: 5, + max: 5000, + }, + key: 'partSize', + type: 'number', + input: true, + defaultValue: 500, + }, + ], + conditional: { + json: { '===': [{ var: 'data.useMultipartUpload' }, true] } + }, + }, { type: 'textfield', input: true, diff --git a/src/providers/storage/dropbox.js b/src/providers/storage/dropbox.js index 64122202bc..b1229fd674 100644 --- a/src/providers/storage/dropbox.js +++ b/src/providers/storage/dropbox.js @@ -1,65 +1,67 @@ import { setXhrHeaders } from './xhr'; -const dropbox = (formio) => ({ - uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback) { - return new Promise(((resolve, reject) => { - // Send the file with data. - const xhr = new XMLHttpRequest(); +function dropbox(formio) { + return { + uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback) { + return new Promise(((resolve, reject) => { + // Send the file with data. 
+ const xhr = new XMLHttpRequest(); - if (typeof progressCallback === 'function') { - xhr.upload.onprogress = progressCallback; - } + if (typeof progressCallback === 'function') { + xhr.upload.onprogress = progressCallback; + } - if (typeof abortCallback === 'function') { - abortCallback(() => xhr.abort()); - } + if (typeof abortCallback === 'function') { + abortCallback(() => xhr.abort()); + } - const fd = new FormData(); - fd.append('name', fileName); - fd.append('dir', dir); - fd.append('file', file); + const fd = new FormData(); + fd.append('name', fileName); + fd.append('dir', dir); + fd.append('file', file); - // Fire on network error. - xhr.onerror = (err) => { - err.networkError = true; - reject(err); - }; + // Fire on network error. + xhr.onerror = (err) => { + err.networkError = true; + reject(err); + }; - xhr.onload = () => { - if (xhr.status >= 200 && xhr.status < 300) { - const response = JSON.parse(xhr.response); - response.storage = 'dropbox'; - response.size = file.size; - response.type = file.type; - response.groupId = groupId; - response.groupPermissions = groupPermissions; - response.url = response.path_lower; - resolve(response); - } - else { - reject(xhr.response || 'Unable to upload file'); - } - }; + xhr.onload = () => { + if (xhr.status >= 200 && xhr.status < 300) { + const response = JSON.parse(xhr.response); + response.storage = 'dropbox'; + response.size = file.size; + response.type = file.type; + response.groupId = groupId; + response.groupPermissions = groupPermissions; + response.url = response.path_lower; + resolve(response); + } + else { + reject(xhr.response || 'Unable to upload file'); + } + }; - xhr.onabort = reject; + xhr.onabort = reject; - xhr.open('POST', `${formio.formUrl}/storage/dropbox`); + xhr.open('POST', `${formio.formUrl}/storage/dropbox`); - setXhrHeaders(formio, xhr); + setXhrHeaders(formio, xhr); + const token = formio.getToken(); + if (token) { + xhr.setRequestHeader('x-jwt-token', token); + } + xhr.send(fd); + 
})); + }, + downloadFile(file) { const token = formio.getToken(); - if (token) { - xhr.setRequestHeader('x-jwt-token', token); - } - xhr.send(fd); - })); - }, - downloadFile(file) { - const token = formio.getToken(); - file.url = - `${formio.formUrl}/storage/dropbox?path_lower=${file.path_lower}${token ? `&x-jwt-token=${token}` : ''}`; - return Promise.resolve(file); - } -}); + file.url = + `${formio.formUrl}/storage/dropbox?path_lower=${file.path_lower}${token ? `&x-jwt-token=${token}` : ''}`; + return Promise.resolve(file); + } + }; +} dropbox.title = 'Dropbox'; export default dropbox; diff --git a/src/providers/storage/s3.js b/src/providers/storage/s3.js index 61fb0a293a..a5ab147dd0 100644 --- a/src/providers/storage/s3.js +++ b/src/providers/storage/s3.js @@ -1,16 +1,49 @@ import XHR from './xhr'; +import { withRetries } from './util'; + +const AbortController = window.AbortController || require('abortcontroller-polyfill/dist/cjs-ponyfill'); function s3(formio) { return { - uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback) { - return XHR.upload(formio, 's3', (xhr, response) => { + async uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback, multipartOptions) { + const xhrCallback = async(xhr, response, abortCallback) => { response.data.fileName = fileName; response.data.key = XHR.path([response.data.key, dir, fileName]); if (response.signed) { - xhr.openAndSetHeaders('PUT', response.signed); - Object.keys(response.data.headers || {}).forEach(key => { - xhr.setRequestHeader(key, response.data.headers[key]); - }); - return file; + if (multipartOptions && Array.isArray(response.signed)) { + // patch abort callback + const abortController = new AbortController(); + const abortSignal = abortController.signal; + if (typeof abortCallback === 'function') { + abortCallback(() => abortController.abort()); + } + try { + const parts = await 
this.uploadParts( + file, + response.signed, + response.data.headers, + response.partSizeActual, + multipartOptions, + abortSignal + ); + await withRetries(this.completeMultipartUpload, [response, parts, multipartOptions], 3); + return; + } + catch (err) { + // abort in-progress fetch requests + abortController.abort(); + // attempt to cancel the multipart upload + this.abortMultipartUpload(response); + throw err; + } + } + else { + xhr.openAndSetHeaders('PUT', response.signed); + xhr.setRequestHeader('Content-Type', file.type); + Object.keys(response.data.headers).forEach((key) => { + xhr.setRequestHeader(key, response.data.headers[key]); + }); + return file; + } } else { const fd = new FormData(); @@ -21,18 +54,94 @@ function s3(formio) { xhr.openAndSetHeaders('POST', response.url); return fd; } - }, file, fileName, dir, progressCallback, groupPermissions, groupId, abortCallback).then((response) => { - return { - storage: 's3', - name: fileName, - bucket: response.bucket, - key: response.data.key, - url: XHR.path([response.url, response.data.key]), - acl: response.data.acl, - size: file.size, - type: file.type - }; + }; + const response = await XHR.upload( + formio, + 's3', + xhrCallback, + file, + fileName, + dir, + progressCallback, + groupPermissions, + groupId, + abortCallback, + multipartOptions + ); + return { + storage: 's3', + name: fileName, + bucket: response.bucket, + key: response.data.key, + url: XHR.path([response.url, response.data.key]), + acl: response.data.acl, + size: file.size, + type: file.type + }; + }, + async completeMultipartUpload(serverResponse, parts, multipart) { + const { changeMessage } = multipart; + changeMessage('Completing AWS S3 multipart upload...'); + const token = formio.getToken(); + const response = await fetch(`${formio.formUrl}/storage/s3/multipart/complete`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(token ? 
{ 'x-jwt-token': token } : {}) + }, + body: JSON.stringify({ parts, uploadId: serverResponse.uploadId, key: serverResponse.key }) }); + const message = await response.text(); + if (!response.ok) { + throw new Error(message); + } + // the AWS S3 SDK CompleteMultipartUpload command can return a HTTP 200 status header but still error; + // we need to parse, and according to AWS, to retry + if (message.match(/Error/)) { + throw new Error(message); + } + }, + abortMultipartUpload(serverResponse) { + const { uploadId, key } = serverResponse; + const token = formio.getToken(); + fetch(`${formio.formUrl}/storage/s3/multipart/abort`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(token ? { 'x-jwt-token': token } : {}) + }, + body: JSON.stringify({ uploadId, key }) + }).catch((err) => console.error('Error while aborting multipart upload:', err)); + }, + uploadParts(file, urls, headers, partSize, multipart, abortSignal) { + const { changeMessage, progressCallback } = multipart; + changeMessage('Chunking and uploading parts to AWS S3...'); + const promises = []; + for (let i = 0; i < urls.length; i++) { + const start = i * partSize; + const end = (i + 1) * partSize; + const blob = i < urls.length ? 
file.slice(start, end) : file.slice(start); + const promise = fetch(urls[i], { + method: 'PUT', + headers, + body: blob, + signal: abortSignal, + }).then((res) => { + if (res.ok) { + progressCallback(urls.length); + const eTag = res.headers.get('etag'); + if (!eTag) { + throw new Error('ETag header not found; it must be exposed in S3 bucket CORS settings'); + } + return { ETag: eTag, PartNumber: i + 1 }; + } + else { + throw new Error(`Part no ${i} failed with status ${res.status}`); + } + }); + promises.push(promise); + } + return Promise.all(promises); }, downloadFile(file) { if (file.acl !== 'public-read') { diff --git a/src/providers/storage/s3.unit.js b/src/providers/storage/s3.unit.js new file mode 100644 index 0000000000..a4160170fa --- /dev/null +++ b/src/providers/storage/s3.unit.js @@ -0,0 +1,80 @@ +import assert from 'assert'; +import sinon from 'sinon'; +import fetchMock from 'fetch-mock'; + +import { Formio } from '../../Formio'; +import S3 from './s3'; +import { withRetries } from './util'; + +describe('S3 Provider', () => { + describe('Function Unit Tests', () => { + it('withRetries should retry a given function three times, then throw the provided error', (done) => { + function sleepAndReject(ms) { + return new Promise((_, reject) => setTimeout(reject, ms)); + } + + const spy = sinon.spy(sleepAndReject); + withRetries(spy, [200], 3, 'Custom error message').catch((err) => { + assert.equal(err.message, 'Custom error message'); + assert.equal(spy.callCount, 3); + done(); + }); + }); + }); + + describe('Provider Integration Tests', () => { + describe('AWS S3 Multipart Uploads', () => { + before('Mocks fetch', () => { + fetchMock + .post('https://fakeproject.form.io/fakeform/storage/s3', { + signed: new Array(5).fill('https://fakebucketurl.aws.com/signed'), + minio: false, + url: 'https://fakebucketurl.aws.com', + bucket: 'fakebucket', + uploadId: 'fakeuploadid', + key: 'test.jpg', + partSizeActual: 1, + data: {} + }) + 
.put('https://fakebucketurl.aws.com/signed', { status: 200, headers: { 'Etag': 'fakeetag' } }) + .post('https://fakeproject.form.io/fakeform/storage/s3/multipart/complete', 200) + .post('https://fakeproject.form.io/fakeform/storage/s3/multipart/abort', 200); + }); + it('Given an array of signed urls it should upload a file to S3 using multipart upload', (done) => { + const mockFormio = { + formUrl: 'https://fakeproject.form.io/fakeform', + getToken: () => {} + }; + const s3 = new S3(mockFormio); + const uploadSpy = sinon.spy(s3, 'uploadParts'); + const completeSpy = sinon.spy(s3, 'completeMultipartUpload'); + + const mockFile = new File(['test!'], 'test.jpg', { type: 'image/jpeg' }); + s3.uploadFile( + mockFile, + 'test.jpg', + '', + () => {}, + '', + {}, + 'test.jpg', + {}, + '', + () => {}, + { partSize: 1, changeMessage: () => {}, progressCallback: () => {} } + ).then((response) => { + assert.equal(response.storage, 's3'); + assert.equal(response.name, 'test.jpg'); + assert.equal(response.bucket, 'fakebucket'); + assert.equal(response.url, 'https://fakebucketurl.aws.com/test.jpg'); + assert.equal(response.acl, undefined); + assert.equal(response.size, 5); + assert.equal(response.type, 'image/jpeg'); + assert.equal(uploadSpy.callCount, 1); + assert.equal(completeSpy.callCount, 1); + done(); + }); + }); + }); + }); +}); diff --git a/src/providers/storage/util.js b/src/providers/storage/util.js new file mode 100644 index 0000000000..f84101648f --- /dev/null +++ b/src/providers/storage/util.js @@ -0,0 +1,6 @@ +export async function withRetries(fn, args, retries = 3, err = null) { + if (!retries) { + throw new Error(err); + } + return fn(...args).catch(() => withRetries(fn, args, retries - 1, err)); +} diff --git a/src/providers/storage/xhr.js b/src/providers/storage/xhr.js index 928d6845b5..51f9c8f24c 100644 --- a/src/providers/storage/xhr.js +++ b/src/providers/storage/xhr.js @@ -21,87 +21,89 @@ const XHR = { path(items) { return items.filter(item => 
!!item).map(XHR.trim).join('/'); }, - upload(formio, type, xhrCb, file, fileName, dir, progressCallback, groupPermissions, groupId, abortCallback) { - return new Promise(((resolve, reject) => { - // Send the pre response to sign the upload. - const pre = new XMLHttpRequest(); - - // This only fires on a network error. - pre.onerror = (err) => { - err.networkError = true; - reject(err); + async upload(formio, type, xhrCallback, file, fileName, dir, progressCallback, groupPermissions, groupId, abortCallback, multipartOptions) { + // make request to Form.io server + const token = formio.getToken(); + let response; + try { + response = await fetch(`${formio.formUrl}/storage/${type}`, { + method: 'POST', + headers: { + 'Accept': 'application/json', + 'Content-Type': 'application/json; charset=UTF-8', + ...(token ? { 'x-jwt-token': token } : {}), + }, + body: JSON.stringify({ + name: XHR.path([dir, fileName]), + size: file.size, + type: file.type, + groupPermissions, + groupId, + multipart: multipartOptions + }) + }); + } + catch (err) { + // only throws on network errors + err.networkError = true; + throw err; + } + if (!response.ok) { + const message = await response.text(); + throw new Error(message || 'Unable to sign file.'); + } + const serverResponse = await response.json(); + return await XHR.makeXhrRequest(formio, xhrCallback, serverResponse, progressCallback, abortCallback); + }, + makeXhrRequest(formio, xhrCallback, serverResponse, progressCallback, abortCallback) { + return new Promise((resolve, reject) => { + // Send the file with data. + const xhr = new XMLHttpRequest(); + xhr.openAndSetHeaders = (...params) => { + xhr.open(...params); + setXhrHeaders(formio, xhr); }; + Promise.resolve(xhrCallback(xhr, serverResponse, abortCallback)).then((payload) => { + // if payload is nullish we can assume the provider took care of the entire upload process + if (!payload) { + return resolve(serverResponse); + } + // Fire on network error. 
+ xhr.onerror = (err) => { + err.networkError = true; + reject(err); + }; - pre.onabort = reject; - pre.onload = () => { - if (pre.status >= 200 && pre.status < 300) { - const response = JSON.parse(pre.response); - - // Send the file with data. - const xhr = new XMLHttpRequest(); - - if (typeof progressCallback === 'function') { - xhr.upload.onprogress = progressCallback; - } - - if (typeof abortCallback === 'function') { - abortCallback(() => xhr.abort()); - } - - xhr.openAndSetHeaders = (...params) => { - xhr.open(...params); - setXhrHeaders(formio, xhr); - }; - - // Fire on network error. - xhr.onerror = (err) => { - err.networkError = true; - reject(err); - }; - - // Fire on network abort. - xhr.onabort = (err) => { - err.networkError = true; - reject(err); - }; - - // Fired when the response has made it back from the server. - xhr.onload = () => { - if (xhr.status >= 200 && xhr.status < 300) { - resolve(response); - } - else { - reject(xhr.response || 'Unable to upload file'); - } - }; + // Fire on network abort. + xhr.onabort = (err) => { + err.networkError = true; + reject(err); + }; - // Set the onabort error callback. - xhr.onabort = reject; + // Set the onabort error callback. + xhr.onabort = reject; - // Get the request and send it to the server. - xhr.send(xhrCb(xhr, response)); - } - else { - reject(pre.response || 'Unable to sign file'); + if (typeof progressCallback === 'function') { + xhr.upload.onprogress = progressCallback; } - }; - pre.open('POST', `${formio.formUrl}/storage/${type}`); - pre.setRequestHeader('Accept', 'application/json'); - pre.setRequestHeader('Content-Type', 'application/json; charset=UTF-8'); - const token = formio.getToken(); - if (token) { - pre.setRequestHeader('x-jwt-token', token); - } + if (typeof abortCallback === 'function') { + abortCallback(() => xhr.abort()); + } + // Fired when the response has made it back from the server. 
+ xhr.onload = () => { + if (xhr.status >= 200 && xhr.status < 300) { + resolve(serverResponse); + } + else { + reject(xhr.response || 'Unable to upload file'); + } + }; - pre.send(JSON.stringify({ - name: XHR.path([dir, fileName]), - size: file.size, - type: file.type, - groupPermissions, - groupId, - })); - })); + // Get the request and send it to the server. + xhr.send(payload); + }).catch(reject); + }); } }; diff --git a/yarn.lock b/yarn.lock index d3822a5834..5597f5e203 100644 --- a/yarn.lock +++ b/yarn.lock @@ -792,6 +792,11 @@ abbrev@^1.0.0: resolved "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== +abortcontroller-polyfill@^1.7.5: + version "1.7.5" + resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz#6738495f4e901fbb57b6c0611d0c75f76c485bed" + integrity sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ== + accepts@~1.3.4: version "1.3.8" resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"