diff --git a/packages/upload/README.md b/packages/upload/README.md
index b9ea1899ef..23e5f7c57f 100644
--- a/packages/upload/README.md
+++ b/packages/upload/README.md
@@ -28,6 +28,29 @@ Once installed, import the component in your application:
import '@vaadin/upload';
```
+## Performance Considerations
+
+When uploading large numbers of files, the component automatically throttles concurrent uploads to prevent browser performance degradation. By default, a maximum of 3 files are uploaded simultaneously, with additional files queued automatically.
+
+You can customize this limit using the `max-concurrent-uploads` attribute:
+
+```html
+<!-- Limit the number of simultaneous uploads to 5 -->
+<vaadin-upload max-concurrent-uploads="5"></vaadin-upload>
+```
+
+```js
+// Or set it programmatically
+upload.maxConcurrentUploads = 5;
+```
+
+This helps prevent:
+- Browser XHR limitations (failures when uploading 2000+ files simultaneously)
+- Performance degradation with hundreds of concurrent uploads
+- Network congestion on slower connections
+
+The default value of 3 balances upload performance with network resource conservation.
+
## Contributing
Read the [contributing guide](https://vaadin.com/docs/latest/contributing) to learn about our development process, how to propose bugfixes and improvements, and how to test your changes to Vaadin components.
diff --git a/packages/upload/src/vaadin-upload-mixin.d.ts b/packages/upload/src/vaadin-upload-mixin.d.ts
index 477b0c59d7..bc9a4b589d 100644
--- a/packages/upload/src/vaadin-upload-mixin.d.ts
+++ b/packages/upload/src/vaadin-upload-mixin.d.ts
@@ -205,6 +205,16 @@ export declare class UploadMixinClass {
*/
uploadFormat: UploadFormat;
+ /**
+ * Specifies the maximum number of files that can be uploaded simultaneously.
+ * This helps prevent browser performance degradation and XHR limitations when
+ * uploading large numbers of files. Files exceeding this limit will be queued
+ * and uploaded as active uploads complete.
+ * @attr {number} max-concurrent-uploads
+ * @default 3
+ */
+ maxConcurrentUploads: number;
+
/**
* The object used to localize this component. To change the default
* localization, replace this with an object that provides all properties, or
diff --git a/packages/upload/src/vaadin-upload-mixin.js b/packages/upload/src/vaadin-upload-mixin.js
index bc0b6384c1..a58c51a836 100644
--- a/packages/upload/src/vaadin-upload-mixin.js
+++ b/packages/upload/src/vaadin-upload-mixin.js
@@ -322,6 +322,20 @@ export const UploadMixin = (superClass) =>
value: 'raw',
},
+ /**
+ * Specifies the maximum number of files that can be uploaded simultaneously.
+ * This helps prevent browser performance degradation and XHR limitations when
+ * uploading large numbers of files. Files exceeding this limit will be queued
+ * and uploaded as active uploads complete.
+ * @attr {number} max-concurrent-uploads
+ * @type {number}
+ */
+ maxConcurrentUploads: {
+ type: Number,
+ value: 3,
+ sync: true,
+ },
+
/**
* Pass-through to input's capture attribute. Allows user to trigger device inputs
* such as camera or microphone immediately.
@@ -347,6 +361,18 @@ export const UploadMixin = (superClass) =>
_files: {
type: Array,
},
+
+ /** @private */
+ _uploadQueue: {
+ type: Array,
+ value: () => [],
+ },
+
+ /** @private */
+ _activeUploads: {
+ type: Number,
+ value: 0,
+ },
};
}
@@ -698,12 +724,48 @@ export const UploadMixin = (superClass) =>
Array.prototype.forEach.call(files, this._uploadFile.bind(this));
}
+ /**
+ * Process the upload queue by starting uploads for queued files
+ * if there is available capacity.
+ * @private
+ */
+ _processQueue() {
+ // Process as many queued files as we have capacity for
+ while (this._uploadQueue.length > 0 && this._activeUploads < this.maxConcurrentUploads) {
+ const nextFile = this._uploadQueue.shift();
+ if (nextFile && !nextFile.complete && !nextFile.uploading) {
+ this._uploadFile(nextFile);
+ }
+ }
+ }
+
/** @private */
_uploadFile(file) {
if (file.uploading) {
return;
}
+ // Check if we've reached the concurrent upload limit
+ if (this._activeUploads >= this.maxConcurrentUploads) {
+ // Add to queue if not already queued
+ if (!this._uploadQueue.includes(file)) {
+ this._uploadQueue.push(file);
+ file.held = true;
+ file.status = this.__effectiveI18n.uploading.status.held;
+ this._renderFileList();
+ }
+ return;
+ }
+
+ // Remove from queue if it was queued
+ const queueIndex = this._uploadQueue.indexOf(file);
+ if (queueIndex >= 0) {
+ this._uploadQueue.splice(queueIndex, 1);
+ }
+
+ // Increment active uploads counter
+ this._activeUploads += 1;
+
const ini = Date.now();
const xhr = (file.xhr = this._createXhr());
@@ -745,7 +807,13 @@ export const UploadMixin = (superClass) =>
if (xhr.readyState === 4) {
clearTimeout(stalledId);
file.indeterminate = file.uploading = false;
+
+ // Decrement active uploads counter
+ this._activeUploads -= 1;
+
if (file.abort) {
+ // Process queue even on abort
+ this._processQueue();
return;
}
file.status = '';
@@ -759,6 +827,8 @@ export const UploadMixin = (superClass) =>
);
if (!evt) {
+ // Process queue even if event was cancelled
+ this._processQueue();
return;
}
if (xhr.status === 0) {
@@ -776,6 +846,9 @@ export const UploadMixin = (superClass) =>
}),
);
this._renderFileList();
+
+ // Process the queue to start the next upload
+ this._processQueue();
}
};
@@ -877,10 +950,24 @@ export const UploadMixin = (superClass) =>
);
if (evt) {
file.abort = true;
+
+ // Remove from queue if it was queued
+ const queueIndex = this._uploadQueue.indexOf(file);
+ if (queueIndex >= 0) {
+ this._uploadQueue.splice(queueIndex, 1);
+ }
+
+      // NOTE: do not decrement the active-uploads counter here. For an
+      // active upload, xhr.abort() moves the XHR to readyState 4 and fires
+      // readystatechange, whose handler already decrements the counter;
+      // decrementing in both places would double-decrement and corrupt it.
+
if (file.xhr) {
file.xhr.abort();
}
this._removeFile(file);
+ // Process the queue to start the next upload
+ this._processQueue();
}
}
diff --git a/packages/upload/test/adding-files.test.js b/packages/upload/test/adding-files.test.js
index 8e2787e142..3a279b9eff 100644
--- a/packages/upload/test/adding-files.test.js
+++ b/packages/upload/test/adding-files.test.js
@@ -19,7 +19,7 @@ describe('adding files', () => {
beforeEach(async () => {
upload = fixtureSync(`<vaadin-upload></vaadin-upload>`);
- upload.target = 'http://foo.com/bar';
+ upload.target = 'https://foo.com/bar';
upload._createXhr = xhrCreator({ size: testFileSize, uploadTime: 200, stepTime: 50 });
await nextRender();
files = createFiles(2, testFileSize, 'application/x-octet-stream');
@@ -332,12 +332,17 @@ describe('adding files', () => {
describe('start upload', () => {
it('should automatically start upload', () => {
+ upload.maxConcurrentUploads = 1;
const uploadStartSpy = sinon.spy();
upload.addEventListener('upload-start', uploadStartSpy);
files.forEach(upload._addFile.bind(upload));
- expect(uploadStartSpy.calledTwice).to.be.true;
- expect(upload.files[0].held).to.be.false;
+ // With queue behavior, only the first file starts uploading immediately
+ expect(uploadStartSpy.calledOnce).to.be.true;
+ // Files are prepended, so the first file added is at index 1
+ expect(upload.files[1].held).to.be.false;
+ // Second file (at index 0) should be held in queue
+ expect(upload.files[0].held).to.be.true;
});
it('should not automatically start upload when noAuto flag is set', () => {
diff --git a/packages/upload/test/concurrent-uploads.test.js b/packages/upload/test/concurrent-uploads.test.js
new file mode 100644
index 0000000000..c5769c48be
--- /dev/null
+++ b/packages/upload/test/concurrent-uploads.test.js
@@ -0,0 +1,415 @@
+import { expect } from '@vaadin/chai-plugins';
+import { fixtureSync, nextRender } from '@vaadin/testing-helpers';
+import sinon from 'sinon';
+import '../src/vaadin-upload.js';
+import { createFiles, xhrCreator } from './helpers.js';
+
+describe('concurrent uploads', () => {
+ let upload;
+
+ beforeEach(async () => {
+    upload = fixtureSync(`<vaadin-upload></vaadin-upload>`);
+ upload.target = 'https://foo.com/bar';
+ await nextRender();
+ });
+
+ describe('maxConcurrentUploads property', () => {
+ it('should have default value of 3', () => {
+ expect(upload.maxConcurrentUploads).to.equal(3);
+ });
+
+ it('should accept custom value', () => {
+ upload.maxConcurrentUploads = 5;
+ expect(upload.maxConcurrentUploads).to.equal(5);
+ });
+
+ it('should accept Infinity for unlimited uploads', () => {
+ upload.maxConcurrentUploads = Infinity;
+ expect(upload.maxConcurrentUploads).to.equal(Infinity);
+ });
+ });
+
+ describe('upload queue management', () => {
+ let clock;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should track active uploads count', async () => {
+ const files = createFiles(3, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ expect(upload._activeUploads).to.equal(0);
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ });
+
+ it('should queue files when exceeding concurrent limit', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(3);
+ });
+
+ it('should show queued status for files in queue', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ // First 2 files should be uploading
+ expect(files[0].uploading).to.be.true;
+ expect(files[1].uploading).to.be.true;
+
+ // Remaining files should be queued
+ expect(files[2].held).to.be.true;
+ expect(files[2].status).to.equal(upload.i18n.uploading.status.held);
+ expect(files[3].held).to.be.true;
+ expect(files[4].held).to.be.true;
+ });
+
+ it('should process queue as uploads complete', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(3);
+
+ // Wait for first uploads to complete
+ await clock.tickAsync(250);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(1);
+
+ // Wait for next batch to complete
+ await clock.tickAsync(250);
+
+ expect(upload._activeUploads).to.equal(1);
+ expect(upload._uploadQueue.length).to.equal(0);
+ });
+
+ it('should handle all uploads completing', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+
+ // Wait for all uploads to complete
+ await clock.tickAsync(1000);
+
+ expect(upload._activeUploads).to.equal(0);
+ expect(upload._uploadQueue.length).to.equal(0);
+ files.forEach((file) => {
+ expect(file.complete).to.be.true;
+ });
+ });
+
+ it('should work with manual upload mode', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.noAuto = true;
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(0);
+ expect(upload._uploadQueue.length).to.equal(0);
+
+ // Start uploads manually
+ upload.uploadFiles();
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(3);
+ });
+ });
+
+ describe('upload queue with abort', () => {
+ let clock;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should remove file from queue when aborted', async () => {
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._uploadQueue.length).to.equal(3);
+
+ // Abort a queued file
+ upload._abortFileUpload(files[3]);
+ await clock.tickAsync(1);
+
+ expect(upload._uploadQueue.length).to.equal(2);
+ expect(upload._uploadQueue.includes(files[3])).to.be.false;
+ });
+
+ it('should process queue after file is aborted', async () => {
+ const files = createFiles(4, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ const initialActive = upload._activeUploads;
+ const initialQueued = upload._uploadQueue.length;
+
+ expect(initialActive).to.equal(2);
+ expect(initialQueued).to.equal(2);
+
+ // Abort a queued file (not an active upload)
+ upload._abortFileUpload(files[3]);
+ await clock.tickAsync(1);
+
+ // File should be removed from queue
+ expect(upload._uploadQueue.length).to.equal(initialQueued - 1);
+ });
+ });
+
+ describe('upload queue with errors', () => {
+ let clock;
+
+ beforeEach(() => {
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should process queue when upload fails', async () => {
+ upload._createXhr = xhrCreator({
+ size: 100,
+ uploadTime: 100,
+ stepTime: 25,
+ serverTime: 10,
+ serverValidation: () => ({ status: 500, statusText: 'Error' }),
+ });
+
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(3);
+
+ // Wait for first 2 uploads to fail (uploadTime + stepTime + serverTime = 100 + 25 + 10 = 135ms)
+ await clock.tickAsync(150);
+
+ // After first 2 fail, next 2 should start from queue
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(1);
+ });
+
+ it('should handle response event cancellation', async () => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+
+ const files = createFiles(5, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload.addEventListener('upload-response', (e) => {
+ e.preventDefault();
+ });
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+
+ // Wait for uploads to reach completion state
+ await clock.tickAsync(250);
+
+ // When response is prevented, files stay in uploading state
+ // but queue should still be processed once xhr completes
+ expect(upload._activeUploads).to.be.greaterThan(0);
+ });
+ });
+
+ describe('unlimited concurrent uploads', () => {
+ let clock;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should allow unlimited uploads when maxConcurrentUploads is Infinity', async () => {
+ const files = createFiles(20, 100, 'application/json');
+ upload.maxConcurrentUploads = Infinity;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(20);
+ expect(upload._uploadQueue.length).to.equal(0);
+ });
+ });
+
+ describe('dynamic maxConcurrentUploads change', () => {
+ let clock;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should respect new limit when increased during uploads', async () => {
+ const files = createFiles(10, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+ expect(upload._uploadQueue.length).to.equal(8);
+
+ // Increase limit
+ upload.maxConcurrentUploads = 5;
+
+ // Manually process queue with new limit
+ upload._processQueue();
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(5);
+ expect(upload._uploadQueue.length).to.equal(5);
+ });
+ });
+
+ describe('retry with queue', () => {
+ let clock;
+
+ beforeEach(() => {
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should handle retry of failed file with queue', async () => {
+ upload._createXhr = xhrCreator({
+ size: 100,
+ serverValidation: () => ({ status: 500, statusText: 'Error' }),
+ });
+
+ const files = createFiles(3, 100, 'application/json');
+ upload.maxConcurrentUploads = 2;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(2);
+
+ // Wait for uploads to fail
+ await clock.tickAsync(100);
+
+ // Replace XHR creator with successful one
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+
+ // Retry first file
+ upload._retryFileUpload(files[0]);
+ await clock.tickAsync(10);
+
+ // Should respect concurrent limit
+ expect(upload._activeUploads).to.be.lte(upload.maxConcurrentUploads);
+ });
+ });
+
+ describe('edge cases', () => {
+ let clock;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: 100, uploadTime: 200, stepTime: 50 });
+ clock = sinon.useFakeTimers({
+ shouldClearNativeTimers: true,
+ });
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should handle single file with limit of 1', async () => {
+ const files = createFiles(1, 100, 'application/json');
+ upload.maxConcurrentUploads = 1;
+
+ upload._addFiles(files);
+ await clock.tickAsync(10);
+
+ expect(upload._activeUploads).to.equal(1);
+ expect(upload._uploadQueue.length).to.equal(0);
+ });
+
+ it('should handle zero files', () => {
+ upload.maxConcurrentUploads = 5;
+
+ expect(upload._activeUploads).to.equal(0);
+ expect(upload._uploadQueue.length).to.equal(0);
+ });
+
+ it('should not start upload if already uploading', async () => {
+ const files = createFiles(1, 100, 'application/json');
+ upload.maxConcurrentUploads = 1;
+
+ upload._uploadFile(files[0]);
+ await clock.tickAsync(10);
+
+ const initialActiveCount = upload._activeUploads;
+
+ // Try to upload same file again
+ upload._uploadFile(files[0]);
+ await clock.tickAsync(10);
+
+ // Should not increase active count
+ expect(upload._activeUploads).to.equal(initialActiveCount);
+ });
+ });
+});
diff --git a/packages/upload/test/upload.test.js b/packages/upload/test/upload.test.js
index 2d6c8567d4..b9642566cc 100644
--- a/packages/upload/test/upload.test.js
+++ b/packages/upload/test/upload.test.js
@@ -9,7 +9,7 @@ describe('upload', () => {
beforeEach(async () => {
upload = fixtureSync(`<vaadin-upload></vaadin-upload>`);
- upload.target = 'http://foo.com/bar';
+ upload.target = 'https://foo.com/bar';
file = createFile(100000, 'application/unknown');
await nextRender();
});
@@ -443,16 +443,21 @@ describe('upload', () => {
upload.files.forEach((file) => {
expect(file.uploading).not.to.be.ok;
});
+ let firstUploadStartFired = false;
upload.addEventListener('upload-start', (e) => {
- expect(e.detail.xhr).to.be.ok;
- expect(e.detail.file).to.be.ok;
- expect(e.detail.file.name).to.equal(tempFileName);
- expect(e.detail.file.uploading).to.be.ok;
+ if (!firstUploadStartFired) {
+ firstUploadStartFired = true;
+ expect(e.detail.xhr).to.be.ok;
+ expect(e.detail.file).to.be.ok;
+ expect(e.detail.file.name).to.equal(tempFileName);
+ expect(e.detail.file.uploading).to.be.ok;
- for (let i = 0; i < upload.files.length - 1; i++) {
- expect(upload.files[i].uploading).not.to.be.ok;
+ for (let i = 0; i < upload.files.length - 1; i++) {
+ expect(upload.files[i].uploading).not.to.be.ok;
+ }
+ done();
}
- done();
+ // With queue behavior, other files will start after the first completes - ignore those events
});
upload.uploadFiles([upload.files[2]]);
});
@@ -545,6 +550,142 @@ describe('upload', () => {
});
});
+ describe('Upload Queue', () => {
+ let clock, files;
+
+ beforeEach(() => {
+ upload._createXhr = xhrCreator({ size: file.size, uploadTime: 200, stepTime: 50 });
+ upload.maxConcurrentUploads = 1;
+ clock = sinon.useFakeTimers();
+ });
+
+ afterEach(() => {
+ clock.restore();
+ });
+
+ it('should upload multiple files one at a time', async () => {
+ files = createFiles(3, 512, 'application/json');
+ upload._addFiles(files);
+
+ // Files are prepended, so files[0] is at index 2, files[1] at index 1, files[2] at index 0
+ // First file added (files[0]) should start uploading
+ await clock.tickAsync(10);
+ expect(upload.files[2].uploading).to.be.true;
+ expect(upload.files[2].held).to.be.false;
+ expect(upload.files[1].held).to.be.true;
+ expect(upload.files[0].held).to.be.true;
+
+ // Wait for first file to complete (connectTime + uploadTime + serverTime = 10 + 200 + 10 = 220ms)
+ await clock.tickAsync(220);
+ expect(upload.files[2].complete).to.be.true;
+ expect(upload.files[2].uploading).to.be.false;
+
+ // Second file (files[1]) should now start uploading
+ await clock.tickAsync(10);
+ expect(upload.files[1].uploading).to.be.true;
+ expect(upload.files[1].held).to.be.false;
+ expect(upload.files[0].held).to.be.true;
+
+ // Wait for second file to complete
+ await clock.tickAsync(220);
+ expect(upload.files[1].complete).to.be.true;
+ expect(upload.files[1].uploading).to.be.false;
+
+ // Third file (files[2]) should now start uploading
+ await clock.tickAsync(10);
+ expect(upload.files[0].uploading).to.be.true;
+ expect(upload.files[0].held).to.be.false;
+
+ // Wait for third file to complete
+ await clock.tickAsync(220);
+ expect(upload.files[0].complete).to.be.true;
+ expect(upload.files[0].uploading).to.be.false;
+ });
+
+ it('should process next file in queue after one completes with error', async () => {
+ upload._createXhr = xhrCreator({
+ size: 512,
+ uploadTime: 200,
+ stepTime: 50,
+ serverValidation: () => {
+ return { status: 500, statusText: 'Server Error' };
+ },
+ });
+
+ const errorSpy = sinon.spy();
+ const startSpy = sinon.spy();
+ upload.addEventListener('upload-error', errorSpy);
+ upload.addEventListener('upload-start', startSpy);
+
+ files = createFiles(2, 512, 'application/json');
+ upload._addFiles(files);
+
+ // First file should start
+ await clock.tickAsync(10);
+ expect(startSpy.callCount).to.equal(1);
+
+ // Wait for first file to complete with error
+ await clock.tickAsync(220);
+ expect(errorSpy.callCount).to.equal(1);
+
+ // Second file should now start
+ await clock.tickAsync(10);
+ expect(startSpy.callCount).to.equal(2);
+ expect(upload.files.some((f) => f.uploading)).to.be.true;
+ });
+
+ it('should process next file in queue after one is aborted', async () => {
+ files = createFiles(2, 512, 'application/json');
+ upload._addFiles(files);
+
+ // First file added (at index 1) should start uploading
+ await clock.tickAsync(10);
+ expect(upload.files[1].uploading).to.be.true;
+ expect(upload.files[0].held).to.be.true;
+
+ // Abort the first file (at index 1)
+ upload._abortFileUpload(upload.files[1]);
+
+ // Second file (now at index 0 after first is removed) should now start uploading
+ await clock.tickAsync(10);
+ expect(upload.files[0].uploading).to.be.true;
+ });
+
+ it('should only start one file when uploadFiles is called with multiple files', async () => {
+ upload.noAuto = true;
+ files = createFiles(3, 512, 'application/json');
+ upload._addFiles(files);
+
+ // No files should be uploading yet - all should be held
+ await clock.tickAsync(10);
+ expect(upload.files[0].held).to.be.true;
+ expect(upload.files[1].held).to.be.true;
+ expect(upload.files[2].held).to.be.true;
+
+ // Call uploadFiles
+ upload.uploadFiles();
+
+ // Only first file (at index 2) should start uploading - wait for it to begin
+ await clock.tickAsync(20);
+ expect(upload.files.length).to.equal(3);
+ // One file should be uploading (the oldest one added)
+ const uploadingFile = upload.files.find((f) => f.uploading);
+ expect(uploadingFile).to.be.ok;
+ // The other two should still be held
+ const heldFiles = upload.files.filter((f) => f.held);
+ expect(heldFiles.length).to.equal(2);
+
+ // Wait for first file to complete
+ await clock.tickAsync(220);
+
+ // Second file should start automatically
+ await clock.tickAsync(10);
+ expect(upload.files.some((f) => f.uploading)).to.be.true;
+ const remainingHeldFiles = upload.files.filter((f) => f.held);
+ expect(remainingHeldFiles.length).to.equal(1);
+ });
+ });
+
describe('Upload format', () => {
let clock;