Multipart upload provides high-performance, reliable uploads of large files by splitting them into parts that can be uploaded concurrently. The managed multipartUpload method handles part management automatically and supports progress tracking and checkpoint-based resume.
async function multipartUpload(name: string, file: MultipartSource, options?: MultipartUploadOptions): Promise<MultipartUploadResult>;
type MultipartSource = string | Buffer | ReadableStream | File;
interface MultipartUploadOptions {
parallel?: number;
partSize?: number;
progress?: (percentage: number, checkpoint?: Checkpoint, res?: any) => void;
checkpoint?: Checkpoint;
meta?: Record<string, string>;
headers?: Record<string, string>;
callback?: CallbackConfig;
timeout?: number | number[];
mime?: string;
copySource?: CopySource;
cancellationToken?: CancellationToken;
}
interface MultipartUploadResult {
bucket: string;
name: string;
etag: string;
location: string;
res: ResponseInfo;
size?: number;
aborted?: boolean;
rt?: number;
}
interface Checkpoint {
file: CheckpointFile;
name: string;
fileSize: number;
partSize: number;
uploadId: string;
doneParts: CompletedPart[];
}
interface CheckpointFile {
path?: string;
size: number;
lastModified: number;
}
interface CompletedPart {
number: number;
etag: string;
}

// Basic multipart upload
const result = await client.multipartUpload('large-file.zip', '/path/to/file.zip', {
parallel: 4, // Upload 4 parts concurrently
partSize: 1024 * 1024, // 1MB per part
progress: (percentage, checkpoint) => {
console.log(`Upload progress: ${Math.round(percentage * 100)}%`);
}
});
// Resumable multipart upload with checkpoint
let checkpoint = null;
const result = await client.multipartUpload('video.mp4', buffer, {
checkpoint,
progress: (percentage, cpt) => {
checkpoint = cpt; // Save checkpoint for resume
console.log(`Progress: ${Math.round(percentage * 100)}%`);
}
});
// Upload with metadata and custom headers
const result = await client.multipartUpload('document.pdf', stream, {
parallel: 3,
partSize: 5 * 1024 * 1024, // 5MB parts
meta: {
author: 'John Doe',
category: 'documents'
},
headers: {
'Cache-Control': 'max-age=3600',
'Content-Disposition': 'attachment; filename="report.pdf"'
}
});

async function initMultipartUpload(name: string, options?: InitMultipartUploadOptions): Promise<InitMultipartUploadResult>;
interface InitMultipartUploadOptions extends RequestOptions {
meta?: Record<string, string>;
headers?: Record<string, string>;
mime?: string;
callback?: CallbackConfig;
storageClass?: 'Standard' | 'IA' | 'Archive' | 'ColdArchive';
serverSideEncryption?: 'AES256' | 'KMS';
serverSideEncryptionKeyId?: string;
}
interface InitMultipartUploadResult {
bucket: string;
name: string;
uploadId: string;
res: ResponseInfo;
}
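initMultipartUpload also accepts storage class and server-side encryption options that the examples below don't exercise. A minimal sketch, assuming the option values documented in InitMultipartUploadOptions (the object key is a placeholder):

// Initiate an upload targeting IA storage with server-side encryption
// (a sketch; the object key and option values are illustrative)
const init = await client.initMultipartUpload('archive/data.bin', {
  mime: 'application/octet-stream',
  storageClass: 'IA',
  serverSideEncryption: 'AES256'
});
console.log('Upload ID:', init.uploadId);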
async function uploadPart(name: string, uploadId: string, partNumber: number, file: PartSource, start?: number, end?: number, options?: RequestOptions): Promise<UploadPartResult>;

type PartSource = string | Buffer | ReadableStream;
interface UploadPartResult {
name: string;
etag: string;
res: ResponseInfo;
size?: number;
}

async function completeMultipartUpload(name: string, uploadId: string, parts: CompletedPart[], options?: CompleteMultipartUploadOptions): Promise<CompleteMultipartUploadResult>;
interface CompleteMultipartUploadOptions extends RequestOptions {
callback?: CallbackConfig;
}
interface CompleteMultipartUploadResult {
bucket: string;
name: string;
etag: string;
location: string;
res: ResponseInfo;
}

async function abortMultipartUpload(name: string, uploadId: string, options?: RequestOptions): Promise<AbortMultipartUploadResult>;
interface AbortMultipartUploadResult {
res: ResponseInfo;
}
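Aborting discards any parts uploaded so far for that upload ID. A minimal sketch (uploadId would come from initMultipartUpload or a saved checkpoint; the object name is a placeholder):

// Abort an in-progress upload and discard its uploaded parts
await client.abortMultipartUpload('large-video.mp4', uploadId);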
async function listUploads(query?: ListUploadsQuery, options?: RequestOptions): Promise<ListUploadsResult>;

interface ListUploadsQuery {
prefix?: string;
delimiter?: string;
'key-marker'?: string;
'upload-id-marker'?: string;
'max-uploads'?: number;
}
interface ListUploadsResult {
uploads: UploadInfo[];
commonPrefixes: string[];
isTruncated: boolean;
nextKeyMarker?: string;
nextUploadIdMarker?: string;
res: ResponseInfo;
}
interface UploadInfo {
name: string;
uploadId: string;
initiated: Date;
storageClass: string;
owner: Owner;
}

async function listParts(name: string, uploadId: string, query?: ListPartsQuery, options?: RequestOptions): Promise<ListPartsResult>;
interface ListPartsQuery {
'part-number-marker'?: number;
'max-parts'?: number;
}
interface ListPartsResult {
uploadId: string;
bucket: string;
name: string;
partNumberMarker: number;
nextPartNumberMarker?: number;
maxParts: number;
isTruncated: boolean;
parts: PartInfo[];
aborted: boolean;
res: ResponseInfo;
}
interface PartInfo {
partNumber: number;
lastModified: Date;
etag: string;
size: number;
}
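Listing parts is useful when auditing or resuming a manual upload. A sketch that pages through every uploaded part, assuming the query keys and result fields documented above (the object name and uploadId are placeholders):

// Page through all uploaded parts of an in-progress upload
let marker;
let truncated = true;
const allParts = [];
while (truncated) {
  const page = await client.listParts('large-video.mp4', uploadId, {
    'part-number-marker': marker,
    'max-parts': 100
  });
  allParts.push(...page.parts);
  truncated = page.isTruncated;
  marker = page.nextPartNumberMarker;
}
console.log(`Found ${allParts.length} uploaded parts`);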
async function uploadPartCopy(name: string, uploadId: string, partNumber: number, range: string, sourceData: CopySource, options?: RequestOptions): Promise<UploadPartCopyResult>;

interface CopySource {
sourceBucket?: string;
sourceKey: string;
sourceVersionId?: string;
}
interface UploadPartCopyResult {
etag: string;
lastModified: Date;
res: ResponseInfo;
}

// Initialize multipart upload for copying
const init = await client.initMultipartUpload('backup/large-file-copy.zip');
// Copy parts from source object
const part1 = await client.uploadPartCopy(
'backup/large-file-copy.zip',
init.uploadId,
1,
'bytes=0-5242879', // First 5MB
{
sourceBucket: 'source-bucket',
sourceKey: 'original/large-file.zip'
}
);
const part2 = await client.uploadPartCopy(
'backup/large-file-copy.zip',
init.uploadId,
2,
'bytes=5242880-10485759', // Next 5MB
{
sourceBucket: 'source-bucket',
sourceKey: 'original/large-file.zip'
}
);
// Complete the multipart copy
await client.completeMultipartUpload('backup/large-file-copy.zip', init.uploadId, [
{ number: 1, etag: part1.etag },
{ number: 2, etag: part2.etag }
]);

// 1. Initialize multipart upload
const init = await client.initMultipartUpload('large-video.mp4', {
mime: 'video/mp4', // content type is set via mime, not user metadata
headers: { 'Cache-Control': 'max-age=7200' }
});
console.log('Upload ID:', init.uploadId);
// 2. Upload parts
const partSize = 5 * 1024 * 1024; // 5MB per part
const parts = [];
const fs = require('fs');
const fileBuffer = fs.readFileSync('/path/to/large-video.mp4');
for (let i = 0; i < Math.ceil(fileBuffer.length / partSize); i++) {
const start = i * partSize;
const end = Math.min(start + partSize, fileBuffer.length);
const partBuffer = fileBuffer.slice(start, end);
const partResult = await client.uploadPart(
'large-video.mp4',
init.uploadId,
i + 1,
partBuffer
);
parts.push({
number: i + 1,
etag: partResult.etag
});
console.log(`Uploaded part ${i + 1}`);
}
// 3. Complete multipart upload
const result = await client.completeMultipartUpload(
'large-video.mp4',
init.uploadId,
parts
);
console.log('Upload completed:', result.location);

interface ProgressCallback {
(percentage: number, checkpoint?: Checkpoint, res?: any): void;
}
interface CancellationToken {
cancelled: boolean;
cancel(): void;
}

// Create cancellation token
const cancellationToken = {
cancelled: false,
cancel() { this.cancelled = true; }
};
// Start upload with progress tracking and cancellation support
const uploadPromise = client.multipartUpload('large-file.bin', fileBuffer, {
parallel: 3,
partSize: 1024 * 1024,
cancellationToken,
progress: (percentage, checkpoint) => {
console.log(`Progress: ${Math.round(percentage * 100)}%`);
// Save checkpoint for resume capability
if (checkpoint) {
localStorage.setItem('uploadCheckpoint', JSON.stringify(checkpoint));
}
// Cancel after 50% for demo
if (percentage > 0.5) {
cancellationToken.cancel();
}
}
});
try {
const result = await uploadPromise;
console.log('Upload completed:', result);
} catch (error) {
if (cancellationToken.cancelled) {
console.log('Upload was cancelled');
// Resume later using saved checkpoint
const saved = localStorage.getItem('uploadCheckpoint');
const savedCheckpoint = saved ? JSON.parse(saved) : null;
if (savedCheckpoint) {
console.log('Resuming upload...');
const resumeResult = await client.multipartUpload('large-file.bin', fileBuffer, {
checkpoint: savedCheckpoint,
progress: (percentage) => {
console.log(`Resume progress: ${Math.round(percentage * 100)}%`);
}
});
}
}
}

async function robustMultipartUpload(name, file, options = {}) {
let uploadId = null;
try {
// Start multipart upload
const result = await client.multipartUpload(name, file, {
...options,
progress: (percentage, checkpoint) => {
if (checkpoint) {
uploadId = checkpoint.uploadId;
}
if (options.progress) {
options.progress(percentage, checkpoint);
}
}
});
return result;
} catch (error) {
console.error('Multipart upload failed:', error);
// Cleanup incomplete upload
if (uploadId) {
try {
await client.abortMultipartUpload(name, uploadId);
console.log('Cleaned up incomplete upload');
} catch (cleanupError) {
console.error('Failed to cleanup upload:', cleanupError);
}
}
throw error;
}
}

// List and cleanup old incomplete uploads
async function cleanupIncompleteUploads(olderThanDays = 7) {
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - olderThanDays);
  // Page through all in-progress uploads; listUploads returns at most one page
  let query = {};
  let isTruncated = true;
  while (isTruncated) {
    const result = await client.listUploads(query);
    for (const upload of result.uploads) {
      if (upload.initiated < cutoffDate) {
        try {
          await client.abortMultipartUpload(upload.name, upload.uploadId);
          console.log(`Cleaned up old upload: ${upload.name}`);
        } catch (error) {
          console.error(`Failed to clean up ${upload.name}:`, error);
        }
      }
    }
    isTruncated = result.isTruncated;
    query = {
      'key-marker': result.nextKeyMarker,
      'upload-id-marker': result.nextUploadIdMarker
    };
  }
}

interface ResponseInfo {
status: number;
headers: Record<string, string>;
size: number;
aborted: boolean;
rt: number;
keepAliveSocket: boolean;
}
interface CallbackConfig {
url: string;
host?: string;
body?: string;
contentType?: string;
customValue?: Record<string, string>;
}
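A callback config can be attached to a managed upload so the storage service notifies an application server when the upload completes. A sketch, assuming the ${object}/${etag}/${x:...} substitution variables of the service's upload-callback convention (the URL and custom values are placeholders):

// Upload with a completion callback delivered to an application server
const result = await client.multipartUpload('reports/q3.pdf', '/path/to/q3.pdf', {
  callback: {
    url: 'https://example.com/oss-callback',
    body: 'object=${object}&etag=${etag}&my_var=${x:my_var}',
    contentType: 'application/x-www-form-urlencoded',
    customValue: { my_var: 'quarterly-report' }
  }
});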
interface Owner {
id: string;
displayName: string;
}

function cancel(abort?: AbortConfig): void;
function isCancel(): boolean;
function resetCancelFlag(): void;
interface AbortConfig {
name: string;
uploadId: string;
options?: RequestOptions;
}

// Start a multipart upload
let shouldCancel = false; // would be set to true elsewhere, e.g. by a cancel button handler
const uploadPromise = client.multipartUpload('large-file.zip', fileBuffer, {
parallel: 3,
partSize: 1024 * 1024,
progress: (percentage, checkpoint) => {
console.log(`Progress: ${Math.round(percentage * 100)}%`);
// Cancel upload if needed (e.g., user clicks cancel)
if (shouldCancel) {
client.cancel({
name: 'large-file.zip',
uploadId: checkpoint?.uploadId,
options: { timeout: 10000 }
});
}
}
});
try {
const result = await uploadPromise;
console.log('Upload completed:', result);
} catch (error) {
if (client.isCancel()) {
console.log('Upload was cancelled by user');
// Reset flag for next operation
client.resetCancelFlag();
} else {
console.error('Upload failed:', error);
}
}

async function multipartUploadCopy(name: string, sourceData: CopySourceData, options?: MultipartCopyOptions): Promise<MultipartCopyResult>;
interface CopySourceData {
sourceBucket?: string;
sourceKey: string;
sourceVersionId?: string;
}
interface MultipartCopyOptions {
partSize?: number;
parallel?: number;
progress?: (percentage: number, checkpoint: CopyCheckpoint) => void;
checkpoint?: CopyCheckpoint;
meta?: Record<string, string>;
headers?: Record<string, string>;
timeout?: number | number[];
metadataDirective?: 'COPY' | 'REPLACE';
}
interface MultipartCopyResult {
name: string;
etag: string;
res: ResponseInfo;
}
interface CopyCheckpoint {
sourceBucket: string;
sourceKey: string;
fileSize: number;
partSize: number;
uploadId: string;
doneParts: CompletedPart[];
}

// Copy large object using multipart copy
const copyResult = await client.multipartUploadCopy('backup/large-video.mp4', {
sourceBucket: 'source-bucket',
sourceKey: 'videos/original.mp4'
}, {
partSize: 100 * 1024 * 1024, // 100MB parts for large files
parallel: 3,
progress: (percentage, checkpoint) => {
console.log(`Copy progress: ${Math.round(percentage * 100)}%`);
// Save checkpoint for resume capability
saveCopyCheckpoint(checkpoint);
},
meta: {
'backup-date': new Date().toISOString(),
'source': 'multipart-copy'
},
metadataDirective: 'REPLACE'
});
console.log('Copy completed:', copyResult.name);

// Resume interrupted multipart copy
let copyCheckpoint = loadCopyCheckpoint();
const copyResult = await client.multipartUploadCopy('backup/large-file.bin', {
sourceBucket: 'source-bucket',
sourceKey: 'data/large-file.bin'
}, {
checkpoint: copyCheckpoint,
progress: (percentage, checkpoint) => {
copyCheckpoint = checkpoint;
saveCopyCheckpoint(checkpoint);
console.log(`Resume progress: ${Math.round(percentage * 100)}%`);
}
});

The multipart upload implementation includes intelligent fallback mechanisms:

- Files small enough to fit in a single part are sent with a simple put() upload instead of multipart
- When no partSize is specified, the system calculates an optimal part size based on file size (see the sketch after the internal utility definitions below)

// Internal utility functions (exposed for advanced use)
_getPartSize(fileSize: number, partSize?: number): number;
_divideParts(fileSize: number, partSize: number): PartInfo[];
interface PartInfo {
number: number;
start: number;
end: number;
size: number;
}

The browser build also provides a WebFileReadStream class for efficient browser file handling.
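To make the part-size fallback concrete, here is an illustrative sketch of part division consistent with the PartInfo shape above. It is not the library's actual implementation; the 10,000-part cap and 1MB default part size are assumptions based on common object-storage limits.

// Illustrative sketch only; not the library's actual _divideParts.
// Assumes a 10,000-part service limit and a 1MB default part size.
function dividePartsSketch(fileSize: number, partSize?: number): PartInfo[] {
  const MAX_PARTS = 10000;
  const DEFAULT_PART_SIZE = 1024 * 1024; // 1MB
  // Grow the part size when the requested/default size would exceed the part-count limit
  const size = Math.max(partSize ?? DEFAULT_PART_SIZE, Math.ceil(fileSize / MAX_PARTS));
  const parts: PartInfo[] = [];
  for (let start = 0, number = 1; start < fileSize; start += size, number++) {
    const end = Math.min(start + size, fileSize);
    parts.push({ number, start, end, size: end - start });
  }
  return parts;
}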