S3 Compatible Cloud Storage client for JavaScript/TypeScript
—
Review status: Pending — best-practices conformance has not yet been assessed.
Impact: Pending — no eval scenarios have been run.
Risk profile: Pending — the risk profile of this skill has not been determined.
This document covers basic object operations including upload, download, copy, delete, and metadata management. For advanced features like multipart uploads, object composition, and S3 Select, see Advanced Objects.
const uploadInfo = await client.putObject(bucketName, objectName, stream, size?, metaData?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
stream: stream.Readable | Buffer | string // Data to upload
size?: number // Data size (optional, auto-detected for Buffer/string)
metaData?: ItemBucketMetadata // Object metadata
// Returns: Promise<UploadedObjectInfo>

interface ItemBucketMetadata {
[key: string]: any // Custom metadata (prefixed with 'x-amz-meta-')
// Standard HTTP headers (not prefixed)
'Content-Type'?: string
'Content-Encoding'?: string
'Content-Disposition'?: string
'Content-Language'?: string
'Cache-Control'?: string
'Expires'?: string
}

interface UploadedObjectInfo {
etag: string // Object ETag
versionId?: string // Version ID (if versioning enabled)
}

import * as fs from 'node:fs'
import * as path from 'node:path'
// Upload from string
const uploadInfo = await client.putObject(
'my-bucket',
'hello.txt',
'Hello, World!',
undefined, // size auto-detected
{ 'Content-Type': 'text/plain' }
)
// Upload from Buffer
const buffer = Buffer.from('Binary data', 'utf8')
await client.putObject('my-bucket', 'data.bin', buffer)
// Upload from stream with metadata
const stream = fs.createReadStream('/path/to/file.pdf')
const stat = fs.statSync('/path/to/file.pdf')
await client.putObject('my-bucket', 'document.pdf', stream, stat.size, {
'Content-Type': 'application/pdf',
'Content-Disposition': 'attachment; filename="document.pdf"',
'x-amz-meta-author': 'John Doe',
'x-amz-meta-category': 'reports'
})

const uploadInfo = await client.fPutObject(bucketName, objectName, filePath, metaData?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
filePath: string // Local file path
metaData?: ObjectMetaData // Object metadata
// Returns: Promise<UploadedObjectInfo>

// Simple file upload
await client.fPutObject('my-bucket', 'photo.jpg', './photos/vacation.jpg')
// File upload with metadata
await client.fPutObject('my-bucket', 'report.pdf', './documents/report.pdf', {
'Content-Type': 'application/pdf',
'x-amz-meta-department': 'finance',
'x-amz-meta-year': '2023'
})

const stream = await client.getObject(bucketName, objectName, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
getOpts?: GetObjectOpts // Download options
// Returns: Promise<stream.Readable>

interface GetObjectOpts {
versionId?: string // Specific version ID
SSECustomerAlgorithm?: string // Server-side encryption algorithm (AES256)
SSECustomerKey?: string // Server-side encryption key
SSECustomerKeyMD5?: string // MD5 hash of encryption key
}

import * as fs from 'node:fs'
// Download object to stream
const stream = await client.getObject('my-bucket', 'hello.txt')
let content = ''
stream.on('data', chunk => content += chunk)
stream.on('end', () => console.log('Content:', content))
// Download and save to file
const downloadStream = await client.getObject('my-bucket', 'photo.jpg')
const writeStream = fs.createWriteStream('./downloaded-photo.jpg')
downloadStream.pipe(writeStream)
// Download specific version
const versionStream = await client.getObject('my-bucket', 'document.pdf', {
versionId: 'ABC123DEF456'
})
// Download with server-side encryption
const encryptedStream = await client.getObject('my-bucket', 'secret.txt', {
SSECustomerAlgorithm: 'AES256',
SSECustomerKey: 'your-32-char-secret-key-here!!',
SSECustomerKeyMD5: 'MD5-hash-of-key'
})

const stream = await client.getPartialObject(bucketName, objectName, offset, length?, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
offset: number // Byte offset to start from
length?: number // Number of bytes to read (optional, reads to end)
getOpts?: GetObjectOpts // Download options
// Returns: Promise<stream.Readable>

// Download first 1024 bytes
const partialStream = await client.getPartialObject('my-bucket', 'large-file.dat', 0, 1024)
// Download from offset 1000 to end of file
const tailStream = await client.getPartialObject('my-bucket', 'large-file.dat', 1000)
// Download specific byte range
const middleStream = await client.getPartialObject('my-bucket', 'large-file.dat', 5000, 2048)

await client.fGetObject(bucketName, objectName, filePath, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
filePath: string // Local file path to save
getOpts?: GetObjectOpts // Download options
// Returns: Promise<void>

// Download to local file
await client.fGetObject('my-bucket', 'report.pdf', './downloads/report.pdf')
// Download specific version to file
await client.fGetObject('my-bucket', 'document.txt', './versions/document-v1.txt', {
versionId: 'version-123'
})

const stat = await client.statObject(bucketName, objectName, statOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
statOpts?: StatObjectOpts // Stat options
// Returns: Promise<BucketItemStat>

interface StatObjectOpts {
versionId?: string // Specific version ID
SSECustomerAlgorithm?: string // Server-side encryption algorithm
SSECustomerKey?: string // Server-side encryption key
SSECustomerKeyMD5?: string // MD5 hash of encryption key
}

interface BucketItemStat {
size: number // Object size in bytes
etag: string // Object ETag
lastModified: Date // Last modified timestamp
metaData: ResponseHeader // Object metadata and headers
versionId?: string // Version ID (if versioning enabled)
isDeleteMarker?: boolean // True if this is a delete marker
}

// Get object metadata
const stat = await client.statObject('my-bucket', 'photo.jpg')
console.log('Size:', stat.size)
console.log('ETag:', stat.etag)
console.log('Last Modified:', stat.lastModified)
console.log('Content Type:', stat.metaData['content-type'])
console.log('Custom Meta:', stat.metaData['x-amz-meta-author'])
// Get metadata for specific version
const versionStat = await client.statObject('my-bucket', 'document.pdf', {
versionId: 'version-456'
})

const objectStream = client.listObjects(bucketName, prefix?, recursive?, listOpts?)
// Parameters
bucketName: string // Bucket name
prefix?: string // Object prefix filter
recursive?: boolean // List recursively (default: false)
listOpts?: ListObjectQueryOpts // Additional options
// Returns: BucketStream<ObjectInfo>

const objectStream = client.listObjectsV2(bucketName, prefix?, recursive?, startAfter?)
// Parameters
bucketName: string // Bucket name
prefix?: string // Object prefix filter
recursive?: boolean // List recursively (default: false)
startAfter?: string // Start listing after this object name
// Returns: stream.Readable

const objectStream = client.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter)
// Parameters
bucketName: string // Bucket name
prefix: string // Object prefix filter
continuationToken: string // Continuation token for pagination
delimiter: string // Delimiter for grouping keys
maxKeys: number // Maximum number of keys to return
startAfter: string // Start listing after this object name
// Returns: stream.Readable

interface ObjectInfo {
name: string // Object name/key
prefix?: string // Object prefix (for grouped results)
size: number // Object size in bytes
etag?: string // Object ETag
lastModified: Date // Last modified timestamp
storageClass?: string // Storage class
isDir?: boolean // True if this represents a directory
}

// List all objects
const objectStream = client.listObjects('my-bucket')
objectStream.on('data', obj => {
console.log('Object:', obj.name, 'Size:', obj.size)
})
objectStream.on('error', err => console.error('Error:', err))
objectStream.on('end', () => console.log('Listing complete'))
// List objects with prefix
const photosStream = client.listObjects('my-bucket', 'photos/', true)
photosStream.on('data', obj => {
if (!obj.isDir) {
console.log('Photo:', obj.name, 'Modified:', obj.lastModified)
}
})
// List objects V2 (recommended)
const objectStreamV2 = client.listObjectsV2('my-bucket', 'documents/', true)
objectStreamV2.on('data', obj => {
console.log('Document:', obj.name, 'Storage Class:', obj.storageClass)
})
// Advanced listing with full control (listObjectsV2Query)
const advancedStream = client.listObjectsV2Query(
'my-bucket', // bucketName
'logs/', // prefix
'', // continuationToken (empty for first page)
'/', // delimiter (for directory-style listing)
1000, // maxKeys
'logs/2023/' // startAfter
)
advancedStream.on('data', obj => {
console.log('Advanced listing:', obj.name)
})
// Collect all objects into array
/**
 * Recursively lists every object under the given prefix and resolves
 * with the complete result set as an array.
 *
 * @param bucketName - Bucket to list.
 * @param prefix - Key prefix to filter by.
 * @returns Promise resolving to the collected object-info entries;
 *          rejects if the listing stream emits an error.
 */
function listAllObjects(bucketName, prefix) {
  const collected = []
  const listing = client.listObjects(bucketName, prefix, true)
  return new Promise((resolve, reject) => {
    listing.on('data', item => {
      collected.push(item)
    })
    listing.on('error', err => reject(err))
    listing.on('end', () => {
      resolve(collected)
    })
  })
}
const allObjects = await listAllObjects('my-bucket', 'data/')

const result = await client.copyObject(source, dest)
// Parameters
source: CopySourceOptions // Source object configuration
dest: CopyDestinationOptions // Destination object configuration
// Returns: Promise<CopyObjectResult>

For detailed information about CopySourceOptions and CopyDestinationOptions, see Advanced Objects.
import { CopySourceOptions, CopyDestinationOptions } from 'minio'
// Simple object copy
const source = new CopySourceOptions({
Bucket: 'source-bucket',
Object: 'source-object.jpg'
})
const dest = new CopyDestinationOptions({
Bucket: 'dest-bucket',
Object: 'copied-object.jpg'
})
const result = await client.copyObject(source, dest)
console.log('Copy ETag:', result.etag)

await client.removeObject(bucketName, objectName, removeOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
removeOpts?: RemoveOptions // Delete options
// Returns: Promise<void>

interface RemoveOptions {
versionId?: string // Specific version to delete
governanceBypass?: boolean // Bypass governance retention
}

const results = await client.removeObjects(bucketName, objectsList)
// Parameters
bucketName: string // Bucket name
objectsList: RemoveObjectsParam // Objects to delete
// Returns: Promise<RemoveObjectsResponse[]>

type RemoveObjectsParam = string[] | RemoveObjectsRequestEntry[]
interface RemoveObjectsRequestEntry {
name: string // Object name
versionId?: string // Version ID (optional)
}

interface RemoveObjectsResponse {
name?: string // Object name
versionId?: string // Version ID
deleteMarker?: boolean // True if delete marker created
deleteMarkerVersionId?: string // Delete marker version ID
// Error information (if deletion failed)
errorCode?: string // Error code
errorMessage?: string // Error message
}

// Delete single object
await client.removeObject('my-bucket', 'old-file.txt')
// Delete specific version
await client.removeObject('my-bucket', 'versioned-file.txt', {
versionId: 'version-123'
})
// Delete multiple objects by name
const objectsToDelete = ['file1.txt', 'file2.jpg', 'file3.pdf']
const deleteResults = await client.removeObjects('my-bucket', objectsToDelete)
deleteResults.forEach(result => {
if (result.errorCode) {
console.error(`Failed to delete ${result.name}: ${result.errorMessage}`)
} else {
console.log(`Deleted: ${result.name}`)
}
})
// Delete multiple objects with versions
const objectsWithVersions = [
{ name: 'file1.txt', versionId: 'version-1' },
{ name: 'file2.txt', versionId: 'version-2' }
]
await client.removeObjects('my-bucket', objectsWithVersions)

Removes an incomplete multipart upload from a bucket. This is useful for cleaning up failed or abandoned multipart uploads.
await client.removeIncompleteUpload(bucketName, objectName)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key of the incomplete upload
// Returns: Promise<void>

// Remove incomplete upload for a specific object
try {
await client.removeIncompleteUpload('my-bucket', 'large-file.zip')
console.log('Incomplete upload removed successfully')
} catch (error) {
if (error.code === 'NoSuchUpload') {
console.log('No incomplete upload found for this object')
} else {
console.error('Error removing incomplete upload:', error)
}
}
// Clean up multiple incomplete uploads (use with listIncompleteUploads)
const incompleteStream = client.listIncompleteUploads('my-bucket')
incompleteStream.on('data', async (upload) => {
try {
await client.removeIncompleteUpload('my-bucket', upload.key)
console.log(`Cleaned up incomplete upload: ${upload.key}`)
} catch (error) {
console.error(`Failed to remove ${upload.key}:`, error.message)
}
})

const tags = await client.getObjectTagging(bucketName, objectName, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
getOpts?: GetObjectOpts // Version and encryption options
// Returns: Promise<Tag[]>

await client.setObjectTagging(bucketName, objectName, tags, putOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
tags: Tags // Tags to set
putOpts?: TaggingOpts // Version options
// Returns: Promise<void>

await client.removeObjectTagging(bucketName, objectName, removeOpts)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name/key
removeOpts: TaggingOpts // Version options
// Returns: Promise<void>

interface TaggingOpts {
versionId?: string // Specific version ID
}

// Set object tags
await client.setObjectTagging('my-bucket', 'document.pdf', {
'Category': 'Financial',
'Department': 'Accounting',
'Year': '2023'
})
// Get object tags
const tags = await client.getObjectTagging('my-bucket', 'document.pdf')
tags.forEach(tag => {
console.log(`${tag.Key}: ${tag.Value}`)
})
// Set tags on specific version
await client.setObjectTagging('my-bucket', 'versioned-doc.pdf', {
'Status': 'Archived'
}, { versionId: 'version-456' })
// Remove all tags
await client.removeObjectTagging('my-bucket', 'document.pdf', {})

import {
S3Error,
InvalidObjectNameError,
InvalidArgumentError
} from 'minio'
try {
await client.putObject('my-bucket', 'file.txt', 'content')
} catch (error) {
if (error instanceof S3Error) {
switch (error.code) {
case 'NoSuchBucket':
console.error('Bucket does not exist')
break
case 'AccessDenied':
console.error('Access denied to bucket or object')
break
case 'InvalidObjectName':
console.error('Invalid object name')
break
default:
console.error('S3 Error:', error.code, error.message)
}
} else if (error instanceof InvalidObjectNameError) {
console.error('Invalid object name format:', error.message)
}
}

Next: Advanced Objects - Learn about multipart uploads, composition, and advanced features