S3 Compatible Cloud Storage client for JavaScript/TypeScript — Advanced Object Operations

Status: Pending — no eval scenarios have been run yet, so whether this skill follows best practices, its impact, and its risk profile are all still pending review.
This document covers advanced object features including multipart uploads, object composition, S3 Select queries, object retention, legal hold, and sophisticated copy operations with conditions.
import { CopySourceOptions } from 'minio'
class CopySourceOptions {
constructor(options: ICopySourceOptions)
// Methods
validate(): boolean
getHeaders(): RequestHeaders
}
interface ICopySourceOptions {
Bucket: string // Source bucket name
Object: string // Source object name
VersionID?: string // Source version ID
MatchETag?: string // Copy only if ETag matches
NoMatchETag?: string // Copy only if ETag doesn't match
MatchModifiedSince?: string | null // Copy if modified since date
MatchUnmodifiedSince?: string | null // Copy if unmodified since date
MatchRange?: boolean // Enable byte range matching
Start?: number // Start byte for range
End?: number // End byte for range
Encryption?: Encryption // Source object encryption
}

import { CopyDestinationOptions } from 'minio'
class CopyDestinationOptions {
constructor(options: ICopyDestinationOptions)
// Methods
validate(): boolean
getHeaders(): RequestHeaders
}
interface ICopyDestinationOptions {
Bucket: string // Destination bucket name
Object: string // Destination object name
Encryption?: Encryption // Destination object encryption
UserMetadata?: ObjectMetaData // Custom metadata
UserTags?: Record<string, string> | string // Object tags
LegalHold?: 'on' | 'off' // Legal hold status
RetainUntilDate?: string // Retention until date
Mode?: RETENTION_MODES // Retention mode
MetadataDirective?: 'COPY' | 'REPLACE' // Metadata handling
Headers?: Record<string, string> // Additional headers
}

import {
CopySourceOptions,
CopyDestinationOptions,
RETENTION_MODES,
ENCRYPTION_TYPES
} from 'minio'
// Copy with conditions
const source = new CopySourceOptions({
Bucket: 'source-bucket',
Object: 'source-file.pdf',
MatchETag: '"abc123def456"', // Only copy if ETag matches
MatchModifiedSince: '2023-01-01T00:00:00Z' // Only copy if modified after date
})
const dest = new CopyDestinationOptions({
Bucket: 'dest-bucket',
Object: 'copied-file.pdf',
MetadataDirective: 'REPLACE', // Replace metadata
UserMetadata: {
'x-amz-meta-copied-by': 'system',
'x-amz-meta-copy-date': new Date().toISOString()
},
UserTags: {
'source-bucket': 'source-bucket',
'copy-reason': 'backup'
}
})
const result = await client.copyObject(source, dest)
// Copy with encryption
const encryptedSource = new CopySourceOptions({
Bucket: 'encrypted-bucket',
Object: 'secret-file.txt',
Encryption: {
type: ENCRYPTION_TYPES.SSEC,
SSECustomerKey: 'source-encryption-key-32-chars',
SSECustomerKeyMD5: 'md5-hash-of-source-key'
}
})
const encryptedDest = new CopyDestinationOptions({
Bucket: 'dest-bucket',
Object: 'copied-secret.txt',
Encryption: {
type: ENCRYPTION_TYPES.SSEC,
SSECustomerKey: 'dest-encryption-key-32-chars!!',
SSECustomerKeyMD5: 'md5-hash-of-dest-key'
}
})
await client.copyObject(encryptedSource, encryptedDest)
// Copy partial object (byte range)
const partialSource = new CopySourceOptions({
Bucket: 'large-files',
Object: 'huge-file.dat',
MatchRange: true,
Start: 1024, // Start at byte 1024
End: 2047 // End at byte 2047 (1024 bytes total)
})
const partialDest = new CopyDestinationOptions({
Bucket: 'excerpts',
Object: 'file-excerpt.dat'
})
await client.copyObject(partialSource, partialDest)

const result = await client.composeObject(destObjConfig, sourceObjList)
// Parameters
destObjConfig: CopyDestinationOptions // Destination configuration
sourceObjList: CopySourceOptions[] // Array of source objects
// Returns: Promise<CopyObjectResult>

interface CopyObjectResult {
etag: string // Composed object ETag
lastModified: Date // Last modified timestamp
versionId?: string // Version ID if versioning enabled
}

// Compose multiple log files into one
const sources = [
new CopySourceOptions({ Bucket: 'logs', Object: 'app-2023-01.log' }),
new CopySourceOptions({ Bucket: 'logs', Object: 'app-2023-02.log' }),
new CopySourceOptions({ Bucket: 'logs', Object: 'app-2023-03.log' })
]
const destination = new CopyDestinationOptions({
Bucket: 'archives',
Object: 'app-q1-2023.log',
UserMetadata: {
'x-amz-meta-composed-from': '3-monthly-logs',
'x-amz-meta-quarter': 'Q1-2023'
}
})
const result = await client.composeObject(destination, sources)
console.log('Composed object ETag:', result.etag)
// Compose with partial objects
const partialSources = [
new CopySourceOptions({
Bucket: 'data',
Object: 'file1.dat',
MatchRange: true,
Start: 0,
End: 1023 // First 1024 bytes
}),
new CopySourceOptions({
Bucket: 'data',
Object: 'file2.dat',
MatchRange: true,
Start: 1024,
End: 2047 // Second 1024 bytes
})
]
const composedDest = new CopyDestinationOptions({
Bucket: 'combined',
Object: 'merged-data.dat'
})
await client.composeObject(composedDest, partialSources)

const results = await client.selectObjectContent(bucketName, objectName, selectOpts)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name
selectOpts: SelectOptions // Query options
// Returns: Promise<SelectResults | undefined>

interface SelectOptions {
expression: string // SQL-like query expression
expressionType: 'SQL' // Query language (currently only SQL)
inputSerialization: { // Input format configuration
CSV?: {
FileHeaderInfo?: 'USE' | 'IGNORE' | 'NONE'
RecordDelimiter?: string // Record separator (default: \n)
FieldDelimiter?: string // Field separator (default: ,)
QuoteCharacter?: string // Quote character (default: ")
QuoteEscapeCharacter?: string // Quote escape (default: ")
Comments?: string // Comment prefix
AllowQuotedRecordDelimiter?: boolean
}
JSON?: {
Type: 'DOCUMENT' | 'LINES' // JSON format type
}
Parquet?: {} // Parquet format (empty object)
CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' // Compression
}
outputSerialization: { // Output format configuration
CSV?: {
RecordDelimiter?: string // Record separator
FieldDelimiter?: string // Field separator
QuoteCharacter?: string // Quote character
QuoteEscapeCharacter?: string // Quote escape
QuoteFields?: 'ALWAYS' | 'ASNEEDED' // When to quote
}
JSON?: {
RecordDelimiter?: string // Record separator
}
}
requestProgress?: boolean // Include progress information
}

class SelectResults {
// Methods
setStats(stats: string): void
getStats(): string
setProgress(progress: unknown): void
getProgress(): unknown
setResponse(response: unknown): void
getResponse(): unknown
setRecords(records: unknown): void
getRecords(): unknown
}

// Query CSV file
const csvSelectOptions = {
expression: 'SELECT name, age FROM s3object WHERE age > 25',
expressionType: 'SQL',
inputSerialization: {
CSV: {
FileHeaderInfo: 'USE', // First row contains headers
FieldDelimiter: ',',
RecordDelimiter: '\n'
},
CompressionType: 'NONE'
},
outputSerialization: {
CSV: {
FieldDelimiter: ',',
RecordDelimiter: '\n'
}
},
requestProgress: true
}
const results = await client.selectObjectContent('data-bucket', 'users.csv', csvSelectOptions)
if (results) {
console.log('Query results:', results.getRecords())
console.log('Query stats:', results.getStats())
}
// Query JSON Lines file
const jsonSelectOptions = {
expression: "SELECT * FROM s3object[*] WHERE status = 'active'", // S3 Select SQL uses single quotes for string literals; double quotes denote identifiers
expressionType: 'SQL',
inputSerialization: {
JSON: { Type: 'LINES' },
CompressionType: 'GZIP'
},
outputSerialization: {
JSON: { RecordDelimiter: '\n' }
}
}
const jsonResults = await client.selectObjectContent('logs', 'events.jsonl.gz', jsonSelectOptions)
// Query with aggregation
const aggregateQuery = {
expression: 'SELECT category, COUNT(*) as count, AVG(price) as avg_price FROM s3object GROUP BY category',
expressionType: 'SQL',
inputSerialization: {
CSV: {
FileHeaderInfo: 'USE',
FieldDelimiter: ','
}
},
outputSerialization: {
CSV: { FieldDelimiter: ',' }
}
}
const aggregateResults = await client.selectObjectContent('sales', 'products.csv', aggregateQuery)

const retention = await client.getObjectRetention(bucketName, objectName, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name
getOpts?: GetObjectRetentionOpts // Options
// Returns: Promise<ObjectRetentionInfo | null>

await client.putObjectRetention(bucketName, objectName, retentionOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name
retentionOpts?: Retention // Retention configuration
// Returns: Promise<void>

interface GetObjectRetentionOpts {
versionId?: string // Specific version ID
}
interface ObjectRetentionInfo {
mode: RETENTION_MODES // GOVERNANCE or COMPLIANCE
retainUntilDate: Date // Retention expiry date
}
interface Retention {
mode: RETENTION_MODES // Retention mode
retainUntilDate: Date // Retention until date
governanceBypass?: boolean // Bypass governance retention (requires permission)
}

import { RETENTION_MODES } from 'minio'
// Set governance retention for 30 days
const retentionDate = new Date()
retentionDate.setDate(retentionDate.getDate() + 30)
await client.putObjectRetention('compliance-bucket', 'important-doc.pdf', {
mode: RETENTION_MODES.GOVERNANCE,
retainUntilDate: retentionDate
})
// Set compliance retention (cannot be bypassed)
const complianceDate = new Date()
complianceDate.setFullYear(complianceDate.getFullYear() + 7) // 7 years
await client.putObjectRetention('legal-docs', 'contract.pdf', {
mode: RETENTION_MODES.COMPLIANCE,
retainUntilDate: complianceDate
})
// Check current retention
const retention = await client.getObjectRetention('compliance-bucket', 'important-doc.pdf')
if (retention) {
console.log('Retention mode:', retention.mode)
console.log('Retain until:', retention.retainUntilDate)
} else {
console.log('No retention set')
}
// Bypass governance retention (requires s3:BypassGovernanceRetention permission)
await client.putObjectRetention('compliance-bucket', 'temp-doc.pdf', {
mode: RETENTION_MODES.GOVERNANCE,
retainUntilDate: new Date(), // Immediate expiry
governanceBypass: true
})

const status = await client.getObjectLegalHold(bucketName, objectName, getOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name
getOpts?: GetObjectLegalHoldOptions // Options
// Returns: Promise<LEGAL_HOLD_STATUS>

await client.setObjectLegalHold(bucketName, objectName, setOpts?)
// Parameters
bucketName: string // Bucket name
objectName: string // Object name
setOpts?: PutObjectLegalHoldOptions // Legal hold configuration
// Returns: Promise<void>

interface GetObjectLegalHoldOptions {
versionId?: string // Specific version ID
}
interface PutObjectLegalHoldOptions {
versionId?: string // Specific version ID
status: LEGAL_HOLD_STATUS // ON or OFF
}
enum LEGAL_HOLD_STATUS {
ENABLED = 'ON', // Legal hold is active
DISABLED = 'OFF' // Legal hold is not active
}

import { LEGAL_HOLD_STATUS } from 'minio'
// Enable legal hold
await client.setObjectLegalHold('legal-bucket', 'evidence.pdf', {
status: LEGAL_HOLD_STATUS.ENABLED
})
// Check legal hold status
const holdStatus = await client.getObjectLegalHold('legal-bucket', 'evidence.pdf')
console.log('Legal hold status:', holdStatus) // 'ON' or 'OFF'
// Disable legal hold
await client.setObjectLegalHold('legal-bucket', 'evidence.pdf', {
status: LEGAL_HOLD_STATUS.DISABLED
})
// Set legal hold on specific version
await client.setObjectLegalHold('versioned-bucket', 'document.txt', {
versionId: 'version-123',
status: LEGAL_HOLD_STATUS.ENABLED
})

The CopyConditions class provides legacy support for copy conditions:
import { CopyConditions } from 'minio'
class CopyConditions {
// Properties
modified: string // Modified since condition
unmodified: string // Unmodified since condition
matchETag: string // ETag match condition
matchETagExcept: string // ETag not match condition
// Methods
setModified(date: Date): void
setUnmodified(date: Date): void
setMatchETag(etag: string): void
setMatchETagExcept(etag: string): void
}

import { CopyConditions } from 'minio'
// Legacy copy conditions (deprecated - use CopySourceOptions instead)
const conditions = new CopyConditions()
conditions.setMatchETag('"abc123def456"')
conditions.setModified(new Date('2023-01-01'))
// Note: This is legacy API, prefer CopySourceOptions for new code

import { ENCRYPTION_TYPES } from 'minio'
type Encryption =
| {
type: ENCRYPTION_TYPES.SSEC // Server-side encryption with customer keys
SSECustomerKey?: string // Base64 encoded 256-bit key
SSECustomerKeyMD5?: string // MD5 hash of the key
}
| {
type: ENCRYPTION_TYPES.KMS // Server-side encryption with KMS
SSEAlgorithm?: string // Encryption algorithm
KMSMasterKeyID?: string // KMS key ID
}

import { ENCRYPTION_TYPES } from 'minio'
// Upload with SSE-C encryption
const sseCustomerKey = 'your-32-character-secret-key-here!!'
const uploadInfo = await client.putObject('encrypted-bucket', 'secret.txt', 'confidential data', undefined, {
'Content-Type': 'text/plain',
// Encryption headers are handled automatically when using CopySourceOptions/CopyDestinationOptions
})
// Copy with different encryption
const source = new CopySourceOptions({
Bucket: 'source-bucket',
Object: 'encrypted-file.txt',
Encryption: {
type: ENCRYPTION_TYPES.SSEC,
SSECustomerKey: 'source-key-32-characters-long!!!',
SSECustomerKeyMD5: 'md5-hash-of-source-key'
}
})
const dest = new CopyDestinationOptions({
Bucket: 'dest-bucket',
Object: 'reencrypted-file.txt',
Encryption: {
type: ENCRYPTION_TYPES.KMS,
KMSMasterKeyID: 'arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012'
}
})
await client.copyObject(source, dest)

import { S3Error, InvalidArgumentError } from 'minio'
try {
await client.selectObjectContent('bucket', 'data.csv', selectOptions)
} catch (error) {
if (error instanceof S3Error) {
switch (error.code) {
case 'NoSuchKey':
console.error('Object not found')
break
case 'InvalidRequest':
console.error('Invalid S3 Select query:', error.message)
break
case 'AccessDenied':
console.error('Access denied for S3 Select operation')
break
default:
console.error('S3 Error:', error.code, error.message)
}
}
}
// Handle retention/legal hold errors
try {
await client.putObjectRetention('bucket', 'object', retention)
} catch (error) {
if (error instanceof S3Error) {
switch (error.code) {
case 'InvalidRequest':
console.error('Object locking not enabled on bucket')
break
case 'AccessDenied':
console.error('Insufficient permissions for retention operation')
break
}
}
}

Next: Presigned Operations - Learn about presigned URLs and POST policies