The Google Cloud Storage Node.js library provides various utility classes and functions for checksums, signed URLs, stream validation, and other supporting functionality.
// Checksum helper for the CRC32C algorithm used by Cloud Storage.
// NOTE(review): declaration sketch only (no method bodies) — the real
// implementation lives in the @google-cloud/storage package.
class CRC32C {
// Optionally seeds the running checksum with an initial CRC value.
constructor(initial?: number);
// Methods
// Folds more data into the running checksum; returns the instance for chaining.
update(data: Buffer | Uint8Array | string): CRC32C;
// Returns the accumulated checksum as a Buffer.
digest(): Buffer;
// Returns the checksum as text; `encoding` is presumably a Buffer encoding
// such as 'base64' (see the usage examples below) — confirm in library docs.
toString(encoding?: string): string;
// Static methods
// Builds a CRC32C instance pre-loaded with the checksum of `data`.
static from(data: Buffer | Uint8Array | string): CRC32C;
// One-shot convenience: checksum of `data` returned as a Buffer.
static calculate(data: Buffer | Uint8Array | string): Buffer;
}
// Basic usage
import { CRC32C } from '@google-cloud/storage';
// Calculate checksum for data
const data = Buffer.from('Hello, World!');
const checksum = CRC32C.from(data);
console.log('CRC32C:', checksum.toString('base64'));
// Incremental calculation — feed chunks as they arrive, then digest once.
const crc = new CRC32C();
crc.update('Hello, ');
crc.update('World!');
const result = crc.digest();
console.log('CRC32C:', result.toString('base64'));
// One-shot calculation
const quickChecksum = CRC32C.calculate(data);
console.log('CRC32C:', quickChecksum.toString('base64'));

// Factory signature used by Storage to create a fresh validator per operation.
// (The alias was fused onto the previous statement by extraction; now split.)
type CRC32CValidatorGenerator = () => CRC32C;
// Default validator generator
import { CRC32C_DEFAULT_VALIDATOR_GENERATOR } from '@google-cloud/storage';
const storage = new Storage({
  crc32cGenerator: CRC32C_DEFAULT_VALIDATOR_GENERATOR
});
// Custom validator generator
function customCRC32CGenerator(): CRC32C {
  return new CRC32C();
}
// Renamed from `storage`: redeclaring a `const` in the same scope is a
// compile error in the original example.
const customStorage = new Storage({
  crc32cGenerator: customCRC32CGenerator
});
// Validate file integrity during upload
const file = bucket.file('important-data.zip');
// Calculate local file checksum
const fs = require('fs');
const localData = fs.readFileSync('/local/important-data.zip');
const expectedChecksum = CRC32C.from(localData).toString('base64');
// Upload with validation — the library verifies the crc32c server-side.
await file.save(localData, {
  validation: 'crc32c',
  metadata: {
    crc32c: expectedChecksum
  }
});
// Download with validation
const [downloadedData] = await file.download({
  validation: 'crc32c'
});
// Verify checksums match
const actualChecksum = CRC32C.from(downloadedData).toString('base64');
if (actualChecksum === expectedChecksum) {
  console.log('File integrity verified');
} else {
  console.error('File integrity check failed');
}

class HashStreamValidator extends Transform {
// Accepts which hashes to compute (boolean) or expected values (string).
constructor(options: HashStreamValidatorOptions);
// Properties
// Computed digests — read after the stream has finished (see usage below).
crc32c?: string;
md5?: string;
// Methods
// Compares the computed hash of the given kind against `sum`.
test(hash: 'crc32c' | 'md5', sum: string): boolean;
}
// Options for HashStreamValidator: `true` enables computing that hash;
// a string presumably supplies an expected value to validate against —
// confirm against the library documentation.
interface HashStreamValidatorOptions {
crc32c?: boolean | string;
md5?: boolean | string;
}
import { HashStreamValidator } from '@google-cloud/storage';
// Validate stream with expected checksums
const validator = new HashStreamValidator({
  crc32c: true,
  md5: true
});
// Stream file through validator. 'finish' fires on the LAST stream in the
// chain (the write stream), by which point the validator has seen all bytes.
fs.createReadStream('/local/file.txt')
  .pipe(validator)
  .pipe(fs.createWriteStream('/local/validated-file.txt'))
  .on('finish', () => {
    console.log('CRC32C:', validator.crc32c);
    console.log('MD5:', validator.md5);
    // Test against expected values
    const crcValid = validator.test('crc32c', 'expected-crc32c-value');
    const md5Valid = validator.test('md5', 'expected-md5-value');
    console.log('CRC32C valid:', crcValid);
    console.log('MD5 valid:', md5Valid);
  });

// Upload with stream validation
/**
 * Streams a local file to Cloud Storage through a HashStreamValidator and
 * records the computed checksums on the object's metadata.
 * Resolves when the upload and metadata write complete; rejects on any
 * stream or API failure.
 */
function uploadWithValidation(localPath: string, file: File) {
  return new Promise<void>((resolve, reject) => {
    const validator = new HashStreamValidator({
      crc32c: true,
      md5: true
    });
    const readStream = fs.createReadStream(localPath);
    const writeStream = file.createWriteStream();
    // `.pipe()` does not forward 'error' events, so every stage needs its own
    // handler — the original only rejected on write-stream errors, leaving
    // read failures to hang the promise.
    readStream.on('error', reject);
    validator.on('error', reject);
    readStream
      .pipe(validator)
      .pipe(writeStream)
      .on('error', reject)
      .on('finish', async () => {
        try {
          // Set metadata with the checksums calculated during streaming.
          await file.setMetadata({
            crc32c: validator.crc32c,
            md5Hash: validator.md5
          });
          console.log('Upload completed with validation');
          resolve();
        } catch (error) {
          reject(error);
        }
      });
  });
}
// Download with validation
/**
 * Downloads an object to `localPath`, streaming it through a
 * HashStreamValidator configured with the checksums from the object's
 * metadata. Rejects on any stream or API failure.
 */
function downloadWithValidation(file: File, localPath: string) {
  return new Promise<void>((resolve, reject) => {
    file.getMetadata()
      .then(([metadata]) => {
        const validator = new HashStreamValidator({
          crc32c: metadata.crc32c,
          md5: metadata.md5Hash
        });
        const readStream = file.createReadStream();
        // Forward errors from every stage; `.pipe()` only propagates data.
        readStream.on('error', reject);
        validator.on('error', reject);
        readStream
          .pipe(validator)
          .pipe(fs.createWriteStream(localPath))
          .on('error', reject)
          .on('finish', () => {
            console.log('Download completed with validation');
            resolve();
          });
      })
      .catch(reject);
  });
}

class URLSigner {
// Signs requests with `authClient`; scoped to `bucket` and optionally `file`.
constructor(authClient: AuthClient, bucket: Bucket, file?: File);
// Methods
// Produces a signed URL per the supplied configuration (see GetSignedUrlConfig).
getSignedUrl(config: GetSignedUrlConfig): Promise<string>;
}
// Configuration for generating signed URLs (v2 or v4 signing).
interface GetSignedUrlConfig {
// Signing scheme; v4 is used throughout the examples below.
version: 'v2' | 'v4';
// Operation the URL authorizes.
action: 'read' | 'write' | 'delete' | 'resumable' | 'list';
// When the URL stops being valid (timestamp string, epoch ms, or Date).
expires: string | number | Date;
// Earliest time the URL becomes valid.
accessibleAt?: string | number | Date;
virtualHostedStyle?: boolean;
// Custom domain to use instead of storage.googleapis.com.
cname?: string;
contentMd5?: string;
contentType?: string;
// Extra headers that must accompany requests made with the URL.
extensionHeaders?: { [key: string]: string };
queryParams?: { [key: string]: string };
// Overrides the Content-Disposition header of the response.
responseDisposition?: string;
responseType?: string;
promptSaveAs?: string;
}
// getSignedUrl resolves to a single-element tuple containing the URL.
type GetSignedUrlResponse = [string]; // [signedUrl]
// Node-style callback alternative to the promise form.
type GetSignedUrlCallback = (err: Error | null, url?: string) => void;

// Batch signed URL generation
async function generateBatchSignedUrls(files: File[], action: string, expires: Date) {
const urlPromises = files.map(file =>
file.getSignedUrl({
version: 'v4',
action: action as any,
expires: expires
})
);
const urls = await Promise.all(urlPromises);
const result = files.map((file, index) => ({
fileName: file.name,
signedUrl: urls[index][0]
}));
return result;
}
// Time-limited upload URLs
/**
 * Generates v4 signed write (upload) URLs for each object name; every URL
 * expires one hour from now. Returns { fileName, uploadUrl, expiresAt } per name.
 */
async function generateTimeLimitedUploadUrls(fileNames: string[], bucket: Bucket) {
  const expires = Date.now() + 60 * 60 * 1000; // 1 hour
  const signed = await Promise.all(
    fileNames.map((name) =>
      bucket.file(name).getSignedUrl({
        version: 'v4',
        action: 'write',
        expires: expires,
        contentType: 'application/octet-stream'
      })
    )
  );
  return fileNames.map((fileName, i) => ({
    fileName,
    uploadUrl: signed[i][0],
    expiresAt: new Date(expires)
  }));
}
// Conditional access URLs
/**
 * Demonstrates signed URLs with extra constraints: one bound to custom
 * extension headers, one that overrides response headers on download.
 */
async function generateConditionalUrls(file: File) {
  // URL intended for a specific IP range.
  // NOTE(review): 'x-goog-ip-range' is passed as a signed extension header —
  // confirm the service actually enforces IP restrictions this way before
  // relying on it for access control.
  const [restrictedUrl] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: Date.now() + 3600000, // 1 hour
    extensionHeaders: {
      'x-goog-ip-range': '192.168.1.0/24'
    }
  });
  // URL with custom response headers (forces a named PDF download).
  const [customHeaderUrl] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: Date.now() + 3600000,
    responseDisposition: 'attachment; filename="download.pdf"',
    responseType: 'application/pdf'
  });
  return {
    restricted: restrictedUrl,
    customHeaders: customHeaderUrl
  };
}

class Channel {
// Channels are obtained via storage.channel(id, resourceId); see usage below.
constructor(storage: Storage, id: string, resourceId: string);
// Properties
id: string;
resourceId: string;
// Methods
// Stops the notification channel; also usable with a callback (see below).
stop(): Promise<void>;
}
// Node-style callback form of Channel#stop.
type StopCallback = (err: Error | null) => void;
// Create and manage channels
const channel = storage.channel('my-channel', 'resource-123');
// Stop channel (Promise)
await channel.stop();
// Stop channel (callback) — shown for API completeness; in real code pick
// one style, and note this channel was already stopped above.
channel.stop((err) => {
  if (err) {
    console.error('Error stopping channel:', err);
  } else {
    console.log('Channel stopped successfully');
  }
});

class ChannelManager {
private channels = new Map<string, Channel>();
// Pending auto-stop timers keyed by channel id, so a manual stop can cancel
// them (the original leaked a pending 24h timer per channel).
private timers = new Map<string, ReturnType<typeof setTimeout>>();
private storage: Storage;

constructor(storage: Storage) {
  this.storage = storage;
}

/** Creates a webhook watch channel on `bucket` and auto-stops it after 24h. */
async createChannel(bucket: Bucket, channelId: string, webhookUrl: string) {
  // Create watch channel
  const [channel] = await bucket.createChannel(channelId, {
    address: webhookUrl,
    type: 'web_hook'
  });
  // Track channel
  this.channels.set(channelId, channel);
  // Auto-stop after 24 hours; keep the handle so stopChannel can cancel it.
  const timer = setTimeout(() => {
    // Catch explicitly — a rejection inside a timer callback would otherwise
    // surface as an unhandled promise rejection.
    this.stopChannel(channelId).catch((err) => {
      console.error(`Auto-stop of channel ${channelId} failed:`, err);
    });
  }, 24 * 60 * 60 * 1000); // 24 hours
  this.timers.set(channelId, timer);
  return channel;
}

/** Stops a tracked channel (no-op if unknown) and cancels its auto-stop timer. */
async stopChannel(channelId: string) {
  const timer = this.timers.get(channelId);
  if (timer) {
    clearTimeout(timer);
    this.timers.delete(channelId);
  }
  const channel = this.channels.get(channelId);
  if (channel) {
    await channel.stop();
    this.channels.delete(channelId);
    console.log(`Channel ${channelId} stopped`);
  }
}

/** Stops every tracked channel in parallel and clears all bookkeeping. */
async stopAllChannels() {
  for (const timer of this.timers.values()) {
    clearTimeout(timer);
  }
  this.timers.clear();
  const stopPromises = Array.from(this.channels.values()).map(channel => channel.stop());
  await Promise.all(stopPromises);
  this.channels.clear();
  console.log('All channels stopped');
}

/** Returns the ids of channels currently tracked by this manager. */
getActiveChannels() {
  return Array.from(this.channels.keys());
}
}

// ApiError is exported from the common library
import { ApiError } from '@google-cloud/storage';
// Shape of the options accepted by ApiError's constructor (documentation sketch).
interface ApiErrorOptions {
// HTTP-style status code of the failed request.
code?: number;
message?: string;
// Individual error details returned by the API.
errors?: any[];
}
// NOTE(review): this local declaration re-states the imported ApiError's
// shape for reference; as written it collides with the imported identifier
// and has a bodiless constructor, so treat it as an API sketch rather than
// compilable code.
class ApiError extends Error {
code?: number;
errors?: any[];
constructor(options?: ApiErrorOptions | string);
}
// Error handling utilities
class StorageErrorHandler {
  /** True when the error is a transient server/rate-limit/timeout failure. */
  static isRetryable(error: Error): boolean {
    if (error instanceof ApiError) {
      // Retry on server errors (5xx), rate limiting (429), and timeouts (408).
      // `code` is optional on ApiError, so default it before comparing
      // (required under strictNullChecks; `undefined >= 500` was relied on).
      const code = error.code ?? 0;
      return code >= 500 || code === 429 || code === 408;
    }
    return false;
  }

  /** Buckets an error into a coarse category for logging/metrics. */
  static getErrorCategory(error: Error): string {
    if (error instanceof ApiError) {
      const code = error.code ?? 0;
      if (code === 400) return 'client-error';
      if (code === 401 || code === 403) return 'auth-error';
      if (code === 404) return 'not-found';
      if (code === 409) return 'conflict';
      if (code === 412) return 'precondition-failed';
      if (code === 429) return 'rate-limited';
      if (code >= 500) return 'server-error';
    }
    return 'unknown';
  }

  /**
   * Runs `operation`, retrying retryable failures with exponential backoff
   * plus jitter. Rethrows the last error after `maxRetries` attempts or on
   * the first non-retryable failure.
   */
  static async retryOperation<T>(
    operation: () => Promise<T>,
    maxRetries = 3,
    baseDelay = 1000
  ): Promise<T> {
    let lastError: Error | undefined;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        return await operation();
      } catch (error) {
        lastError = error as Error;
        if (!this.isRetryable(lastError) || attempt === maxRetries) {
          throw lastError;
        }
        // Exponential backoff with jitter
        const delay = baseDelay * Math.pow(2, attempt - 1) + Math.random() * 1000;
        // Log BEFORE sleeping — the original printed "retrying in Xms" only
        // after the wait had already elapsed.
        console.warn(`Attempt ${attempt} failed, retrying in ${delay}ms:`, lastError.message);
        await new Promise(resolve => setTimeout(resolve, delay));
      }
    }
    throw lastError!;
  }
}

// Utility functions for common operations
class StorageUtils {
  /** Lower-cases a file name and replaces characters that are awkward in object keys. */
  static sanitizeFileName(fileName: string): string {
    const noInvalid = fileName.replace(/[<>:"/\\|?*]/g, '_');
    const noSpaces = noInvalid.replace(/\s+/g, '_');
    return noSpaces.toLowerCase();
  }

  /** Builds `<base>_<timestamp>_<random><ext>`; `extension` should include its dot. */
  static generateUniqueFileName(baseName: string, extension?: string): string {
    const stamp = Date.now();
    const suffix = Math.random().toString(36).substring(2, 8);
    return `${baseName}_${stamp}_${suffix}${extension || ''}`;
  }

  /** Parses gs:// or path-style https:// storage URLs into bucket/object parts. */
  static parseStorageUrl(url: string): { bucket: string; object: string } | null {
    const gs = /^gs:\/\/([^\/]+)\/(.+)$/.exec(url);
    if (gs) {
      return { bucket: gs[1], object: gs[2] };
    }
    const https = /^https:\/\/storage\.googleapis\.com\/([^\/]+)\/(.+)$/.exec(url);
    return https
      ? { bucket: https[1], object: decodeURIComponent(https[2]) }
      : null;
  }

  /** Builds a gs:// or path-style https URL for a bucket/object pair. */
  static buildStorageUrl(bucket: string, object: string, protocol: 'gs' | 'https' = 'gs'): string {
    return protocol === 'gs'
      ? `gs://${bucket}/${object}`
      : `https://storage.googleapis.com/${bucket}/${encodeURIComponent(object)}`;
  }

  /** Reads size/mtime plus parsed path components for a local file (synchronous stat). */
  static getFileInfo(filePath: string) {
    const path = require('path');
    const fs = require('fs');
    const stat = fs.statSync(filePath);
    const parsed = path.parse(filePath);
    return {
      name: parsed.name,
      extension: parsed.ext,
      size: stat.size,
      modified: stat.mtime,
      directory: parsed.dir,
      fullPath: filePath
    };
  }
}

// Batch utility for processing large numbers of operations
/**
 * Splits a workload into batches and runs each batch's items through an async
 * processor with a bounded number of in-flight operations. Failures are
 * captured per item rather than aborting the run.
 */
class BatchProcessor<T, R> {
  private batchSize: number;
  private concurrency: number;

  constructor(batchSize = 100, concurrency = 10) {
    this.batchSize = batchSize;
    this.concurrency = concurrency;
  }

  /**
   * Processes `items` and returns one entry per item, in order, with either
   * `result` or `error` set.
   * Fixes from the original: the method no longer declares its own <T, R>
   * (which shadowed the class type parameters), and tasks are now STARTED in
   * concurrency-limited chunks — previously all promises were created up
   * front, so the "limit" only throttled awaiting, not execution.
   */
  async processBatch(
    items: T[],
    processor: (item: T) => Promise<R>
  ): Promise<Array<{ item: T; result?: R; error?: Error }>> {
    const results: Array<{ item: T; result?: R; error?: Error }> = [];
    // Split into batches
    for (let i = 0; i < items.length; i += this.batchSize) {
      const batch = items.slice(i, i + this.batchSize);
      // Launch at most `concurrency` processor calls at a time.
      for (let j = 0; j < batch.length; j += this.concurrency) {
        const chunk = batch.slice(j, j + this.concurrency);
        const chunkResults = await Promise.all(
          chunk.map(async (item) => {
            try {
              const result = await processor(item);
              return { item, result };
            } catch (error) {
              return { item, error: error as Error };
            }
          })
        );
        results.push(...chunkResults);
      }
      console.log(`Processed batch ${Math.floor(i / this.batchSize) + 1}/${Math.ceil(items.length / this.batchSize)}`);
    }
    return results;
  }
}
// Usage example
const processor = new BatchProcessor<string, File>(50, 5);
// Annotated: an empty literal alone would infer an unusable element type.
const filePaths: string[] = [/* array of file paths */];
const results = await processor.processBatch(filePaths, async (filePath) => {
  const [uploadResult] = await bucket.upload(filePath);
  return uploadResult;
});
// Process results
const successful = results.filter(r => r.result && !r.error);
const failed = results.filter(r => r.error);
console.log(`Success: ${successful.length}, Failed: ${failed.length}`);

// Metrics collection utilities
class StorageMetrics {
  // Running counters for operations performed and bytes transferred.
  private metrics = {
    uploads: 0,
    downloads: 0,
    deletes: 0,
    errors: 0,
    bytesUploaded: 0,
    bytesDownloaded: 0
  };

  /** Records one successful upload of `bytes` bytes. */
  recordUpload(bytes: number) {
    this.metrics.uploads += 1;
    this.metrics.bytesUploaded += bytes;
  }

  /** Records one successful download of `bytes` bytes. */
  recordDownload(bytes: number) {
    this.metrics.downloads += 1;
    this.metrics.bytesDownloaded += bytes;
  }

  /** Records one delete operation. */
  recordDelete() {
    this.metrics.deletes += 1;
  }

  /** Records one failed operation. */
  recordError() {
    this.metrics.errors += 1;
  }

  /** Returns a defensive copy of the raw counters. */
  getMetrics() {
    return { ...this.metrics };
  }

  /** Zeroes every counter. */
  reset() {
    for (const key of Object.keys(this.metrics)) {
      (this.metrics as any)[key] = 0;
    }
  }

  /** Formats a byte count as a human-readable string, e.g. "1.50 KB". */
  formatBytes(bytes: number): string {
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    let value = bytes;
    let idx = 0;
    while (value >= 1024 && idx < units.length - 1) {
      value /= 1024;
      idx += 1;
    }
    return `${value.toFixed(2)} ${units[idx]}`;
  }

  /** Aggregated, display-ready view of the counters. */
  getSummary() {
    const { uploads, downloads, deletes, errors, bytesUploaded, bytesDownloaded } = this.metrics;
    const transfers = uploads + downloads;
    return {
      operations: { uploads, downloads, deletes, errors },
      bandwidth: {
        uploaded: this.formatBytes(bytesUploaded),
        downloaded: this.formatBytes(bytesDownloaded),
        total: this.formatBytes(bytesUploaded + bytesDownloaded)
      },
      errorRate: transfers > 0
        ? (errors / transfers * 100).toFixed(2) + '%'
        : '0%'
    };
  }
}
// Global metrics instance
const storageMetrics = new StorageMetrics();
// Usage in operations — count the upload only after it succeeds.
async function monitoredUpload(file: File, data: Buffer) {
  try {
    await file.save(data);
    storageMetrics.recordUpload(data.length);
  } catch (error) {
    storageMetrics.recordError();
    throw error;
  }
}
// Periodic reporting
setInterval(() => {
  console.log('Storage Metrics:', storageMetrics.getSummary());
}, 60000); // Every minute

// Type guards for runtime validation
class StorageTypeGuards {
  /** Duck-type check: object exposing save() and download() looks like a File. */
  static isFile(obj: any): obj is File {
    // `!!obj` coerces to a real boolean — the originals returned the falsy
    // input itself (null/undefined) instead of `false`.
    return !!obj && typeof obj.save === 'function' && typeof obj.download === 'function';
  }

  /** Duck-type check: object exposing file() and upload() looks like a Bucket. */
  static isBucket(obj: any): obj is Bucket {
    return !!obj && typeof obj.file === 'function' && typeof obj.upload === 'function';
  }

  /** Duck-type check: object exposing bucket() and createBucket() looks like a Storage client. */
  static isStorage(obj: any): obj is Storage {
    return !!obj && typeof obj.bucket === 'function' && typeof obj.createBucket === 'function';
  }

  /** True for ApiError instances or error-like objects carrying a numeric code. */
  static isApiError(error: any): error is ApiError {
    return error instanceof ApiError || !!(error && typeof error.code === 'number');
  }

  // Validate storage configuration (shallow: string-typed fields only).
  static validateStorageOptions(options: any): options is StorageOptions {
    if (!options || typeof options !== 'object') return false;
    if (options.projectId && typeof options.projectId !== 'string') return false;
    if (options.keyFilename && typeof options.keyFilename !== 'string') return false;
    if (options.apiEndpoint && typeof options.apiEndpoint !== 'string') return false;
    return true;
  }

  // Validate file metadata.
  // NOTE(review): `size` and `generation` are expected to be strings here
  // (JSON API representation) — confirm against the metadata source.
  static validateFileMetadata(metadata: any): metadata is FileMetadata {
    if (!metadata || typeof metadata !== 'object') return false;
    if (metadata.contentType && typeof metadata.contentType !== 'string') return false;
    if (metadata.size && typeof metadata.size !== 'string') return false;
    if (metadata.generation && typeof metadata.generation !== 'string') return false;
    return true;
  }
}

// Storage class constants
// Storage class constants
// Storage classes for bucket/object placement (cost vs access tiers).
export const STORAGE_CLASSES = {
STANDARD: 'STANDARD',
NEARLINE: 'NEARLINE',
COLDLINE: 'COLDLINE',
ARCHIVE: 'ARCHIVE',
// NOTE(review): legacy class; presumably retained for backward
// compatibility — confirm before using it in new code.
DURABLE_REDUCED_AVAILABILITY: 'DURABLE_REDUCED_AVAILABILITY'
} as const;
// ACL constants
// Roles grantable in bucket/object ACL entries.
export const ACL_ROLES = {
OWNER: 'OWNER',
READER: 'READER',
WRITER: 'WRITER',
FULL_CONTROL: 'FULL_CONTROL'
} as const;
// Special group entities usable in ACL entries.
export const ACL_ENTITIES = {
ALL_USERS: 'allUsers',
ALL_AUTHENTICATED_USERS: 'allAuthenticatedUsers'
} as const;
// HTTP methods for signed URLs
export const SIGNED_URL_ACTIONS = {
READ: 'read',
WRITE: 'write',
DELETE: 'delete',
RESUMABLE: 'resumable'
} as const;
// Event types for notifications
export const NOTIFICATION_EVENTS = {
OBJECT_FINALIZE: 'OBJECT_FINALIZE',
OBJECT_DELETE: 'OBJECT_DELETE',
OBJECT_METADATA_UPDATE: 'OBJECT_METADATA_UPDATE',
OBJECT_ACL_UPDATE: 'OBJECT_ACL_UPDATE'
} as const;
// Usage
const bucket = storage.bucket('my-bucket');
// Switch the bucket's default storage class to Coldline.
await bucket.setStorageClass(STORAGE_CLASSES.COLDLINE);
// Grant public read access on the file via its ACL.
await file.acl.add({
entity: ACL_ENTITIES.ALL_USERS,
role: ACL_ROLES.READER
});