CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/npm-node-fetch

A light-weight module that brings Fetch API to Node.js

Pending
Quality

Pending

Does it follow best practices?

Impact

Pending

No eval scenarios have been run

Security by Snyk

Pending

The risk profile of this skill

Overview
Eval results
Files

docs/file-blob.md

File and Blob Operations

Integration with file system and blob operations for uploading files and handling binary data. node-fetch re-exports classes and utilities from the fetch-blob package for comprehensive file handling.

Capabilities

Blob Class

Web-compatible Blob implementation for handling binary data with MIME type support.

/**
 * Blob implementation for binary data handling
 *
 * Immutable container for raw binary data plus a MIME type, mirroring the
 * web Blob API (re-exported from the fetch-blob package).
 */
class Blob {
  constructor(blobParts?: BlobPart[], options?: BlobPropertyBag);
  
  readonly size: number;   // length of the contained data in bytes
  readonly type: string;   // MIME type given at construction
  
  arrayBuffer(): Promise<ArrayBuffer>;  // entire payload as a single buffer
  stream(): ReadableStream<Uint8Array>; // payload as a chunked readable stream
  text(): Promise<string>;              // payload decoded to a string
  slice(start?: number, end?: number, contentType?: string): Blob; // byte sub-range as a new Blob
}

// Options accepted by the Blob constructor.
interface BlobPropertyBag {
  type?: string;                      // MIME type for the resulting blob
  endings?: 'transparent' | 'native'; // line-ending conversion for string parts
}

// Values accepted as constructor parts; multiple parts are joined in order.
type BlobPart = string | ArrayBuffer | ArrayBufferView | Blob;

Usage Examples:

import { Blob } from 'node-fetch';

// Create blob from string
const textBlob = new Blob(['Hello, World!'], { type: 'text/plain' });
console.log('Size:', textBlob.size);     // 13
console.log('Type:', textBlob.type);     // 'text/plain'

// Create blob from binary data
const binaryData = new Uint8Array([0x48, 0x65, 0x6c, 0x6c, 0x6f]); // "Hello"
const binaryBlob = new Blob([binaryData], { type: 'application/octet-stream' });

// Create blob from multiple parts
const multipartBlob = new Blob([
  'Data: ',
  binaryData,
  '\nEnd of data'
], { type: 'text/plain' });

// Read blob content
const text = await textBlob.text();
console.log(text); // 'Hello, World!'

const buffer = await textBlob.arrayBuffer();
console.log(new Uint8Array(buffer)); // [72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33]

File Class

Extended Blob class with file metadata like name and modification time.

/**
 * File implementation extending Blob with file metadata
 */
class File extends Blob {
  constructor(fileBits: BlobPart[], fileName: string, options?: FilePropertyBag);
  
  readonly name: string;         // file name given at construction
  readonly lastModified: number; // modification timestamp in ms since the Unix epoch
  readonly webkitRelativePath: string; // present for web API compatibility — typically empty here; verify
}

// File constructor options; adds a timestamp on top of the Blob options.
interface FilePropertyBag extends BlobPropertyBag {
  lastModified?: number; // ms since the Unix epoch; defaults to the creation time — TODO confirm
}

Usage Examples:

import { File } from 'node-fetch';

// Create file from string content
const textFile = new File(['File content'], 'example.txt', {
  type: 'text/plain',
  lastModified: Date.now()
});

console.log('Name:', textFile.name);           // 'example.txt'
console.log('Size:', textFile.size);           // 12
console.log('Type:', textFile.type);           // 'text/plain'
console.log('Modified:', textFile.lastModified);

// Create JSON file
const jsonData = { name: 'Alice', age: 30 };
const jsonFile = new File([JSON.stringify(jsonData)], 'data.json', {
  type: 'application/json'
});

// Use with FormData for file uploads
const formData = new FormData();
formData.append('upload', textFile);
formData.append('metadata', jsonFile);

const response = await fetch('https://httpbin.org/post', {
  method: 'POST',
  body: formData
});

File System Integration

Utilities for creating Blob and File objects from file system paths.

/**
 * Create File from file system path (async)
 * The resulting file name is derived from the path's basename; the MIME type
 * is auto-detected from the file extension when not supplied.
 * @param path - File system path
 * @param type - MIME type (optional, auto-detected if not provided)
 * @returns Promise resolving to File object
 */
function fileFrom(path: string, type?: string): Promise<File>;

/**
 * Create File from file system path (sync)
 * NOTE(review): the Sync variants presumably read from disk on the calling
 * thread — prefer the async forms inside request handlers; confirm against
 * the fetch-blob implementation.
 * @param path - File system path
 * @param type - MIME type (optional, auto-detected if not provided)
 * @returns File object
 */
function fileFromSync(path: string, type?: string): File;

/**
 * Create Blob from file system path (async)
 * Like fileFrom, but the result carries no name/lastModified metadata.
 * @param path - File system path
 * @param type - MIME type (optional, auto-detected if not provided)
 * @returns Promise resolving to Blob object
 */
function blobFrom(path: string, type?: string): Promise<Blob>;

/**
 * Create Blob from file system path (sync)
 * See the NOTE on fileFromSync about synchronous disk reads.
 * @param path - File system path
 * @param type - MIME type (optional, auto-detected if not provided)
 * @returns Blob object
 */
function blobFromSync(path: string, type?: string): Blob;

Usage Examples:

import { fileFrom, fileFromSync, blobFrom, blobFromSync } from 'node-fetch';

// Async file loading
const imageFile = await fileFrom('./image.jpg', 'image/jpeg');
console.log('File name:', imageFile.name);     // 'image.jpg'
console.log('File size:', imageFile.size);     // File size in bytes
console.log('File type:', imageFile.type);     // 'image/jpeg'

// Sync file loading
const textFile = fileFromSync('./document.txt', 'text/plain');

// Auto-detect MIME type
const autoFile = await fileFrom('./data.json'); // Type will be 'application/json'

// Create blob from file
const documentBlob = await blobFrom('./document.pdf');
console.log('Blob type:', documentBlob.type); // 'application/pdf'

// Use with fetch for file uploads
const uploadFile = await fileFrom('./upload.png', 'image/png');

const formData = new FormData();
formData.append('file', uploadFile);

const response = await fetch('https://httpbin.org/post', {
  method: 'POST',
  body: formData
});

FormData Integration

Using File and Blob objects with FormData for multipart uploads.

/**
 * FormData class for multipart/form-data requests
 *
 * Mirrors the web FormData API: an ordered multimap of name/value entries
 * where values may be strings, Blobs, or Files.
 */
class FormData {
  append(name: string, value: string | Blob | File, filename?: string): void; // add an entry; existing entries with the same name are kept
  delete(name: string): void;
  get(name: string): FormDataEntryValue | null;  // first entry for the name, or null
  getAll(name: string): FormDataEntryValue[];    // every entry for the name
  has(name: string): boolean;
  set(name: string, value: string | Blob | File, filename?: string): void; // add an entry; existing entries with the same name are replaced
  forEach(callback: (value: FormDataEntryValue, key: string, parent: FormData) => void): void;
  entries(): IterableIterator<[string, FormDataEntryValue]>;
  keys(): IterableIterator<string>;
  values(): IterableIterator<FormDataEntryValue>;
}

// Values returned when reading entries back out of a FormData instance.
type FormDataEntryValue = string | File;

Usage Examples:

import { FormData, fileFrom } from 'node-fetch';

// Create form with file uploads
const formData = new FormData();

// Add text fields
formData.append('username', 'alice');
formData.append('description', 'Profile update');

// Add file from file system
const avatarFile = await fileFrom('./avatar.jpg', 'image/jpeg');
formData.append('avatar', avatarFile);

// Add file created in memory
const jsonFile = new File([JSON.stringify({ key: 'value' })], 'config.json', {
  type: 'application/json'
});
formData.append('config', jsonFile);

// Add blob data
const binaryBlob = new Blob([new Uint8Array([1, 2, 3, 4])], {
  type: 'application/octet-stream'
});
formData.append('data', binaryBlob, 'binary-data.bin');

// Upload form data
const response = await fetch('https://httpbin.org/post', {
  method: 'POST',
  body: formData
  // Content-Type header is set automatically with boundary
});

const result = await response.json();
console.log('Files uploaded:', Object.keys(result.files));

Stream Integration

Working with streams for efficient file processing and uploads.

// Subset of the Blob/File surface exercised by the streaming examples below.
interface StreamOperations {
  stream(): ReadableStream<Uint8Array>; // incremental, chunked access to the payload
  arrayBuffer(): Promise<ArrayBuffer>;  // whole payload buffered in memory at once
}

Usage Examples:

// Fix: createWriteStream is used below but was missing from the import.
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';

// Stream file upload — the request body is read lazily from disk, so the
// whole file never needs to be held in memory.
const fileStream = createReadStream('./large-file.zip');

const response = await fetch('https://httpbin.org/post', {
  method: 'POST',
  body: fileStream,
  headers: {
    'Content-Type': 'application/zip'
  }
});

// Process blob as stream
const imageBlob = await blobFrom('./image.jpg');
const stream = imageBlob.stream();

// Read the stream chunk by chunk.
const reader = stream.getReader();
const chunks = [];

while (true) {
  const { done, value } = await reader.read();
  if (done) break;

  chunks.push(value);
  console.log('Read chunk:', value.length, 'bytes');
}

// Reassemble the chunks into one contiguous buffer.
const totalData = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
let offset = 0;
for (const chunk of chunks) {
  totalData.set(chunk, offset);
  offset += chunk.length;
}

// Use with pipeline for efficient copying without manual chunk handling.
const sourceBlob = await blobFrom('./input.txt');
const destinationStream = createWriteStream('./output.txt');

await pipeline(sourceBlob.stream(), destinationStream);
console.log('File copied successfully');

File Upload Patterns

Common patterns for uploading files with progress tracking and error handling.

// Illustrative shape of the upload helpers implemented in the examples below.
interface FileUploadPattern {
  uploadSingleFile(file: File, url: string): Promise<Response>;
  uploadMultipleFiles(files: File[], url: string): Promise<Response>;
  uploadWithProgress(file: File, url: string, onProgress: (progress: number) => void): Promise<Response>; // onProgress receives a completion fraction — not demonstrated below
}

Usage Examples:

// Single file upload
/**
 * Upload one file from disk as multipart/form-data.
 * @param filePath  Path of the file to upload.
 * @param uploadUrl Endpoint that accepts a multipart POST.
 * @returns Parsed JSON body of the server response.
 * @throws Error when the server responds with a non-2xx status.
 */
async function uploadFile(filePath, uploadUrl) {
  const form = new FormData();
  form.append('file', await fileFrom(filePath));

  const res = await fetch(uploadUrl, {
    method: 'POST',
    body: form
  });

  // Surface HTTP-level failures explicitly; fetch does not reject on 4xx/5xx.
  if (!res.ok) {
    throw new Error(`Upload failed: ${res.status} ${res.statusText}`);
  }

  return await res.json();
}

// Multiple file upload
/**
 * Upload several files in one multipart POST, under fields file0, file1, …
 * @param filePaths Array of file system paths to upload.
 * @param uploadUrl Endpoint that accepts a multipart POST.
 * @returns The raw fetch Response (status not checked here).
 */
async function uploadFiles(filePaths, uploadUrl) {
  const formData = new FormData();

  // Load all files in parallel instead of awaiting them one at a time.
  const files = await Promise.all(filePaths.map((p) => fileFrom(p)));
  files.forEach((file, i) => formData.append(`file${i}`, file));

  return await fetch(uploadUrl, {
    method: 'POST',
    body: formData
  });
}

// File upload with custom metadata
/**
 * Upload a file together with caller-supplied metadata and an ISO timestamp,
 * all in one multipart POST.
 * @param filePath  Path of the file to upload.
 * @param metadata  Arbitrary JSON-serializable object sent in the 'metadata' field.
 * @param uploadUrl Endpoint that accepts a multipart POST.
 * @returns The raw fetch Response.
 */
async function uploadWithMetadata(filePath, metadata, uploadUrl) {
  const form = new FormData();
  form.append('file', await fileFrom(filePath));
  form.append('metadata', JSON.stringify(metadata));
  form.append('timestamp', new Date().toISOString());

  return await fetch(uploadUrl, {
    method: 'POST',
    body: form
  });
}

// Usage examples
try {
  // Upload single file
  const result = await uploadFile('./document.pdf', 'https://api.example.com/upload');
  console.log('Upload successful:', result);
  
  // Upload multiple files
  const multiResult = await uploadFiles([
    './image1.jpg',
    './image2.png',
    './document.txt'
  ], 'https://api.example.com/upload/batch');
  
  // Upload with metadata
  const metaResult = await uploadWithMetadata('./data.csv', {
    category: 'reports',
    department: 'sales',
    confidential: false
  }, 'https://api.example.com/upload/data');
  
} catch (error) {
  console.error('Upload failed:', error.message);
}

Binary Data Handling

Advanced patterns for working with binary data and custom file formats.

// Illustrative shape only — ProcessedData and FileMetadata are not declared
// anywhere in this document.
interface BinaryDataHandling {
  processBinaryFile(path: string): Promise<ProcessedData>;
  createCustomFile(data: Uint8Array, metadata: FileMetadata): File;
  validateFileFormat(file: File): Promise<boolean>;
}

Usage Examples:

// Process binary file with custom format
/**
 * Parse a file in the custom 'MYMT' container format.
 * Header layout (16 bytes): bytes 0-3 magic 'MYMT', byte 4 version,
 * byte 5 flags, bytes 6-9 payload length (little endian), 10-15 reserved.
 * The payload follows from byte 16 onward.
 * @param filePath Path of the file to parse.
 * @returns { magic, version, flags, dataLength, data }
 * @throws Error('Invalid file format') on a truncated or mismatched header.
 */
async function processBinaryFile(filePath) {
  const HEADER_SIZE = 16;

  const blob = await blobFrom(filePath);
  const buffer = await blob.arrayBuffer();
  const data = new Uint8Array(buffer);

  // Reject truncated files up front. Previously a short file whose first
  // bytes happened to be 'MYMT' crashed with an opaque RangeError from
  // DataView.getUint32 instead of a clear format error.
  if (data.length < HEADER_SIZE) {
    throw new Error('Invalid file format');
  }

  const magic = new TextDecoder().decode(data.slice(0, 4));
  if (magic !== 'MYMT') { // Custom magic bytes
    throw new Error('Invalid file format');
  }

  return {
    magic,
    version: data[4],
    flags: data[5],
    dataLength: new DataView(buffer).getUint32(6, true), // Little endian
    data: data.slice(HEADER_SIZE) // Actual data starts after the header
  };
}

// Create custom binary file
function createBinaryFile(content, filename) {
  // Create header
  const header = new Uint8Array(16);
  const encoder = new TextEncoder();
  
  // Magic bytes
  header.set(encoder.encode('MYMT'), 0);
  
  // Version and flags
  header[4] = 1; // Version
  header[5] = 0; // Flags
  
  // Data length (little endian)
  const view = new DataView(header.buffer);
  view.setUint32(6, content.length, true);
  
  // Combine header and content
  const fileData = new Uint8Array(header.length + content.length);
  fileData.set(header, 0);
  fileData.set(content, header.length);
  
  return new File([fileData], filename, {
    type: 'application/octet-stream'
  });
}

// Validate file format
async function validateImageFile(file) {
  if (!file.type.startsWith('image/')) {
    return false;
  }
  
  const buffer = await file.arrayBuffer();
  const data = new Uint8Array(buffer);
  
  // Check for common image format signatures
  if (data.length < 4) return false;
  
  // JPEG
  if (data[0] === 0xFF && data[1] === 0xD8 && data[2] === 0xFF) {
    return true;
  }
  
  // PNG
  if (data[0] === 0x89 && data[1] === 0x50 && data[2] === 0x4E && data[3] === 0x47) {
    return true;
  }
  
  // GIF
  if ((data[0] === 0x47 && data[1] === 0x49 && data[2] === 0x46) &&
      (data[3] === 0x38 && (data[4] === 0x37 || data[4] === 0x39))) {
    return true;
  }
  
  return false;
}

// Usage
const customFile = createBinaryFile(
  new TextEncoder().encode('Hello, World!'),
  'custom-data.bin'
);

const isValid = await validateImageFile(await fileFrom('./image.jpg'));
console.log('Valid image:', isValid);

docs

body-processing.md

error-handling.md

file-blob.md

headers.md

http-client.md

index.md

request-response.md

utilities.md

tile.json