or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

docs

devtools.md · dom-components.md · index.md · native-modules.md · utilities.md · winter-polyfills.md
tile.json

docs/utilities.md

Utilities

The Expo SDK provides utility functions for blob conversion and binary data processing. These utilities simplify working with binary data conversion across different platforms.

Capabilities

Blob Utilities

Functions for converting Blob objects to ArrayBuffer format for processing binary data.

/**
 * Converts a Blob to an ArrayBuffer using modern or legacy APIs.
 * Presumably prefers `Blob.arrayBuffer()` and falls back to the
 * FileReader path below — implementation not shown here; confirm.
 * @param blob - Blob object to convert
 * @returns Promise resolving to ArrayBuffer containing the blob data
 */
function blobToArrayBufferAsync(blob: Blob): Promise<ArrayBuffer>;

/**
 * Converts a Blob to an ArrayBuffer using the FileReader API (legacy method).
 * Intended for environments without `Blob.arrayBuffer()` support.
 * @param blob - Blob object to convert  
 * @returns Promise resolving to ArrayBuffer containing the blob data
 */
function legacyBlobToArrayBufferAsync(blob: Blob): Promise<ArrayBuffer>;

Usage Examples

Working with Blobs

import { blobToArrayBufferAsync, legacyBlobToArrayBufferAsync } from 'expo';

// Convert blob to ArrayBuffer for processing
/**
 * Reads a Blob into a Uint8Array for byte-level processing.
 * @param blob - Binary payload to read
 * @returns Promise resolving to the blob's bytes
 * @throws Re-throws any conversion failure after logging it
 */
async function processBlobData(blob: Blob): Promise<Uint8Array> {
  try {
    // Modern conversion path; wrap the buffer for easy indexing.
    const buffer = await blobToArrayBufferAsync(blob);
    const bytes = new Uint8Array(buffer);

    console.log('Blob converted:', bytes.length, 'bytes');
    return bytes;
  } catch (error) {
    console.error('Failed to convert blob:', error);
    throw error;
  }
}

// Process file upload
/**
 * Converts an uploaded File (a Blob subclass) into raw bytes and
 * computes a simple additive checksum folded into a single byte.
 */
async function handleFileUpload(file: File) {
  console.log('Processing file:', file.name, file.size, 'bytes');

  const arrayBuffer = await blobToArrayBufferAsync(file);
  const bytes = new Uint8Array(arrayBuffer);

  // Sum of all bytes, kept in the 0-255 range at every step.
  const checksum = bytes.reduce((sum, byte) => (sum + byte) % 256, 0);

  return {
    filename: file.name,
    size: file.size,
    type: file.type,
    checksum,
    arrayBuffer,
  };
}

// Legacy conversion for older environments
/**
 * Wraps the FileReader-based conversion path, logging failures
 * before re-throwing them to the caller.
 */
async function convertBlobLegacy(blob: Blob): Promise<ArrayBuffer> {
  try {
    const result = await legacyBlobToArrayBufferAsync(blob);
    return result;
  } catch (error) {
    console.error('Legacy blob conversion failed:', error);
    throw error;
  }
}

Binary Data Processing

import { blobToArrayBufferAsync } from 'expo';

// Image processing utilities
// Image processing utilities
class ImageProcessor {
  /**
   * Reads an image blob and sniffs its container format from the
   * magic-number prefix. Width/height are placeholders — real header
   * parsing is out of scope for this example.
   */
  static async processImage(imageBlob: Blob): Promise<{ 
    width: number; 
    height: number; 
    format: string; 
    data: Uint8Array; 
  }> {
    const data = new Uint8Array(await blobToArrayBufferAsync(imageBlob));

    // True when the buffer begins with the given byte sequence.
    const startsWith = (...magic: number[]): boolean =>
      magic.every((byte, i) => data[i] === byte);

    let format = 'unknown';
    if (startsWith(0xff, 0xd8, 0xff)) {
      format = 'jpeg';
    } else if (startsWith(0x89, 0x50, 0x4e, 0x47)) {
      format = 'png';
    } else if (startsWith(0x47, 0x49, 0x46)) {
      format = 'gif';
    }

    // Note: Real image processing would require more sophisticated parsing
    return {
      width: 0, // Would need to parse image headers
      height: 0, // Would need to parse image headers
      format,
      data,
    };
  }

  /**
   * Reads an image file and returns its contents base64-encoded,
   * suitable for display (data URLs) or storage.
   */
  static async extractImageData(imageFile: File): Promise<string> {
    const bytes = new Uint8Array(await blobToArrayBufferAsync(imageFile));

    // btoa expects a "binary string": one char per byte, codepoints 0-255.
    let binary = '';
    for (const byte of bytes) {
      binary += String.fromCharCode(byte);
    }

    return btoa(binary);
  }
}

// Usage
// Usage
async function handleImageUpload(file: File) {
  try {
    const processed = await ImageProcessor.processImage(file);
    console.log('Image processed:', processed.format, processed.data.length, 'bytes');

    const encoded = await ImageProcessor.extractImageData(file);
    console.log('Base64 data length:', encoded.length);

    return { imageData: processed, base64Data: encoded };
  } catch (error) {
    console.error('Image processing failed:', error);
    throw error;
  }
}

Data Conversion Utilities

import { blobToArrayBufferAsync } from 'expo';

// Convert various data formats
// Convert various data formats
class DataConverter {
  /**
   * Encodes a blob's bytes as a base64 string.
   * @param blob - Binary payload to encode
   * @returns Promise resolving to the base64 representation
   */
  static async blobToBase64(blob: Blob): Promise<string> {
    const arrayBuffer = await blobToArrayBufferAsync(blob);
    const bytes = new Uint8Array(arrayBuffer);

    // btoa expects a "binary string": one char per byte, codepoints 0-255.
    let binary = '';
    for (let i = 0; i < bytes.length; i++) {
      binary += String.fromCharCode(bytes[i]);
    }

    return btoa(binary);
  }

  /**
   * Decodes a blob's bytes into text.
   * @param blob - Binary payload to decode
   * @param encoding - Any label accepted by TextDecoder (default: 'utf-8')
   * @returns Promise resolving to the decoded string
   */
  static async blobToText(blob: Blob, encoding: string = 'utf-8'): Promise<string> {
    const arrayBuffer = await blobToArrayBufferAsync(blob);
    const decoder = new TextDecoder(encoding);
    return decoder.decode(arrayBuffer);
  }

  /**
   * Wraps an ArrayBuffer in a Blob with the given MIME type.
   */
  static arrayBufferToBlob(buffer: ArrayBuffer, mimeType: string): Blob {
    return new Blob([buffer], { type: mimeType });
  }

  /**
   * UTF-8 encodes a string into a standalone, exact-length ArrayBuffer.
   *
   * Fix: the original returned `encoder.encode(str).buffer`, whose static
   * type is `ArrayBufferLike` (not the declared `ArrayBuffer`) under current
   * TypeScript lib typings, and which is only correct while the Uint8Array
   * view happens to span its whole backing buffer. Copying into a freshly
   * allocated buffer guarantees both the type and the exact byte length.
   */
  static stringToArrayBuffer(str: string): ArrayBuffer {
    const encoded = new TextEncoder().encode(str);
    const buffer = new ArrayBuffer(encoded.byteLength);
    new Uint8Array(buffer).set(encoded);
    return buffer;
  }
}

// Usage examples
// Usage examples
async function processDataFile(file: File) {
  try {
    // Base64 form for storage or transmission.
    const base64 = await DataConverter.blobToBase64(file);

    // Only text/* files get their content decoded to a string.
    const isTextFile = file.type.startsWith('text/');
    const textContent = isTextFile ? await DataConverter.blobToText(file) : null;

    // Raw binary form.
    const arrayBuffer = await blobToArrayBufferAsync(file);

    return {
      filename: file.name,
      size: file.size,
      type: file.type,
      base64,
      textContent,
      arrayBuffer,
    };
  } catch (error) {
    console.error('Data processing failed:', error);
    throw error;
  }
}

Platform Considerations

Web vs React Native

The utility functions work consistently across platforms but may have different underlying implementations:

import { blobToArrayBufferAsync, getBundleUrl } from 'expo';

// Platform-aware blob processing
// Platform-aware blob processing
async function processBlobSafely(blob: Blob): Promise<ArrayBuffer | null> {
  let result: ArrayBuffer | null = null;
  try {
    result = await blobToArrayBufferAsync(blob);
  } catch (error) {
    // Swallow the failure and signal it with a null result.
    console.warn('Blob processing failed:', error);
  }
  return result;
}

// Safe bundle URL usage
function getBundleInfoSafe(): { url: string | null; isDev: boolean } {
  try {
    const url = getBundleUrl();
    const isDev = url ? url.includes('localhost') || url.includes('127.0.0.1') : false;
    return { url, isDev };
  } catch (error) {
    console.warn('Bundle URL not available:', error);
    return { url: null, isDev: false };
  }
}

Error Handling

// Robust error handling for utilities
/**
 * Converts a blob to bytes, distinguishing "unsupported platform" from
 * unexpected failures. Returns null on any failure.
 * @param blob - Binary payload to convert
 * @returns The blob's bytes, or null if conversion failed
 */
async function robustBlobConversion(blob: Blob): Promise<Uint8Array | null> {
  try {
    const arrayBuffer = await blobToArrayBufferAsync(blob);
    return new Uint8Array(arrayBuffer);
  } catch (error) {
    // Fix: catch variables are `unknown` under strict TS, so the original
    // `error.message` did not compile (and would throw at runtime for
    // non-Error throwables). Narrow before reading `.message`.
    const message = error instanceof Error ? error.message : String(error);
    if (message.includes('not supported')) {
      console.warn('Blob conversion not supported on this platform');
    } else {
      console.error('Unexpected blob conversion error:', error);
    }
    return null;
  }
}

/**
 * Returns the bundle URL, or null when it cannot be read
 * (the failure is logged as a warning).
 */
function safeBundleUrl(): string | null {
  let url: string | null = null;
  try {
    url = getBundleUrl();
  } catch (error) {
    console.warn('Bundle URL access failed:', error);
  }
  return url;
}

Performance Considerations

// For large blobs, consider chunked processing
async function processLargeBlob(blob: Blob): Promise<void> {
  if (blob.size > 10 * 1024 * 1024) { // 10MB threshold
    console.warn('Processing large blob:', blob.size, 'bytes');
  }
  
  const arrayBuffer = await blobToArrayBufferAsync(blob);
  
  // Process in chunks for better memory management
  const chunkSize = 1024 * 1024; // 1MB chunks
  const bytes = new Uint8Array(arrayBuffer);
  
  for (let offset = 0; offset < bytes.length; offset += chunkSize) {
    const chunk = bytes.slice(offset, offset + chunkSize);
    // Process chunk
    console.log('Processed chunk:', offset, 'to', offset + chunk.length);
  }
}