Batch Operations

Efficient operations for handling multiple cache items simultaneously, including saving multiple items at once and loading all cached files within a namespace.
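
For orientation, here is a minimal sketch that combines both operations, assuming the Cache factory and ns option used in the examples further down this page:

// Minimal sketch: batch save followed by a full load.
const cache = Cache({ ns: "example" });

const { paths } = await cache.save([
  { key: "item:1", value: { ready: true } },
  { key: "item:2", value: { ready: false } }
]);
console.log(`Saved ${paths.length} items`);

const { files } = await cache.load();
console.log(`Loaded ${files.length} items back from the "example" namespace`);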

Capabilities

Batch Save Operation

Saves multiple cache items in a single operation with validation and error handling.

/**
 * Saves several items to the cache in one operation
 * @param items - Array of objects with key and value properties; null/undefined entries are filtered out
 * @returns Promise resolving to object containing paths of saved files
 * @throws Error if any item doesn't have required key/value properties
 */
save(items: ({key: string, value: any} | null | undefined)[]): Promise<{paths: string[]}>;

Usage Examples:

// Basic batch save
const result = await cache.save([
  { key: "user:1", value: { name: "Alice", age: 30 } },
  { key: "user:2", value: { name: "Bob", age: 25 } },
  { key: "user:3", value: { name: "Charlie", age: 35 } }
]);

console.log(`Saved ${result.paths.length} files`);
console.log("File paths:", result.paths);

// With null/undefined filtering (automatically handled)
await cache.save([
  { key: "valid:1", value: "data1" },
  null,                              // Filtered out
  undefined,                         // Filtered out
  { key: "valid:2", value: "data2" },
]);

// Mixed data types
await cache.save([
  { key: "config:app", value: { theme: "dark", language: "en" } },
  { key: "config:user", value: { notifications: true } },
  { key: "stats:counter", value: 42 },
  { key: "stats:timestamp", value: new Date() }
]);

Batch Load Operation

Loads all cached files within the cache's namespace.

/**
 * Loads all cached files within the cache's namespace
 * @returns Promise resolving to object containing array of file objects with path and value
 */
load(): Promise<{files: {path: string, value: any}[]}>;

Usage Examples:

// Load all files in cache
const result = await cache.load();

console.log(`Found ${result.files.length} cached files`);
result.files.forEach(file => {
  console.log(`Path: ${file.path}, Value:`, file.value);
});

// Process loaded data
const userData = result.files
  .filter(file => file.path.includes("user:"))
  .map(file => file.value);

// Load with namespace isolation
const userCache = Cache({ ns: "users" });
const productCache = Cache({ ns: "products" });

const users = await userCache.load();     // Only user-namespaced files
const products = await productCache.load(); // Only product-namespaced files

console.log(`Users: ${users.files.length}, Products: ${products.files.length}`);

Batch Operation Patterns

Data Migration

// Migrate data between cache instances
const oldCache = Cache({ ns: "v1" });
const newCache = Cache({ ns: "v2" });

// Load all old data
const oldData = await oldCache.load();

// Transform and save to new cache
const migratedItems = oldData.files.map(file => ({
  key: file.path.split('/').pop(), // Extract key from path
  value: transformDataToV2(file.value)
}));

await newCache.save(migratedItems);
console.log(`Migrated ${migratedItems.length} items`);
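
The transformDataToV2 helper above is not part of the cache API; it stands in for whatever reshaping the new format needs. A hypothetical placeholder:

// Hypothetical transform for the migration example above.
function transformDataToV2(value: any) {
  return {
    ...value,
    schemaVersion: 2,                        // example field introduced in v2
    migratedAt: new Date().toISOString()
  };
}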

Bulk Data Processing

// Process external data and cache results
const apiResponses = await Promise.all([
  fetchUserData("123"),
  fetchUserData("456"), 
  fetchUserData("789")
]);

const cacheItems = apiResponses.map((data) => ({
  key: `user:${data.id}`,
  value: {
    ...data,
    cachedAt: new Date(),
    processedBy: "batch-processor"
  }
}));

const result = await cache.save(cacheItems);
console.log(`Cached ${result.paths.length} user records`);
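
fetchUserData is likewise a stand-in for whatever external source is being cached. A minimal sketch, assuming a JSON HTTP endpoint (the URL is a placeholder):

// Hypothetical data source used by the bulk-processing example above.
async function fetchUserData(id: string): Promise<{ id: string; name: string }> {
  const response = await fetch(`https://api.example.com/users/${id}`);
  return response.json();
}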

Cache Synchronization

// Sync cache contents with external source
async function syncCacheWithDatabase() {
  // Load current cache state
  const cached = await cache.load();
  const cachedIds = new Set(
    cached.files
      .map(f => f.path.split(':')[1])
      .filter(Boolean)
  );

  // Get fresh data from database
  const dbRecords = await fetchAllFromDatabase();
  
  // Prepare updates for modified/new records
  const updates = dbRecords
    .filter(record => 
      !cachedIds.has(record.id) || 
      isNewer(record, getCachedVersion(cached.files, record.id))
    )
    .map(record => ({
      key: `record:${record.id}`,
      value: record
    }));

  if (updates.length > 0) {
    await cache.save(updates);
    console.log(`Updated ${updates.length} records in cache`);
  }
}
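
isNewer and getCachedVersion are assumed helpers rather than cache API methods. One possible sketch, assuming each record carries an updatedAt timestamp:

// Hypothetical helpers for the sync example above.
function getCachedVersion(files: { path: string; value: any }[], id: string) {
  const match = files.find(f => f.path.includes(`record:${id}`));
  return match?.value;
}

function isNewer(record: { updatedAt: string }, cached?: { updatedAt: string }) {
  if (!cached) return true;                  // never cached before
  return new Date(record.updatedAt) > new Date(cached.updatedAt);
}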

Export and Backup

import { readFile, writeFile } from "node:fs/promises";

// Export cache contents for backup
async function exportCache() {
  const allData = await cache.load();
  
  const exportData = {
    timestamp: new Date().toISOString(),
    cacheConfig: {
      basePath: cache.basePath,
      ns: cache.ns,
      hash: cache.hash
    },
    files: allData.files.map(file => ({
      key: extractKeyFromPath(file.path),
      value: file.value,
      path: file.path
    }))
  };
  
  await writeFile("cache-backup.json", JSON.stringify(exportData, null, 2));
  console.log(`Exported ${exportData.files.length} cache items`);
}

// Import cache contents from backup
async function importCache(backupFile: string) {
  const backupData = JSON.parse(await readFile(backupFile, "utf8"));
  
  const items = backupData.files.map(item => ({
    key: item.key,
    value: item.value
  }));
  
  const result = await cache.save(items);
  console.log(`Imported ${result.paths.length} items from backup`);
}
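
The export example assumes an extractKeyFromPath helper. A minimal sketch, assuming the key is the final path segment with any file extension removed:

// Hypothetical helper: recover a cache key from a stored file path.
function extractKeyFromPath(path: string): string {
  const fileName = path.split("/").pop() ?? path;
  return fileName.replace(/\.[^.]+$/, "");   // strip a trailing extension, if any
}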

Filtered Operations

// Save with filtering and validation
async function saveValidatedData(rawItems: any[]) {
  const validItems = rawItems
    .filter(item => item && typeof item === 'object')
    .filter(item => item.key && item.value !== undefined)
    .map(item => ({
      key: sanitizeKey(item.key),
      value: validateAndTransform(item.value)
    }));

  if (validItems.length === 0) {
    console.log("No valid items to save");
    return { paths: [] };
  }

  return await cache.save(validItems);
}

// Load with filtering
async function loadUserData() {
  const allData = await cache.load();
  
  return allData.files
    .filter(file => file.path.includes("/user:"))
    .filter(file => file.value && !file.value.deleted)
    .map(file => ({
      id: extractIdFromPath(file.path),
      ...file.value
    }));
}
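
sanitizeKey, validateAndTransform, and extractIdFromPath are illustrative stand-ins; possible sketches:

// Hypothetical helpers for the filtered operations above.
function sanitizeKey(key: string): string {
  return key.trim().replace(/[^a-zA-Z0-9:_-]/g, "_");   // keep keys path-safe
}

function validateAndTransform(value: any) {
  return value;                                          // apply whatever validation the data needs
}

function extractIdFromPath(path: string): string {
  const tail = path.split("user:").pop() ?? "";
  return tail.replace(/\.[^.]+$/, "");                   // e.g. ".../user:123.json" -> "123"
}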

Error Handling

Save Operation Errors

// Handle validation errors in save operations
try {
  await cache.save([
    { key: "valid", value: "data" },
    { key: "", value: "invalid-empty-key" },  // Will cause error
    { value: "missing-key" }                   // Will cause error
  ]);
} catch (error) {
  console.error("Save failed:", error.message);
  // Error: "Save items not valid, must be an array of {key, value} objects."
}

// Robust save with error handling
async function safeBatchSave(items: any[]) {
  try {
    // Pre-validate items
    const validItems = items.filter(item => 
      item && 
      typeof item === 'object' && 
      typeof item.key === 'string' && 
      item.key.trim() !== '' &&
      item.value !== undefined
    );

    if (validItems.length === 0) {
      return { paths: [], errors: ["No valid items provided"] };
    }

    const result = await cache.save(validItems);
    return { ...result, errors: [] };
    
  } catch (error) {
    return { 
      paths: [], 
      errors: [error.message] 
    };
  }
}
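
Calling the wrapper looks like this (incomingItems is whatever untrusted array you receive):

// Example call to safeBatchSave from the sketch above.
const { paths, errors } = await safeBatchSave(incomingItems);
if (errors.length > 0) {
  console.warn("Batch save reported problems:", errors);
} else {
  console.log(`Safely saved ${paths.length} items`);
}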

Load Operation Errors

// Handle load operation errors
async function safeLoad() {
  try {
    return await cache.load();
  } catch (error) {
    console.error("Load failed:", error.message);
    return { files: [] };
  }
}

// Load with individual file error handling
async function robustLoad() {
  try {
    const result = await cache.load();
    
    // Filter out any corrupted files
    const validFiles = result.files.filter(file => {
      const isValid = Boolean(file.path) && file.value !== undefined;
      if (!isValid) {
        console.warn(`Skipping corrupted cache file: ${file.path}`);
      }
      return isValid;
    });

    return { files: validFiles };
  } catch (error) {
    console.error("Cache load completely failed:", error.message);
    return { files: [] };
  }
}