TensorFlow backend for TensorFlow.js on Node.js — provides native TensorFlow execution for server-side JavaScript applications, accelerated by the TensorFlow C binary under the hood
—
TensorFlow.js Node extends the standard TensorFlow.js IO capabilities with Node.js-specific handlers for file system operations and HTTP requests. This enables saving and loading models using the native file system and fetching models from remote servers with enhanced Node.js features.
Create an IO handler for saving and loading models using the Node.js file system.
/**
* Create a file system IO handler for model persistence
* @param path - File path or array of paths for saving/loading models
* @returns IOHandler for file system operations
*/
function fileSystem(path: string | string[]): IOHandler;

Usage Examples:
import * as tf from '@tensorflow/tfjs-node';
// Create a simple model
const model = tf.sequential({
layers: [
tf.layers.dense({ inputShape: [4], units: 10, activation: 'relu' }),
tf.layers.dense({ units: 3, activation: 'softmax' })
]
});
model.compile({ optimizer: 'adam', loss: 'sparseCategoricalCrossentropy' });
// Save model to file system
await model.save(tf.io.fileSystem('./my-model'));
// This creates:
// ./my-model/model.json (model topology and training config)
// ./my-model/weights.bin (model weights)
// Load model from file system
const loadedModel = await tf.loadLayersModel(tf.io.fileSystem('./my-model/model.json'));
// Alternative: Use file:// URL scheme (automatically routed to fileSystem)
await model.save('file://./my-model-alt');
const altLoadedModel = await tf.loadLayersModel('file://./my-model-alt/model.json');

// Save with separate paths for model and weights
const customHandler = tf.io.fileSystem([
'./models/model.json',
'./weights/weights.bin'
]);
await model.save(customHandler);
// Load from custom paths
const loadedCustomModel = await tf.loadLayersModel(tf.io.fileSystem('./models/model.json'));

Automatic routing for file:// URLs to the file system handler.
/**
* Router function for file:// URLs (automatically registered)
* @param url - File URL or path
* @returns FileSystem IOHandler or null if not a file URL
*/
function nodeFileSystemRouter(url: string | string[]): IOHandler | null;

The file system router is automatically registered and handles file:// URLs:
// These are equivalent:
await model.save('file://./my-model');
await model.save(tf.io.fileSystem('./my-model'));
// Router handles various file URL formats
await model.save('file:///absolute/path/to/model');
await model.save('file://./relative/path/to/model');

Create an IO handler for loading models from HTTP/HTTPS URLs with Node.js-specific features.
/**
* Create an HTTP handler for loading models from URLs
* @param path - URL to the model.json file
* @param requestInit - Fetch options (headers, method, etc.)
* @param weightPathPrefix - Custom prefix for weight file URLs
* @returns IOHandler for HTTP operations
*/
function nodeHTTPRequest(
path: string,
requestInit?: RequestInit,
weightPathPrefix?: string
): IOHandler;

Usage Examples:
// Load model from HTTP URL
const httpModel = await tf.loadLayersModel(
tf.io.nodeHTTPRequest('https://example.com/models/my-model.json')
);
// Load with custom headers
const authenticatedModel = await tf.loadLayersModel(
tf.io.nodeHTTPRequest(
'https://api.example.com/models/my-model.json',
{
headers: {
'Authorization': 'Bearer your-token-here',
'User-Agent': 'TensorFlow.js-Node/4.22.0'
}
}
)
);
// Load with custom weight path prefix
const cdnModel = await tf.loadLayersModel(
tf.io.nodeHTTPRequest(
'https://api.example.com/models/my-model.json',
undefined,
'https://cdn.example.com/weights/' // Weights served from different domain
)
);

Automatic routing for HTTP/HTTPS URLs.
/**
* Router function for HTTP/HTTPS URLs (automatically registered)
* @param url - HTTP or HTTPS URL
* @returns HTTP IOHandler or null if not an HTTP URL
*/
function nodeHTTPRequestRouter(url: string): IOHandler | null;

The HTTP router is automatically registered and handles HTTP/HTTPS URLs:
// These are equivalent:
const model1 = await tf.loadLayersModel('https://example.com/model.json');
const model2 = await tf.loadLayersModel(
tf.io.nodeHTTPRequest('https://example.com/model.json')
);

The io namespace is extended with Node.js-specific handlers:
const io: {
// All standard tf.io functionality plus:
fileSystem: (path: string | string[]) => IOHandler;
nodeHTTPRequest: (path: string, requestInit?: RequestInit, weightPathPrefix?: string) => IOHandler;
// Standard tf.io handlers also available:
browserFiles: (files: File | File[]) => IOHandler;
browserHTTPRequest: (path: string, requestInit?: RequestInit) => IOHandler;
indexedDB: (modelURL: string) => IOHandler;
localStorage: (modelURL: string) => IOHandler;
// ... other standard handlers
};

/**
 * Persist a model under a versioned directory and also overwrite './models/latest'.
 * @param model - The model to save
 * @param version - Version label; the model is written to ./models/v<version>
 */
async function saveModelVersions(model: tf.LayersModel, version: string) {
  const baseDir = './models';
  // Write the versioned copy first, then refresh the 'latest' alias.
  for (const targetDir of [`${baseDir}/v${version}`, `${baseDir}/latest`]) {
    await model.save(tf.io.fileSystem(targetDir));
  }
  console.log(`Model saved as version ${version} and latest`);
}
/**
 * Load the model most recently saved as 'latest' from the local file system.
 * @returns The loaded model
 * @throws Re-throws any load failure after logging it with context
 */
async function loadLatestModel(): Promise<tf.LayersModel> {
  const handler = tf.io.fileSystem('./models/latest/model.json');
  try {
    return await tf.loadLayersModel(handler);
  } catch (error) {
    console.error('Could not load latest model:', error);
    throw error;
  }
}

/**
 * Try each URL in order and return the first model that loads successfully.
 * @param urls - Candidate model.json locations (https:// or file:// URLs)
 * @returns The first successfully loaded model
 * @throws Error when every URL fails to load
 */
async function loadModelWithFallback(urls: string[]): Promise<tf.LayersModel> {
  for (const url of urls) {
    try {
      console.log(`Attempting to load model from ${url}`);
      const model = await tf.loadLayersModel(url);
      console.log(`Successfully loaded model from ${url}`);
      return model;
    } catch (error) {
      // Under strict TS the catch variable is `unknown`; narrow before
      // reading `.message` (the original `error.message` fails to compile).
      const message = error instanceof Error ? error.message : String(error);
      console.warn(`Failed to load from ${url}:`, message);
    }
  }
  throw new Error('All model loading attempts failed');
}
// Usage
const model = await loadModelWithFallback([
'https://cdn.example.com/models/latest.json',
'https://backup.example.com/models/latest.json',
'file://./local-models/fallback.json'
]);

/**
 * Load a model from an endpoint that requires bearer-token authentication.
 * @param modelUrl - URL of the protected model.json
 * @param apiKey - Token sent as 'Authorization: Bearer <apiKey>'
 * @returns The loaded model
 */
async function loadProtectedModel(modelUrl: string, apiKey: string): Promise<tf.LayersModel> {
  const requestInit = {
    headers: {
      'Authorization': `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
      'User-Agent': 'MyApp/1.0.0'
    },
    timeout: 30000 // 30 second timeout
  };
  return await tf.loadLayersModel(tf.io.nodeHTTPRequest(modelUrl, requestInit));
}

/**
 * Load a model whose topology (model.json) and weight files live on different hosts.
 * @param modelJsonUrl - URL of the model topology file
 * @param cdnBaseUrl - Base URL prepended to every weight file path
 * @returns The loaded model
 */
async function loadModelWithCDN(modelJsonUrl: string, cdnBaseUrl: string): Promise<tf.LayersModel> {
  // 'no-cache' ensures the freshest model definition is always fetched.
  const requestInit = { headers: { 'Cache-Control': 'no-cache' } };
  // The third argument redirects all weight-file requests to the CDN base URL.
  return await tf.loadLayersModel(
    tf.io.nodeHTTPRequest(modelJsonUrl, requestInit, cdnBaseUrl)
  );
}
// Usage
const model = await loadModelWithCDN(
'https://api.example.com/models/image-classifier.json',
'https://weights-cdn.example.com/v2/'
);

/**
 * Save a model to disk together with a metadata.json and a manifest.json
 * describing the exported files.
 * @param model - Model to export
 * @param exportPath - Directory the model and sidecar files are written to
 * @param metadata - Arbitrary JSON-serializable metadata to store alongside
 */
async function exportModelWithMetadata(
  model: tf.LayersModel,
  exportPath: string,
  metadata: any
) {
  const fs = require('fs');
  const path = require('path');
  // Persist the model first — this produces model.json and weights.bin
  // under exportPath.
  await model.save(tf.io.fileSystem(exportPath));
  // Small helper: pretty-print JSON into a file inside the export directory.
  const writeJson = (name: string, data: unknown) =>
    fs.writeFileSync(path.join(exportPath, name), JSON.stringify(data, null, 2));
  writeJson('metadata.json', metadata);
  // Manifest records file layout plus export provenance.
  writeJson('manifest.json', {
    modelPath: 'model.json',
    weightsPath: 'weights.bin',
    metadataPath: 'metadata.json',
    exportedAt: new Date().toISOString(),
    tfVersion: tf.version.tfjs
  });
  console.log(`Model exported to ${exportPath} with metadata and manifest`);
}interface IOHandler {
// A handler may implement load, save, or both.
load?(): Promise<ModelArtifacts>;
save?(modelArtifacts: ModelArtifacts): Promise<SaveResult>;
}
/** Serialized model contents exchanged between IO handlers. */
interface ModelArtifacts {
// Topology as a parsed JSON object, or a raw binary buffer for graph models.
modelTopology: {} | ArrayBuffer;
weightSpecs?: WeightsManifestEntry[];
// Concatenated binary weight values, ordered per weightSpecs.
weightData?: ArrayBuffer;
format?: string;
generatedBy?: string;
convertedBy?: string;
signature?: {};
userDefinedMetadata?: {[key: string]: unknown};
modelInitializer?: {};
trainingConfig?: TrainingConfig;
}
/** Result returned by a successful save(). */
interface SaveResult {
modelArtifactsInfo: ModelArtifactsInfo;
// NOTE(review): presumably populated only by HTTP-based handlers — confirm.
responses?: Response[];
}
/** Bookkeeping about the artifacts that were saved. */
interface ModelArtifactsInfo {
dateSaved: Date;
modelTopologyType: 'JSON' | 'Protobuf';
modelTopologyBytes?: number;
weightSpecsBytes?: number;
weightDataBytes?: number;
}
// Node.js-specific RequestInit interface
interface RequestInit {
method?: string;
headers?: Record<string, string>;
body?: string | Buffer | ArrayBuffer;
// Request timeout in milliseconds — a Node-only extension, not part of the browser RequestInit.
timeout?: number;
agent?: any; // HTTP/HTTPS agent
}Install with Tessl CLI
npx tessl i tessl/npm-tensorflow--tfjs-node