A slightly nicer interface to Postgres over node-libpq providing both sync and async operations
88
Quality
Pending
Does it follow best practices?
Impact
88%
1.20x
Average score across 10 eval scenarios
PostgreSQL COPY operation support through Node.js streams for high-performance bulk data import and export operations.
Get a duplex stream for PostgreSQL COPY operations.
/**
* Get a duplex stream for COPY operations
* @returns CopyStream instance for reading/writing bulk data
*/
client.getCopyStream(): CopyStream;
interface CopyStream extends Duplex {
/** Write data to the COPY stream */
write(chunk: Buffer, encoding?: string, callback?: Function): boolean;
/** End the COPY operation */
end(chunk?: Buffer, encoding?: string, callback?: Function): void;
/** Event emitted when COPY operation finishes */
on(event: 'finish', listener: () => void): this;
/** Event emitted when COPY data is available for reading */
on(event: 'data', listener: (chunk: Buffer) => void): this;
/** Event emitted when stream ends */
on(event: 'end', listener: () => void): this;
/** Event emitted on errors */
on(event: 'error', listener: (error: Error) => void): this;
}
Import bulk data into PostgreSQL tables using COPY FROM.
Usage Examples:
const Client = require('pg-native');
const client = new Client();
client.connectSync();
// Create a test table
client.querySync('CREATE TEMP TABLE users(name text, age int, city text)');
// Start COPY FROM operation
client.querySync('COPY users FROM stdin');
// Get the COPY stream
const stream = client.getCopyStream();
// Write data to the stream
stream.write(Buffer.from('Alice\t25\tNew York\n', 'utf8'));
stream.write(Buffer.from('Bob\t30\tLos Angeles\n', 'utf8'));
stream.write(Buffer.from('Charlie\t28\tChicago\n', 'utf8'));
// End the COPY operation
stream.end();
// Handle completion
stream.once('finish', function() {
console.log('COPY operation completed');
// Verify the data was imported
const rows = client.querySync('SELECT COUNT(*) FROM users');
console.log('Imported rows:', rows[0].count);
client.end();
});
// Handle errors
stream.on('error', function(err) {
console.error('COPY error:', err.message);
});
Export data from PostgreSQL tables using COPY TO.
Usage Examples:
const Client = require('pg-native');
const fs = require('fs');
const client = new Client();
client.connectSync();
// Start COPY TO operation
client.querySync('COPY users TO stdout');
// Get the COPY stream for reading
const stream = client.getCopyStream();
// Create output file
const outputFile = fs.createWriteStream('users_export.txt');
// Handle incoming data
stream.on('data', function(chunk) {
console.log('Received data chunk:', chunk.toString());
outputFile.write(chunk);
});
// Handle stream end
stream.on('end', function() {
console.log('COPY TO operation completed');
outputFile.end();
client.end();
});
// Handle errors
stream.on('error', function(err) {
console.error('COPY error:', err.message);
outputFile.end();
});
Working with CSV format data in COPY operations.
CSV Import Example:
const Client = require('pg-native');
const client = new Client();
client.connectSync();
// Create table with headers matching CSV
client.querySync(`
CREATE TEMP TABLE products(
id serial,
name text,
price decimal,
category text
)
`);
// Start COPY with CSV format
client.querySync(`
COPY products(name, price, category)
FROM stdin
WITH (FORMAT csv, HEADER true)
`);
const stream = client.getCopyStream();
// Write CSV data including header
const csvData = `name,price,category
"Laptop",999.99,"Electronics"
"Book",29.99,"Education"
"Coffee Mug",12.50,"Kitchen"
`;
stream.write(Buffer.from(csvData, 'utf8'));
stream.end();
stream.once('finish', function() {
const rows = client.querySync('SELECT * FROM products ORDER BY id');
console.log('Imported products:', rows);
client.end();
});
Working with binary format COPY operations for better performance.
Binary Import Example:
const Client = require('pg-native');
const client = new Client();
client.connectSync();
// Create table for binary data
client.querySync(`
CREATE TEMP TABLE binary_data(
id int,
data bytea,
timestamp timestamp
)
`);
// Start binary COPY
client.querySync('COPY binary_data FROM stdin WITH (FORMAT binary)');
const stream = client.getCopyStream();
// Note: Binary format requires specific PostgreSQL binary protocol
// This is a simplified example - actual binary format is complex
stream.write(Buffer.from('PGCOPY\n\xff\r\n\0', 'binary')); // Binary header
// ... write binary data rows according to PostgreSQL binary format
stream.end();
stream.once('finish', function() {
console.log('Binary COPY completed');
client.end();
});
Efficiently process large datasets using COPY operations.
Streaming Large Files:
const Client = require('pg-native');
const fs = require('fs');
const readline = require('readline');
const client = new Client();
client.connectSync();
// Prepare table
client.querySync('CREATE TEMP TABLE large_dataset(col1 text, col2 int, col3 text)');
client.querySync('COPY large_dataset FROM stdin');
const copyStream = client.getCopyStream();
// Stream large file line by line
const fileStream = fs.createReadStream('large_data_file.txt');
const rl = readline.createInterface({
input: fileStream,
crlfDelay: Infinity
});
let lineCount = 0;
rl.on('line', (line) => {
// Process and write each line
const processedLine = line + '\n';
copyStream.write(Buffer.from(processedLine, 'utf8'));
lineCount++;
if (lineCount % 10000 === 0) {
console.log(`Processed ${lineCount} lines`);
}
});
rl.on('close', () => {
copyStream.end();
});
copyStream.once('finish', function() {
console.log(`COPY completed. Total lines: ${lineCount}`);
const result = client.querySync('SELECT COUNT(*) FROM large_dataset');
console.log('Rows in database:', result[0].count);
client.end();
});
Proper error handling for COPY operations.
const Client = require('pg-native');
const client = new Client();
client.connectSync();
try {
// Create table with constraints
client.querySync(`
CREATE TEMP TABLE constrained_table(
id int PRIMARY KEY,
email text UNIQUE,
age int CHECK (age > 0)
)
`);
client.querySync('COPY constrained_table FROM stdin');
const stream = client.getCopyStream();
// Write data that might violate constraints
stream.write(Buffer.from('1\talice@example.com\t25\n', 'utf8'));
stream.write(Buffer.from('2\tbob@example.com\t30\n', 'utf8'));
stream.write(Buffer.from('1\tcharlie@example.com\t-5\n', 'utf8')); // Duplicate ID and negative age
stream.end();
stream.on('error', function(err) {
console.error('COPY operation failed:', err.message);
// Handle constraint violations or other COPY errors
});
stream.once('finish', function() {
console.log('COPY operation completed successfully');
});
} catch (err) {
console.error('Setup error:', err.message);
}
PostgreSQL COPY supports various format options, including text (the default), csv, and binary.
Install with Tessl CLI
npx tessl i tessl/npm-pg-native
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10