# COPY TO Operations (Data Export)

Creates readable streams for exporting data from PostgreSQL tables to external destinations. The COPY TO operation streams data directly from PostgreSQL without loading entire datasets into memory.
## Capabilities

### to() Function

Creates a readable stream for COPY TO operations.
```javascript { .api }
/**
 * Creates a readable stream for COPY TO operations
 * @param {string} text - SQL COPY TO statement (e.g., 'COPY my_table TO STDOUT')
 * @param {object} [options] - Optional stream configuration
 * @returns {CopyToQueryStream} CopyToQueryStream instance
 */
function to(text, options);
```
**Usage Examples:**

```javascript
const { to: copyTo } = require('pg-copy-streams');
const { Pool } = require('pg');

// Basic table export
const pool = new Pool();
const client = await pool.connect();
const stream = client.query(copyTo('COPY users TO STDOUT'));

// Export with CSV format
const csvStream = client.query(copyTo("COPY products TO STDOUT WITH (FORMAT CSV, HEADER)"));

// Export with custom delimiter
const tsvStream = client.query(copyTo("COPY logs TO STDOUT WITH (FORMAT TEXT, DELIMITER E'\\t')"));
```
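
COPY also accepts a parenthesized query in place of a table name, which is useful for exporting a filtered or joined result set. A short sketch (the `orders` table and its columns are illustrative):

```javascript
// Export the result of an arbitrary SELECT instead of a whole table
const reportStream = client.query(
  copyTo("COPY (SELECT id, total FROM orders WHERE total > 100) TO STDOUT WITH (FORMAT CSV, HEADER)")
);
```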
### CopyToQueryStream Class

Readable stream implementation for COPY TO operations.

```javascript { .api }
/**
 * CopyToQueryStream - Readable stream for COPY TO operations
 * @class
 * @extends {Readable}
 */
class CopyToQueryStream {
  /**
   * @param {string} text - The SQL COPY TO statement
   * @param {object} [options] - Stream options
   */
  constructor(text, options) {}

  /** @type {string} The SQL COPY TO statement */
  text;

  /** @type {number} Number of rows exported (available after completion) */
  rowCount;

  /** @type {object} PostgreSQL connection reference */
  connection;
}
```
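
The `options` parameter is used as standard Node.js stream options. A usage sketch (the 64 KiB `highWaterMark` is an illustrative tuning choice, not a recommendation):

```javascript
// Larger internal buffer for a high-throughput export
const tunedStream = client.query(copyTo('COPY users TO STDOUT', { highWaterMark: 64 * 1024 }));

// The statement that created the stream is kept on the instance
console.log(tunedStream.text); // 'COPY users TO STDOUT'
```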
### Stream Submission

Submits the COPY TO query to a PostgreSQL connection.

```javascript { .api }
/**
 * Submits the COPY TO query to a PostgreSQL connection
 * @param {object} connection - pg connection object
 */
submit(connection);
```
**Usage Example:**

```javascript
const fs = require('fs');
const { pipeline } = require('stream/promises');
const { Pool } = require('pg');
const { to: copyTo } = require('pg-copy-streams');

const pool = new Pool();
const client = await pool.connect();
try {
  const stream = client.query(copyTo('COPY my_table TO STDOUT'));

  // Pipe to file; pipeline() waits for the file to be fully written
  // and propagates errors from either side
  await pipeline(stream, fs.createWriteStream('export.csv'));

  // rowCount is populated once the COPY command completes
  console.log(`Exported ${stream.rowCount} rows`);
} finally {
  client.release();
}
```
### Error Handling

Handles errors during the COPY TO operation.

```javascript { .api }
/**
 * Handles errors during stream processing
 * @param {Error} err - Error object
 */
handleError(err);
```
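
`handleError` is invoked internally by the pg driver; application code sees failures as `'error'` events on the stream. A minimal handling sketch (discarding the client on failure via `client.release(err)` is a conservative choice, not a requirement):

```javascript
const stream = client.query(copyTo('COPY my_table TO STDOUT'));

stream.on('error', (err) => {
  console.error('COPY TO failed:', err.message);
  // Passing the error to release() removes the client from the pool
  client.release(err);
});
```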
### Command Completion

Processes the CommandComplete message and extracts row count.

```javascript { .api }
/**
 * Processes CommandComplete message and extracts row count
 * @param {object} msg - Message object with text property containing row count
 */
handleCommandComplete(msg);
```
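
For a COPY statement, the CommandComplete tag has the form `COPY <n>`. Conceptually the count is recovered like this (a sketch of the idea, not the module's exact implementation):

```javascript
// msg.text carries a command tag such as 'COPY 1234'
function parseRowCount(msg) {
  const match = /^COPY (\d+)$/.exec(msg.text || '');
  return match ? parseInt(match[1], 10) : null;
}
```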
### Ready for Query

Called when the connection is ready for the next query.

```javascript { .api }
/**
 * Called when connection is ready for next query
 */
handleReadyForQuery();
```
## Stream Events

As a readable stream, CopyToQueryStream emits standard Node.js stream events:

```javascript { .api }
/**
 * Standard readable stream events
 */
stream.on('data', (chunk) => {
  // Received data chunk from PostgreSQL
  console.log(`Received ${chunk.length} bytes`);
});

stream.on('end', () => {
  // All data has been read, stream is finished
  console.log(`Export completed. Rows: ${stream.rowCount}`);
});

stream.on('error', (err) => {
  // Error occurred during streaming
  console.error('Stream error:', err.message);
});

stream.on('close', () => {
  // Stream has been closed
  console.log('Stream closed');
});
```
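
Because CopyToQueryStream is a standard Readable, it also supports async iteration, which can replace manual `'data'`/`'end'` wiring; errors reject the loop. A usage sketch:

```javascript
// Consume the export chunk by chunk with for await...of
let bytes = 0;
for await (const chunk of stream) {
  bytes += chunk.length;
}
console.log(`Export completed: ${bytes} bytes, ${stream.rowCount} rows`);
```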
## Advanced Usage

### Streaming with CSV Parser

```javascript
const { pipeline } = require('stream/promises');
const csvParser = require('csv-parser');
const { to: copyTo } = require('pg-copy-streams');

const client = await pool.connect();
const copyStream = client.query(copyTo("COPY products TO STDOUT WITH (FORMAT CSV, HEADER)"));

const results = [];
await pipeline(
  copyStream,
  csvParser(),
  async function* (source) {
    for await (const chunk of source) {
      // Process each parsed CSV row
      yield { ...chunk, processed: true };
    }
  },
  async function (source) {
    for await (const row of source) {
      results.push(row);
    }
  }
);

console.log(`Processed ${results.length} rows`);
```
### Binary Format Export

```javascript
const binaryStream = client.query(copyTo("COPY data_table TO STDOUT WITH (FORMAT BINARY)"));

// Binary data requires special handling
binaryStream.on('data', (chunk) => {
  // chunk contains binary-encoded PostgreSQL data
  console.log(`Received ${chunk.length} bytes`);
});
```
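
PostgreSQL's binary COPY output opens with a fixed 11-byte signature (`PGCOPY\n\377\r\n\0`) followed by a flags field and a header-extension length. A validation sketch (it assumes the first chunk is at least 11 bytes, which holds in practice given the chunk sizes involved):

```javascript
const SIGNATURE = Buffer.from('PGCOPY\n\xff\r\n\0', 'latin1');

let first = true;
binaryStream.on('data', (chunk) => {
  if (first) {
    first = false;
    // Reject output that does not start with the binary COPY signature
    if (!chunk.subarray(0, 11).equals(SIGNATURE)) {
      binaryStream.destroy(new Error('unexpected COPY BINARY header'));
    }
  }
  // ...decode tuples according to PostgreSQL's binary COPY layout
});
```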
## Important Notes

- PostgreSQL chunks COPY output on 64 kB boundaries, and those boundaries do not align with rows: a single chunk may begin or end mid-row (see the row-splitting sketch below)
- The `rowCount` property is only populated after the stream ends
- Always handle both 'end' and 'error' events
- `pipe()` and `pipeline()` apply backpressure automatically; when consuming 'data' events manually, pause the stream if the destination falls behind
- Binary format exports require knowledge of PostgreSQL's binary encoding
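
Because chunks can end mid-row, text and CSV exports need a line-aware splitter before any per-row processing. A minimal sketch using a Transform that buffers the trailing partial line (generic stream code, not part of pg-copy-streams; the split2 package offers the same behavior off the shelf):

```javascript
const { Transform } = require('stream');

// Re-chunks arbitrary byte chunks into complete newline-terminated rows
function rowSplitter() {
  let tail = '';
  return new Transform({
    readableObjectMode: true,
    transform(chunk, _enc, cb) {
      const lines = (tail + chunk.toString('utf8')).split('\n');
      tail = lines.pop(); // keep the trailing partial line for the next chunk
      for (const line of lines) this.push(line);
      cb();
    },
    flush(cb) {
      if (tail) this.push(tail);
      cb();
    },
  });
}

// Usage: stream.pipe(rowSplitter()).on('data', (row) => { ... });
```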