Query execution with cursor-based result iteration for handling large result sets efficiently, covering query analysis, optimization, and streaming of results.
Execute AQL queries with comprehensive execution options and flexible result handling.
/**
 * Executes an AQL query and returns a cursor for iterating over the result set
 * @param query - AQL query object containing the query string and bind parameters
 *   (typically produced by the `aql` template tag)
 * @param options - Query execution options (batch size, caching, profiling, limits, etc.)
 * @returns Promise resolving to a Cursor for item-wise iteration over results of type T
 */
query<T>(query: AqlQuery<T>, options?: QueryOptions): Promise<Cursor<T>>;
/**
 * Explains the query's execution plan without executing the query
 * @param query - AQL query to explain
 * @param options - Explain options (e.g. allPlans, optimizer rules)
 * @returns Promise resolving to execution plan details (see ExplainResult)
 */
explain(query: AqlQuery, options?: ExplainOptions): Promise<ExplainResult>;
/**
 * Parses an AQL query to validate its syntax without executing it
 * @param query - AQL query string or query object to parse
 * @returns Promise resolving to a parse result containing the referenced
 *   collections, bind variables and abstract syntax tree
 */
parse(query: string | AqlQuery | AqlLiteral): Promise<ParseResult>;Usage Examples:
import { Database, aql } from "arangojs";
const db = new Database();
const users = db.collection("users");
// Basic query execution
// Values interpolated via the aql`` template tag (like the collection below)
// are passed as bind parameters rather than concatenated into the query string
const cursor = await db.query(aql`
FOR user IN ${users}
FILTER user.active == true
SORT user.createdAt DESC
LIMIT 100
RETURN user
`);
// Query with options
const profiledCursor = await db.query(aql`
FOR u IN ${users}
FOR p IN posts
FILTER p.authorId == u._key
RETURN { user: u.name, post: p.title }
`, {
count: true,
batchSize: 50,
profile: 2,
cache: false,
memoryLimit: 1024 * 1024 * 256, // 256MB
maxRuntime: 30.0 // 30 seconds
});
// cursor.count is only populated because the query was run with { count: true },
// and extra.profile only because a profile level was requested
console.log("Total count:", profiledCursor.count);
console.log("Profile:", profiledCursor.extra.profile);
// Explain query execution plan
const plan = await db.explain(aql`
FOR user IN ${users}
FILTER user.age >= 18
SORT user.name
RETURN user
`, {
allPlans: true,
// "-all" disables every optimizer rule, then "+use-indexes" selectively re-enables index usage
optimizer: { rules: ["-all", "+use-indexes"] }
});
console.log("Execution plan:", plan.plan);
console.log("Estimated cost:", plan.plan.estimatedCost);
// Parse query for validation (syntax check only; the query is not executed)
const parseResult = await db.parse(aql`
FOR doc IN ${users}
RETURN doc.name
`);
console.log("Query is valid:", parseResult.parsed);Navigate through query results efficiently with cursor-based iteration.
/**
 * Cursor for item-wise access to query results
 */
class Cursor<ItemType> {
/** Whether more items are available (synchronous property; fetching happens in the async methods) */
readonly hasNext: boolean;
/** Total number of items (only populated if the query was executed with { count: true }) */
readonly count: number;
/** Additional information about the query execution (warnings, plan, profile, stats) */
readonly extra: CursorExtras;
/**
 * Gets the next item from the cursor
 * @returns Promise resolving to next item or undefined if no more items
 */
next(): Promise<ItemType | undefined>;
/**
 * Gets all remaining items as an array, exhausting the cursor
 * @returns Promise resolving to array of all remaining items
 */
all(): Promise<ItemType[]>;
/**
 * Executes callback for each item
 * @param callback - Function to execute on each item; returning false aborts iteration early
 * @returns Promise resolving to true if all items were processed, false if iteration was aborted
 */
forEach(
callback: (currentValue: ItemType, index: number, self: Cursor<ItemType>) => false | void
): Promise<boolean>;
/**
 * Maps each item using callback function
 * @param callback - Function to execute on each item
 * @returns Promise resolving to array of mapped results
 */
map<R>(
callback: (currentValue: ItemType, index: number, self: Cursor<ItemType>) => R
): Promise<R[]>;
/**
 * FlatMaps each item using callback function; arrays returned by the
 * callback are flattened one level into the result
 * @param callback - Function to execute on each item
 * @returns Promise resolving to flattened array of results
 */
flatMap<R>(
callback: (currentValue: ItemType, index: number, self: Cursor<ItemType>) => R | R[]
): Promise<R[]>;
/**
 * Reduces items to single value using reducer function
 * @param reducer - Function to execute on each item
 * @param initialValue - Initial accumulator value
 * @returns Promise resolving to reduced value
 */
reduce<R>(
reducer: (accumulator: R, currentValue: ItemType, index: number, self: Cursor<ItemType>) => R,
initialValue: R
): Promise<R>;
/**
 * Reduces items to single value using reducer function (no initial value;
 * the first item serves as the initial accumulator, hence accumulator: ItemType | R)
 * NOTE(review): for an empty cursor no accumulator exists and the result would be
 * undefined — the declared Promise<R> looks too narrow; confirm against implementation
 * @param reducer - Function to execute on each item
 * @returns Promise resolving to reduced value
 */
reduce<R>(
reducer: (accumulator: ItemType | R, currentValue: ItemType, index: number, self: Cursor<ItemType>) => R
): Promise<R>;
/**
 * Implements async iterator protocol, enabling for-await-of loops over items
 * @returns AsyncIterator for use in for-await-of loops
 */
[Symbol.asyncIterator](): AsyncIterator<ItemType>;
}Usage Examples:
// Item-by-item iteration
// Note: a cursor can only be consumed once, hence a fresh query per example below
const cursor = await db.query(aql`
FOR user IN ${users}
RETURN user
`);
// Using next() method — hasNext is synchronous, next() returns a promise
while (cursor.hasNext) {
const user = await cursor.next();
if (user) {
console.log("User:", user.name);
}
}
// Using async iterator (for-await-of)
const cursor2 = await db.query(aql`
FOR post IN posts
SORT post.createdAt DESC
RETURN post
`);
for await (const post of cursor2) {
console.log("Post:", post.title);
// Break early if needed
if (post.featured) {
break;
}
}
// Get all results at once
const cursor3 = await db.query(aql`
FOR user IN ${users}
FILTER user.department == "Engineering"
RETURN user.name
`);
const names = await cursor3.all();
console.log("Engineering team:", names);Handle large result sets with batch-wise processing for memory efficiency.
/**
 * Low-level interface for consuming the items of a BatchCursor one at a time
 */
interface BatchCursorItemsView<ItemType = any> {
/** Whether the view currently holds no more loaded items */
readonly isEmpty: boolean;
/** Presumably fetches the next batch from the server to refill the view — confirm against implementation */
more(): Promise<void>;
/** Removes and returns the next already-loaded item, or undefined if none is loaded */
shift(): ItemType | undefined;
}
/**
 * Cursor for batch-wise access to query results
 */
class BatchCursor<ItemType> {
/** Whether more batches are available */
readonly hasNext: boolean;
/** Total number of items (only populated if the query was executed with { count: true }) */
readonly count: number;
/** Additional information about the query execution (warnings, plan, profile, stats) */
readonly extra: CursorExtras;
/** Cursor providing item-wise access to the same underlying result set */
readonly items: Cursor<ItemType>;
/** Low-level interface for consuming items one at a time */
readonly itemsView: BatchCursorItemsView<ItemType>;
/**
 * Loads all remaining batches from the server into memory
 * @returns Promise that resolves when all batches are loaded
 */
loadAll(): Promise<void>;
/**
 * Gets the next batch of items
 * @returns Promise resolving to next batch array or undefined if no batches remain
 */
next(): Promise<ItemType[] | undefined>;
/**
 * Gets all remaining batches as a nested array, exhausting the cursor
 * @returns Promise resolving to array of batch arrays
 */
all(): Promise<ItemType[][]>;
/**
 * Executes callback for each batch
 * @param callback - Function to execute on each batch; returning false aborts iteration early
 * @returns Promise resolving to true if all batches were processed, false if iteration was aborted
 */
forEach(
callback: (currentBatch: ItemType[], index: number, self: BatchCursor<ItemType>) => false | void
): Promise<boolean>;
/**
 * Maps each batch using callback function
 * @param callback - Function to execute on each batch
 * @returns Promise resolving to array of mapped results
 */
map<R>(
callback: (currentBatch: ItemType[], index: number, self: BatchCursor<ItemType>) => R
): Promise<R[]>;
/**
 * FlatMaps each batch using callback function; arrays returned by the
 * callback are flattened one level into the result
 * @param callback - Function to execute on each batch
 * @returns Promise resolving to flattened array of results
 */
flatMap<R>(
callback: (currentBatch: ItemType[], index: number, self: BatchCursor<ItemType>) => R | R[]
): Promise<R[]>;
/**
 * Reduces batches to single value using reducer function
 * @param reducer - Function to execute on each batch
 * @param initialValue - Initial accumulator value
 * @returns Promise resolving to reduced value
 */
reduce<R>(
reducer: (accumulator: R, currentBatch: ItemType[], index: number, self: BatchCursor<ItemType>) => R,
initialValue: R
): Promise<R>;
/**
 * Reduces batches to single value using reducer function (no initial value;
 * the first batch serves as the initial accumulator, hence accumulator: ItemType[] | R)
 * NOTE(review): for an exhausted cursor no accumulator exists — the declared
 * Promise<R> looks too narrow; confirm against implementation
 * @param reducer - Function to execute on each batch
 * @returns Promise resolving to reduced value
 */
reduce<R>(
reducer: (accumulator: ItemType[] | R, currentBatch: ItemType[], index: number, self: BatchCursor<ItemType>) => R
): Promise<R>;
/**
 * Implements async iterator protocol for batches, enabling for-await-of loops
 * @returns AsyncIterator for batch arrays
 */
[Symbol.asyncIterator](): AsyncIterator<ItemType[]>;
}Usage Examples:
// Process results in batches for memory efficiency
const batchCursor = await db.query(aql`
FOR doc IN large_collection
RETURN doc
`, { batchSize: 1000 });
// Process batch by batch
for await (const batch of batchCursor) {
console.log(`Processing batch of ${batch.length} items`);
// Process batch items
for (const item of batch) {
// Process individual item
await processItem(item);
}
// Optional: Add delay between batches
await new Promise(resolve => setTimeout(resolve, 100));
}
// Alternative (use INSTEAD of the loop above — a cursor is exhausted once
// iterated): load all remaining batches up front, then process them
await batchCursor.loadAll();
const allBatches = await batchCursor.all();
console.log(`Loaded ${allBatches.length} batches`);Analyze and optimize query performance with detailed execution statistics.
/**
 * Lists the queries currently running on the server
 * (original summary mentioned execution statistics, which this method does not return)
 * @param options - Options for retrieving current queries
 * @returns Promise resolving to array of running queries
 */
listRunningQueries(options?: ListQueriesOptions): Promise<RunningQuery[]>;
/**
 * Kills a running query by its ID (see RunningQuery.id as returned by listRunningQueries)
 * @param queryId - ID of the query to kill
 * @returns Promise resolving when the query has been killed
 */
killQuery(queryId: string): Promise<void>;
/**
 * Gets the current server-side query tracking configuration
 * @returns Promise resolving to query tracking configuration
 */
queryTracking(): Promise<QueryTrackingOptions>;
/**
 * Sets the server-side query tracking configuration
 * @param options - Query tracking configuration to set
 * @returns Promise resolving to the updated configuration
 */
setQueryTracking(options: QueryTrackingOptions): Promise<QueryTrackingOptions>;Usage Examples:
// Enable query profiling
await db.setQueryTracking({
enabled: true,
trackSlowQueries: true,
slowQueryThreshold: 10,
maxSlowQueries: 100,
maxQueryStringLength: 8192
});
// Execute query with detailed profiling
const cursor = await db.query(aql`
FOR user IN ${users}
FOR post IN posts
FILTER post.authorId == user._key
COLLECT author = user.name INTO posts = post
RETURN {
author,
postCount: LENGTH(posts),
posts: posts[*].title
}
`, {
profile: 2, // Detailed profiling
count: true,
fullCount: true
});
// Analyze execution profile; extra.profile is only present because the
// query was run with a profile option
// NOTE(review): the field names `executing` and `indexesUsed` are not declared
// anywhere in this file — confirm against the server's actual profile payload
const profile = cursor.extra.profile;
console.log("Query execution time:", profile.executing);
console.log("Index usage:", profile.indexesUsed);
// List currently running queries
const runningQueries = await db.listRunningQueries();
for (const query of runningQueries) {
console.log(`Query ${query.id}: ${query.query.substring(0, 100)}...`);
console.log(`Runtime: ${query.runTime}s`);
// Kill long-running queries if needed (runTime is in seconds)
if (query.runTime > 60) {
await db.killQuery(query.id);
console.log(`Killed long-running query ${query.id}`);
}
}Advanced query execution features including streaming and job handling.
/**
 * Executes a query as an async job, for long-running operations that should
 * not block the current request/response cycle
 * @param query - AQL query to execute
 * @param options - Query options combined with async job settings
 * @returns Promise resolving to a Job whose result is a Cursor over the query results
 */
queryAsJob<T>(query: AqlQuery<T>, options?: QueryOptions & JobOptions): Promise<Job<Cursor<T>>>;
/**
 * Creates a streaming query cursor for real-time processing
 * @param query - AQL query for streaming
 * @param options - Streaming options
 * @returns Promise resolving to a streaming cursor over results of type T
 */
queryStream<T>(query: AqlQuery<T>, options?: StreamOptions): Promise<StreamCursor<T>>;Usage Examples:
// Execute long-running query as async job
const job = await db.queryAsJob(aql`
FOR doc1 IN large_collection1
FOR doc2 IN large_collection2
FILTER doc1.correlationId == doc2.correlationId
RETURN MERGE(doc1, doc2)
`, {
batchSize: 1000,
timeout: 3600 // 1 hour timeout
});
// Check job status periodically
// NOTE(review): the setTimeout-scheduled recursion below is NOT awaited by the
// outer `await checkJob()` — only the first check is awaited; later checks run
// detached and any rejection is unhandled
const checkJob = async () => {
const completed = await job.getCompleted();
if (completed) {
const cursor = await job.load();
if (cursor) {
const results = await cursor.all();
console.log(`Job completed with ${results.length} results`);
}
} else {
console.log("Job still running...");
setTimeout(checkJob, 5000); // Check again in 5 seconds
}
};
await checkJob();
// Streaming query for real-time processing
const streamCursor = await db.queryStream(aql`
FOR event IN event_stream
FILTER event.timestamp > DATE_NOW() - 3600000
RETURN event
`, {
batchSize: 100
});
// Process streaming results
for await (const event of streamCursor) {
console.log("New event:", event);
await handleRealtimeEvent(event);
}class Cursor<ItemType> {
// Condensed recap of the Cursor API declared in full earlier in this document
readonly hasNext: boolean;
readonly count: number;
readonly extra: CursorExtras;
next(): Promise<ItemType | undefined>;
all(): Promise<ItemType[]>;
[Symbol.asyncIterator](): AsyncIterator<ItemType>;
}
// Condensed recap of the BatchCursor API declared in full earlier in this document
class BatchCursor<ItemType> {
readonly hasNext: boolean;
readonly count: number;
readonly extra: CursorExtras;
loadAll(): Promise<void>;
next(): Promise<ItemType[] | undefined>;
all(): Promise<ItemType[][]>;
[Symbol.asyncIterator](): AsyncIterator<ItemType[]>;
}
/**
 * Additional metadata returned alongside a cursor's result set
 */
interface CursorExtras {
/** Warnings raised during query execution */
warnings: Array<{
code: number;
message: string;
}>;
/** Execution plan; optional — not present on every response */
plan?: Record<string, any>;
/** Profiling information; only present when the query was run with a profile option */
profile?: Record<string, any>;
/** Execution statistics; optional — presence may depend on server/options */
stats?: CursorStats;
}
/**
 * Execution statistics for a query
 */
interface CursorStats {
/** Number of write operations executed */
writesExecuted: number;
/** Number of write operations ignored — presumably due to ignore-on-conflict semantics; confirm */
writesIgnored: number;
/** Number of documents scanned via full collection scans */
scannedFull: number;
/** Number of documents scanned via index lookups */
scannedIndex: number;
/** Number of documents filtered out after scanning */
filtered: number;
/** Number of HTTP requests made while executing the query */
httpRequests: number;
/** Total execution time — presumably seconds; TODO confirm unit */
executionTime: number;
/** Peak memory usage during execution — presumably bytes; TODO confirm unit */
peakMemoryUsage: number;
}
/**
 * Options controlling AQL query execution
 */
interface QueryOptions {
/** Whether the total number of result items should be computed (populates Cursor.count) */
count?: boolean;
/** Maximum number of items per batch transferred from the server */
batchSize?: number;
/** Whether the query results cache may be used */
cache?: boolean;
/** Maximum memory the query may use, in bytes (see 256MB usage example above) */
memoryLimit?: number;
/** Time-to-live for the server-side cursor — presumably seconds; confirm */
ttl?: number;
/** Request timeout — NOTE(review): unit (ms vs s) not established here; confirm */
timeout?: number;
/** Maximum runtime for the query, in seconds (see 30.0 usage example above) */
maxRuntime?: number;
/** Profiling level: boolean for basic profiling, 2 for detailed profiling */
profile?: boolean | number;
/** How long to wait for satellite collections to sync — unit not established here; confirm */
satelliteSyncWait?: number;
/** Whether to compute the total result count ignoring LIMIT */
fullCount?: boolean;
/** Whether scanned documents should be loaded into the block cache — confirm semantics */
fillBlockCache?: boolean;
/** Whether to execute as a streaming query */
stream?: boolean;
/** Whether reads may be dirty (served without waiting for synchronization) */
allowDirtyRead?: boolean;
/** Whether collections the user lacks access to should be skipped instead of raising an error */
skipInaccessibleCollections?: boolean;
/** Maximum transaction size — presumably bytes; confirm */
maxTransactionSize?: number;
/** Maximum number of warnings to collect */
maxWarningCount?: number;
/** Whether warnings should cause the query to fail */
failOnWarning?: boolean;
/** Number of operations after which an intermediate commit is performed */
intermediateCommitCount?: number;
/** Data size after which an intermediate commit is performed — presumably bytes; confirm */
intermediateCommitSize?: number;
/** Optimizer configuration */
optimizer?: {
/** Optimizer rules to toggle, e.g. "-all", "+use-indexes" */
rules?: string[];
};
}
/**
 * Options for explaining a query's execution plan
 */
interface ExplainOptions {
/** Whether all candidate plans should be returned instead of only the best one */
allPlans?: boolean;
/** Maximum number of plans the optimizer may generate */
maxNumberOfPlans?: number;
/** Optimizer configuration */
optimizer?: {
/** Optimizer rules to toggle, e.g. "-all", "+use-indexes" */
rules?: string[];
};
}
/**
 * Result of explaining a query (see explain())
 */
interface ExplainResult {
/** The selected execution plan */
plan: ExecutionPlan;
/** All candidate plans; optional — presumably only present when allPlans was requested */
plans?: ExecutionPlan[];
/** Warnings raised while planning */
warnings: Array<{
code: number;
message: string;
}>;
/** Statistics about the planning/optimization process itself */
stats: {
rulesExecuted: number;
rulesSkipped: number;
plansCreated: number;
peakMemoryUsage: number;
executionTime: number;
};
}
/**
 * A query execution plan produced by the optimizer
 */
interface ExecutionPlan {
/** Execution nodes making up the plan */
nodes: ExecutionNode[];
/** Optimizer rules that were applied to produce this plan */
rules: string[];
/** Collections involved and how each is accessed */
collections: Array<{
name: string;
type: "read" | "write";
}>;
/** Variables used in the plan */
variables: ExecutionVariable[];
/** Estimated total cost of executing this plan */
estimatedCost: number;
/** Estimated number of items this plan will produce */
estimatedNrItems: number;
/** Whether the query modifies data */
isModificationQuery: boolean;
}
/**
 * Result of parsing an AQL query (see parse())
 */
interface ParseResult {
/** Whether the query was parsed successfully */
parsed: boolean;
/** Names of collections referenced in the query */
collections: string[];
/** Names of bind parameters used in the query */
bindVars: string[];
/** Abstract syntax tree of the query — exact node shape not declared here */
ast: Record<string, any>;
}
/**
 * Information about a query tracked by the server (see listRunningQueries())
 */
interface RunningQuery {
/** Unique ID of the query; usable with killQuery() */
id: string;
/** Database the query runs in */
database: string;
/** User that started the query */
user: string;
/** The query string (possibly truncated per maxQueryStringLength tracking option) */
query: string;
/** Bind parameters the query was started with */
bindVars: Record<string, any>;
/** Timestamp when the query was started — presumably an ISO 8601 string; confirm */
started: string;
/** Time the query has been running, in seconds */
runTime: number;
/** Peak memory usage — presumably bytes; confirm */
peakMemoryUsage: number;
/** Current execution state */
state: "executing" | "finished" | "killed";
/** Whether the query was started as a streaming query */
stream: boolean;
}
/**
 * Configuration for server-side query tracking (see queryTracking()/setQueryTracking())
 */
interface QueryTrackingOptions {
/** Whether queries should be tracked at all */
enabled: boolean;
/** Whether slow queries should be tracked separately */
trackSlowQueries: boolean;
/** Whether bind variables should be stored with tracked queries */
trackBindVars: boolean;
/** Runtime threshold above which a query counts as slow — presumably seconds; confirm */
slowQueryThreshold: number;
/** Maximum number of slow queries to keep in the slow-query list */
maxSlowQueries: number;
/** Maximum length of tracked query strings (longer queries are truncated) */
maxQueryStringLength: number;
}