Stream Processing

Async generator and Transform stream interfaces for processing multiple commit messages in sequence, with configurable error handling and warnings.

Capabilities

Async Generator Function

Creates an async generator function that parses an iterable or async iterable of raw commits, with configurable error handling.

/**
 * Create async generator function to parse async iterable of raw commits
 * @param options - Parser options with stream-specific configuration
 * @returns Async generator function for parsing commit streams
 */
function parseCommits(
  options?: ParserStreamOptions
): (rawCommits: Iterable<string | Buffer> | AsyncIterable<string | Buffer>) => AsyncGenerator<Commit>;

interface ParserStreamOptions extends ParserOptions {
  /**
   * Controls how parse errors are handled:
   * - true: strict mode, throws errors for unparseable commits
   * - function: custom warning handler that receives the error message
   * - false/undefined: silent mode, skips unparseable commits
   */
  warn?: boolean | ((message: string) => void);
}

Usage Examples:

import { parseCommits } from "conventional-commits-parser";

// Basic async generator usage
const commitMessages = [
  "feat: add new feature",
  "fix: resolve bug in parser",
  "docs: update README"
];

const parseStream = parseCommits();
const commits = [];

for await (const commit of parseStream(commitMessages)) {
  commits.push(commit);
}

console.log(commits.length); // 3
console.log(commits[0].type); // "feat"

// With custom warning handler
const parseWithWarnings = parseCommits({
  warn: (message) => console.warn("Parse warning:", message)
});

const mixedMessages = [
  "feat: valid commit",
  "", // Empty commit - will trigger warning
  "fix: another valid commit"
];

const validCommits = [];
for await (const commit of parseWithWarnings(mixedMessages)) {
  validCommits.push(commit);
}
// Console output: "Parse warning: TypeError: Expected a raw commit"
console.log(validCommits.length); // 2 (empty commit was skipped)

// Strict mode - throws on invalid commits
const strictParse = parseCommits({ warn: true });

try {
  for await (const commit of strictParse(["", "feat: valid"])) {
    console.log(commit);
  }
} catch (error) {
  console.log("Parsing failed:", error.message); // "Expected a raw commit"
}
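
The raw commits can also come from an async iterable, so they can be produced lazily; a minimal sketch, where fetchCommitMessages stands in for any hypothetical async source:

// Hypothetical async source of raw commit messages
async function* fetchCommitMessages() {
  yield "feat: add streaming support";
  yield "fix: handle empty input";
}

const parseAsync = parseCommits();

for await (const commit of parseAsync(fetchCommitMessages())) {
  console.log(commit.type, commit.subject);
}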

Transform Stream

Creates a Node.js Transform stream for processing commits in pipelines and other stream-based architectures.

/**
 * Create stream to parse commits using Node.js Transform stream
 * @param options - Parser options with stream-specific configuration  
 * @returns Transform stream that processes raw commits to parsed commits
 */
function parseCommitsStream(options?: ParserStreamOptions): Transform;

Usage Examples:

import { parseCommitsStream } from "conventional-commits-parser";
import { pipeline } from "stream/promises";
import { createReadStream, createWriteStream } from "fs";

// Basic Transform stream usage
const parseStream = parseCommitsStream();

// Process commits from readable stream
const commits = [];
parseStream.on('data', (commit) => {
  commits.push(commit);
});

parseStream.write("feat: add feature\n\n");
parseStream.write("fix: resolve issue\n\n");
parseStream.end();
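
// The Transform's readable side is async iterable, so parsed commits can
// also be consumed with for await (a sketch; `iterableParse` is illustrative)
const iterableParse = parseCommitsStream();

iterableParse.write("feat: add feature\n\n");
iterableParse.write("fix: resolve issue\n\n");
iterableParse.end();

for await (const commit of iterableParse) {
  console.log(commit.type); // "feat", then "fix"
}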

// Pipeline usage with file processing
async function processCommitFile() {
  await pipeline(
    createReadStream('commits.txt'),
    parseCommitsStream({
      warn: console.warn  // Log parsing warnings
    }),
    async function* (source) {
      for await (const commit of source) {
        // Serialize each parsed commit as one line of JSON
        yield JSON.stringify(commit) + '\n';
      }
    },
    createWriteStream('parsed-commits.json')
  );
}
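
// Running the pipeline (a minimal usage sketch); the output file ends up
// holding one JSON object per line
processCommitFile()
  .then(() => console.log("Finished writing parsed commits"))
  .catch((error) => console.error("Pipeline failed:", error));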

// With custom options for different commit formats
const customStream = parseCommitsStream({
  headerPattern: /^(\w+)(?:\(([^)]*)\))?: (.+)$/,
  headerCorrespondence: ['type', 'scope', 'subject'],
  noteKeywords: ['BREAKING CHANGE', 'DEPRECATED'],
  warn: (message) => console.error('Parse error:', message)
});
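
// A sketch of the custom stream in use: footers starting with one of the
// configured noteKeywords are collected on the parsed commit's notes array
// (the sample commit text is illustrative)
customStream.on('data', (commit) => {
  console.log(commit.type, commit.scope, commit.subject);
  console.log(commit.notes); // e.g. the BREAKING CHANGE footer below
});

customStream.write(
  'feat(api): add v2 endpoints\n\nBREAKING CHANGE: v1 endpoints are removed\n'
);
customStream.end();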

Error Handling Modes

Both stream processing functions support three error handling modes, selected through the warn option:

Silent Mode (Default)

const parseStream = parseCommits(); // No warn option
// Skips invalid commits silently, continues processing

Warning Mode

const parseStream = parseCommits({
  warn: (message) => console.warn("Skipping invalid commit:", message)
});
// Logs warnings for invalid commits, continues processing

Strict Mode

const parseStream = parseCommits({ warn: true });
// Throws errors for invalid commits, stops processing
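
The same modes apply to parseCommitsStream; in strict mode the error is expected to surface on the Transform stream itself, so a pipeline built around it rejects. A minimal sketch, assuming an empty commit is rejected exactly as in the generator example above:

import { Readable } from "stream";
import { pipeline } from "stream/promises";
import { parseCommitsStream } from "conventional-commits-parser";

try {
  await pipeline(
    Readable.from(["feat: valid commit", ""]), // "" is assumed unparseable, as above
    parseCommitsStream({ warn: true }),        // strict mode
    async (source) => {
      for await (const commit of source) {
        console.log("parsed:", commit.type);
      }
    }
  );
} catch (error) {
  console.error("Parsing failed:", error.message);
}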

Stream Integration Patterns

Processing Git Log Output

import { spawn } from 'child_process';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';
import { parseCommitsStream } from 'conventional-commits-parser';

const gitLog = spawn('git', ['log', '--pretty=format:%B%n---END-COMMIT---']);
const commits = [];
let buffered = '';

await pipeline(
  gitLog.stdout,
  // Split the byte stream on the commit separator, buffering any partial
  // commit that spans a chunk boundary
  new Transform({
    transform(chunk, encoding, callback) {
      buffered += chunk.toString();
      const parts = buffered.split('---END-COMMIT---');
      buffered = parts.pop(); // keep the trailing, possibly incomplete piece
      for (const msg of parts) {
        if (msg.trim()) this.push(msg.trim());
      }
      callback();
    },
    flush(callback) {
      if (buffered.trim()) this.push(buffered.trim());
      callback();
    }
  }),
  parseCommitsStream(),
  new Transform({
    objectMode: true,
    transform(commit, encoding, callback) {
      commits.push(commit);
      callback();
    }
  })
);

Batch Processing with Async Iteration

import { parseCommits } from 'conventional-commits-parser';

async function processCommitBatch(commitMessages: string[]) {
  const parseStream = parseCommits({
    noteKeywords: ['BREAKING CHANGE', 'FEATURE'],
    referenceActions: ['closes', 'fixes', 'implements']
  });

  const results = {
    features: [],
    fixes: [],
    breakingChanges: []
  };

  for await (const commit of parseStream(commitMessages)) {
    if (commit.type === 'feat') {
      results.features.push(commit);
    } else if (commit.type === 'fix') {
      results.fixes.push(commit);
    }
    
    if (commit.notes.length > 0) {
      results.breakingChanges.push(commit);
    }
  }

  return results;
}
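
A usage sketch for the helper above, with illustrative sample messages:

const summary = await processCommitBatch([
  'feat: add plugin system\n\nBREAKING CHANGE: plugins must now be registered explicitly',
  'fix: handle empty scope\n\ncloses #42',
  'chore: update dependencies'
]);

console.log(summary.features.length);        // 1
console.log(summary.fixes.length);           // 1
console.log(summary.breakingChanges.length); // 1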