Token Utilities

Utility functions for token stringification, cloning, and safe mutation.

Capabilities

Token Stringification

Convert tokens back to their CSS string representation.

/**
 * Concatenates token string representations
 * @param tokens - Array of tokens to stringify
 * @returns Concatenated CSS string representation
 */
function stringify(...tokens: Array<CSSToken>): string;

Usage Examples:

import { tokenize, stringify } from "@csstools/css-tokenizer";

const tokens = tokenize({ css: ".foo { color: red; }" });

// Convert tokens back to CSS string
const cssString = stringify(...tokens);
console.log(cssString); // ".foo { color: red; }"

// Stringify specific tokens
const firstTokens = tokens.slice(0, 4); // delim ".", ident "foo", whitespace, "{"
const partialCSS = stringify(...firstTokens);
console.log(partialCSS); // ".foo {"

// Process and reconstruct CSS: drop whitespace tokens as a naive minification pass
// (note: tokens are tuples, so the type lives at index 0, not on a .type property;
// a real minifier must keep whitespace that acts as a descendant combinator)
const processedTokens = tokens.filter(
  (token) => token[0] !== 'whitespace-token'
);

const minifiedCSS = stringify(...processedTokens);
console.log(minifiedCSS); // ".foo{color:red;}"

Token Cloning

Create deep copies of token arrays for safe mutation operations.

/**
 * Deep clones token arrays
 * @param tokens - Array of tokens to clone
 * @returns Deep copy of the token array
 */
function cloneTokens(tokens: Array<CSSToken>): Array<CSSToken>;

Usage Examples:

import { tokenize, cloneTokens, isTokenIdent, stringify } from "@csstools/css-tokenizer";

const originalTokens = tokenize({ css: ".foo { color: red; }" });

// Create a safe copy for manipulation
const clonedTokens = cloneTokens(originalTokens);

// Safely modify the cloned tokens without affecting originals
clonedTokens.forEach(token => {
  if (isTokenIdent(token) && token[4].value === 'foo') {
    // Safe to mutate the clone; keep the parsed value (token[4].value)
    // and the raw representation (token[1]) in sync
    token[4].value = 'bar';
    token[1] = 'bar';
  }
});

// Original tokens remain unchanged
console.log(stringify(...originalTokens)); // ".foo { color: red; }"
console.log(stringify(...clonedTokens));   // ".bar { color: red; }"

Token Mutation Functions

Safely mutate specific token types with proper escaping and validation.

/**
 * Mutates identifier token value with proper escaping
 * @param ident - Identifier token to mutate
 * @param newValue - New value for the identifier
 */
function mutateIdent(ident: TokenIdent, newValue: string): void;

/**
 * Mutates dimension token unit with proper escaping
 * @param dimension - Dimension token to mutate
 * @param newUnit - New unit for the dimension
 */
function mutateUnit(dimension: TokenDimension, newUnit: string): void;

Usage Examples:

import { 
  tokenize, 
  mutateIdent, 
  mutateUnit, 
  isTokenIdent, 
  isTokenDimension,
  stringify 
} from "@csstools/css-tokenizer";

const tokens = tokenize({ css: ".old-class { width: 10px; }" });

tokens.forEach(token => {
  if (isTokenIdent(token) && token[4].value === 'old-class') {
    // Safely mutate identifier with proper escaping
    mutateIdent(token, 'new-class');
  } else if (isTokenDimension(token) && token[4].unit === 'px') {
    // Safely mutate dimension unit
    mutateUnit(token, 'rem');
  }
});

console.log(stringify(...tokens)); // ".new-class { width: 10rem; }"

// Handle special characters in identifiers
const specialTokens = tokenize({ css: ".foo { color: red; }" });
const identToken = specialTokens.find(isTokenIdent);
if (identToken) {
  // mutateIdent handles escaping automatically
  mutateIdent(identToken, 'my-class-with-special!@#-chars');
  console.log(stringify(...specialTokens)); // Properly escaped output
}

Mutation Safety

The mutation functions provide several safety guarantees, illustrated in the sketch after this list:

  1. Proper Escaping: Automatically handle CSS escaping rules for special characters
  2. Representation Updates: Update both the value and representation properties
  3. Type Safety: Only allow mutations on appropriate token types
  4. Specification Compliance: Ensure mutated tokens remain valid per CSS specification
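
For example, the escaping and representation guarantees can be observed directly on the token tuple. This is a minimal sketch; the exact escaped string in the comment is an assumption about the serializer's output, while the documented guarantee is that token[1] (the raw representation) and token[4].value stay consistent.

import { tokenize, mutateIdent, isTokenIdent, stringify } from "@csstools/css-tokenizer";

const tokens = tokenize({ css: ".foo {}" });
const ident = tokens.find(isTokenIdent);

if (ident) {
  // "2col" starts with a digit, which is not valid unescaped in a CSS identifier
  mutateIdent(ident, '2col');

  console.log(ident[4].value);       // "2col" (the plain value)
  console.log(ident[1]);             // escaped raw representation (assumed: "\32 col")
  console.log(stringify(...tokens)); // the serialized CSS uses the escaped form
}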

Performance Considerations

  • stringify: Efficient concatenation of the tokens' raw string representations
  • cloneTokens: Creates deep copies; use sparingly for large token arrays (see the sketch after this list)
  • mutateIdent / mutateUnit: In-place modifications, so no new token objects are allocated
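
A minimal sketch of that trade-off, assuming the tokens come straight from tokenize: mutate in place when the input array is disposable, and pay the deep-copy cost of cloneTokens once, up front, only when the originals must be preserved.

import { tokenize, cloneTokens, isTokenIdent, mutateIdent, stringify } from "@csstools/css-tokenizer";

// In-place path: the token array is disposable, so skip cloning entirely
const tokens = tokenize({ css: ".foo { color: red; }" });
for (const token of tokens) {
  if (isTokenIdent(token) && token[4].value === 'foo') {
    mutateIdent(token, 'bar');
  }
}
console.log(stringify(...tokens)); // ".bar { color: red; }"

// Preserving path: clone once for the whole array (not per token or per pass),
// then mutate only the copy
const source = tokenize({ css: ".foo { color: red; }" });
const working = cloneTokens(source);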

Advanced Usage Patterns

Token Stream Processing

import {
  tokenizer,
  stringify,
  cloneTokens,
  mutateIdent,
  isTokenIdent,
  type CSSToken,
  type TokenIdent,
} from "@csstools/css-tokenizer";

function processCSS(css: string): string {
  const t = tokenizer({ css });
  const processedTokens: CSSToken[] = [];
  
  while (!t.endOfFile()) {
    const token = t.nextToken();
    
    // Process tokens as needed
    if (isTokenIdent(token) && token[4].value.startsWith('old-')) {
      const cloned = cloneTokens([token])[0] as TokenIdent;
      mutateIdent(cloned, token[4].value.replace('old-', 'new-'));
      processedTokens.push(cloned);
    } else {
      processedTokens.push(token);
    }
  }
  
  return stringify(...processedTokens);
}
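
For example, calling processCSS on a small input (whitespace tokens pass through unchanged, so the output keeps the input's spacing):

const renamed = processCSS(".old-header { color: blue; }");
console.log(renamed); // ".new-header { color: blue; }"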

Safe Token Modification Pipeline

import { tokenize, cloneTokens, stringify, isTokenIdent, mutateIdent } from "@csstools/css-tokenizer";

function createTokenProcessor() {
  return {
    process(css: string, transformations: Record<string, string>): string {
      const originalTokens = tokenize({ css });
      const workingTokens = cloneTokens(originalTokens);
      
      workingTokens.forEach(token => {
        if (isTokenIdent(token) && transformations[token[4].value]) {
          mutateIdent(token, transformations[token[4].value]);
        }
      });
      
      return stringify(...workingTokens);
    }
  };
}

const processor = createTokenProcessor();
const result = processor.process('.foo .bar { color: red; }', {
  'foo': 'header',
  'bar': 'navigation'
});
console.log(result); // ".header .navigation { color: red; }"