Comprehensive Java library providing essential utilities, immutable collections, caching, and concurrency tools for modern Java development.
—
Comprehensive I/O utilities for streams, files, resources, and data processing with proper exception handling and resource management. These utilities simplify common I/O operations while providing robust error handling.
High-level file operations that handle common tasks like reading, writing, copying, and moving files.
import com.google.common.io.Files;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
// Reading files
File file = new File("data.txt");
// NOTE: the correct Guava API is Files.toByteArray(File); Files.readBytes(File) does not exist
byte[] bytes = Files.toByteArray(file); // Read entire file as bytes
List<String> lines = Files.readLines(file, StandardCharsets.UTF_8); // Read as lines
String content = Files.asCharSource(file, StandardCharsets.UTF_8).read(); // Read as string
// Writing files
String data = "Hello, World!";
Files.write(data.getBytes(StandardCharsets.UTF_8), file); // Write bytes
Files.asCharSink(file, StandardCharsets.UTF_8).write(data); // Write string
Files.append(data.getBytes(StandardCharsets.UTF_8), file); // Append bytes
// Line-by-line processing (memory efficient)
Files.asCharSource(file, StandardCharsets.UTF_8).readLines(new LineProcessor<Void>() {
@Override
public boolean processLine(String line) throws IOException {
System.out.println("Processing: " + line);
return true; // Continue processing (false to stop)
}
@Override
public Void getResult() {
return null;
}
});
// File operations
File source = new File("source.txt");
File destination = new File("destination.txt");
File directory = new File("backup/");
Files.copy(source, destination); // Copy file
Files.move(source, destination); // Move/rename file
boolean identical = Files.equal(source, destination); // Compare file contents
// File metadata
Files.touch(file); // Create empty file or update timestamp
File tempDir = Files.createTempDir(); // Create temporary directory
// File extensions and names
String extension = Files.getFileExtension("document.pdf"); // "pdf"
String nameWithoutExt = Files.getNameWithoutExtension("document.pdf"); // "document"

Modern NIO.2 Path-based file operations that provide enhanced functionality and better error handling compared to legacy File-based operations.
import com.google.common.io.MoreFiles;
import com.google.common.io.RecursiveDeleteOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.charset.StandardCharsets;
import java.nio.file.StandardOpenOption;
// MoreFiles: java.nio.file.Path counterparts to the legacy com.google.common.io.Files helpers.
// Path-based I/O sources and sinks
Path file = Paths.get("data.txt");
ByteSource source = MoreFiles.asByteSource(file);
ByteSink sink = MoreFiles.asByteSink(file, StandardOpenOption.CREATE);
CharSource charSource = MoreFiles.asCharSource(file, StandardCharsets.UTF_8);
CharSink charSink = MoreFiles.asCharSink(file, StandardCharsets.UTF_8);
// Directory listing and traversal
Path directory = Paths.get("mydir");
// Non-recursive snapshot of the directory's immediate entries
ImmutableList<Path> files = MoreFiles.listFiles(directory);
// File tree traversal
Traverser<Path> traverser = MoreFiles.fileTraverser();
for (Path path : traverser.breadthFirst(directory)) {
System.out.println(path);
}
// Predicates for file filtering
Predicate<Path> isDirectory = MoreFiles.isDirectory();
Predicate<Path> isRegularFile = MoreFiles.isRegularFile();
Predicate<Path> isDirNoFollowLinks = MoreFiles.isDirectory(LinkOption.NOFOLLOW_LINKS);
// File comparison
Path file1 = Paths.get("file1.txt");
Path file2 = Paths.get("file2.txt");
boolean identical = MoreFiles.equal(file1, file2); // Content comparison
// File creation and modification
Path newFile = Paths.get("newfile.txt");
MoreFiles.touch(newFile); // Create empty file or update timestamp
// Parent directory creation
Path nested = Paths.get("deep/nested/path/file.txt");
MoreFiles.createParentDirectories(nested); // Creates parent directories if needed
// File name operations
Path document = Paths.get("report.pdf");
String extension = MoreFiles.getFileExtension(document); // "pdf"
String nameOnly = MoreFiles.getNameWithoutExtension(document); // "report"
// Recursive deletion operations
Path targetDir = Paths.get("old_data");
// Delete directory and all contents
MoreFiles.deleteRecursively(targetDir);
// Delete with options
MoreFiles.deleteRecursively(targetDir,
RecursiveDeleteOption.ALLOW_INSECURE); // Allow deletion of insecure paths
// Delete only directory contents (keep directory)
MoreFiles.deleteDirectoryContents(targetDir);
// Safe deletion with options
try {
MoreFiles.deleteRecursively(targetDir);
// Thrown when the file system cannot guard against symlink races (no
// SecureDirectoryStream support) and ALLOW_INSECURE was not specified
} catch (InsecureRecursiveDeleteException e) {
// Handle insecure path deletion attempt
System.err.println("Deletion blocked for security: " + e.getMessage());
}

Key Methods:
- asByteSource(Path, OpenOption...) - Create ByteSource for Path
- asByteSink(Path, OpenOption...) - Create ByteSink for Path
- asCharSource(Path, Charset, OpenOption...) - Create CharSource for Path
- asCharSink(Path, Charset, OpenOption...) - Create CharSink for Path
- listFiles(Path) - List files in directory as ImmutableList
- fileTraverser() - Create Traverser for file tree walking
- isDirectory(LinkOption...) - Predicate for directory test
- isRegularFile(LinkOption...) - Predicate for regular file test
- equal(Path, Path) - Compare file contents for equality
- touch(Path) - Create empty file or update timestamp
- createParentDirectories(Path, FileAttribute...) - Create parent directories
- getFileExtension(Path) - Extract file extension
- getNameWithoutExtension(Path) - Get filename without extension
- deleteRecursively(Path, RecursiveDeleteOption...) - Delete directory tree
- deleteDirectoryContents(Path, RecursiveDeleteOption...) - Delete directory contents

Recursive Delete Options:
- ALLOW_INSECURE - Allow recursive deletion even when the file system does not support SecureDirectoryStream (without it, deleteRecursively throws InsecureRecursiveDeleteException on such file systems to guard against symlink races)

Advantages over Legacy Files Class:
Binary encoding schemes for reversibly translating between byte sequences and printable ASCII strings. Supports Base64, Base32, Base16 and custom encodings.
import com.google.common.io.BaseEncoding;
import java.nio.charset.StandardCharsets;
// Standard encodings
BaseEncoding base64 = BaseEncoding.base64();
BaseEncoding base64Url = BaseEncoding.base64Url(); // URL-safe Base64
BaseEncoding base32 = BaseEncoding.base32();
BaseEncoding base32Hex = BaseEncoding.base32Hex(); // Base32 with hex digits
BaseEncoding base16 = BaseEncoding.base16(); // Hexadecimal
// Basic encoding and decoding
String message = "Hello, World!";
byte[] data = message.getBytes(StandardCharsets.UTF_8);
// Base64 encoding
String encoded = base64.encode(data); // "SGVsbG8sIFdvcmxkIQ=="
byte[] decoded = base64.decode(encoded);
String result = new String(decoded, StandardCharsets.UTF_8); // "Hello, World!"
// Different encoding types
String base32Encoded = base32.encode(data); // "JBSWY3DPEBLW64TMMQQQ===="
String hexEncoded = base16.encode(data); // "48656C6C6F2C20576F726C6421"
// Case handling for Base16
String lowerHex = base16.lowerCase().encode(data); // "48656c6c6f2c20576f726c6421"
String upperHex = base16.upperCase().encode(data); // "48656C6C6F2C20576F726C6421"
// Padding control
BaseEncoding noPadding = base64.omitPadding();
String withoutPadding = noPadding.encode(data); // "SGVsbG8sIFdvcmxkIQ"
BaseEncoding withPadding = noPadding.withPadChar('='); // restores '=' padding
String restored = withPadding.encode(data); // "SGVsbG8sIFdvcmxkIQ=="
// Separator insertion for readability
BaseEncoding withSeparator = base64.withSeparator("-", 4);
String separated = withSeparator.encode(data); // "SGVs-bG8s-IFdv-cmxk-IQ=="
// Stream-based encoding/decoding
CharSink charSink = Files.asCharSink(file, StandardCharsets.UTF_8);
ByteSink encodingSink = base64.encodingSink(charSink);
encodingSink.write(data); // Writes encoded data to file
CharSource charSource = Files.asCharSource(file, StandardCharsets.UTF_8);
ByteSource decodingSource = base64.decodingSource(charSource);
byte[] decodedFromFile = decodingSource.read();
// Custom padding character
BaseEncoding customBase32 = BaseEncoding.base32().withPadChar('@');
String customEncoded = customBase32.encode(data);
// Malformed input: decode(CharSequence) throws IllegalArgumentException.
// (Variable renamed from "decoded": redeclaring the outer local of the same
// name inside the try block would not compile.)
try {
byte[] bad = base64.decode("Invalid==Input");
} catch (IllegalArgumentException e) {
System.err.println("Invalid encoding: " + e.getMessage());
}
// Case-insensitive decoding: decoding is otherwise strict about case, so
// base16().lowerCase().decode("48656C6C") would throw. Chain ignoreCase()
// (Guava 32.0+) to accept either case on decode.
BaseEncoding lenient = base16.lowerCase().ignoreCase();
byte[] result1 = lenient.decode("48656c6c"); // Works
byte[] result2 = lenient.decode("48656C6C"); // Also works (case-insensitive)
Standard Encodings:
- base64() - Standard Base64 encoding (RFC 4648)
- base64Url() - URL-safe Base64 encoding (uses - and _ instead of + and /)
- base32() - Base32 encoding (RFC 4648)
- base32Hex() - Base32 encoding with hex alphabet (0-9, A-V)
- base16() - Base16 (hexadecimal) encoding

Encoding Methods:
- encode(byte[]) - Encode byte array to string
- encode(byte[], int, int) - Encode byte array slice
- encodingSink(CharSink) - Create ByteSink that encodes to CharSink

Decoding Methods:
- decode(CharSequence) - Decode string to byte array
- decodingSource(CharSource) - Create ByteSource that decodes from CharSource

Customization Methods:
- omitPadding() - Remove padding characters from output
- withPadChar(char) - Use custom padding character
- withSeparator(String, int) - Insert separator every N characters
- lowerCase() / upperCase() - Control case for applicable encodings

Key Features:
Abstractions for reading from and writing to byte-oriented data sources and sinks.
import com.google.common.io.ByteSource;
import com.google.common.io.ByteSink;
import java.io.InputStream;
import java.io.OutputStream;
// ByteSource - reading bytes
ByteSource source = Files.asByteSource(file);
// Reading operations
byte[] allBytes = source.read(); // Read all bytes
long size = source.size(); // Total byte count (may open and scan the stream; sizeIfKnown() is the cheap variant)
boolean empty = source.isEmpty(); // Check if empty
// Stream operations
try (InputStream inputStream = source.openStream()) {
// Process stream
byte[] buffer = new byte[1024];
int bytesRead = inputStream.read(buffer);
}
// Copy to other destinations
ByteSink sink = Files.asByteSink(destinationFile);
source.copyTo(sink); // Copy bytes from source to sink
// NOTE(review): this stream is never closed here; use try-with-resources in real code
OutputStream outputStream = new FileOutputStream("output.dat");
source.copyTo(outputStream); // Copy to output stream (does not close it)
// Slice operations
ByteSource slice = source.slice(100, 500); // offset 100, length 500 => bytes 100-599
// ByteSink - writing bytes
ByteSink sink2 = Files.asByteSink(file);
sink2.write("Hello World".getBytes(StandardCharsets.UTF_8));
// Open stream for writing
try (OutputStream out = sink2.openStream()) {
out.write(data);
}
// Concatenating sources
ByteSource combined = ByteSource.concat(source1, source2, source3);
byte[] allData = combined.read();
// Empty source
ByteSource empty = ByteSource.empty();

Abstractions for reading from and writing to character-oriented data sources and sinks.
import com.google.common.io.CharSource;
import com.google.common.io.CharSink;
import java.io.Reader;
import java.io.Writer;
// CharSource - reading characters
CharSource source = Files.asCharSource(file, StandardCharsets.UTF_8);
// Reading operations
String content = source.read(); // Read entire content
List<String> lines = source.readLines(); // Read as list of lines
String firstLine = source.readFirstLine(); // Read just first line (null if the source is empty)
long length = source.length(); // Character count (may open and scan the stream; lengthIfKnown() is the cheap variant)
// Stream operations
try (Reader reader = source.openStream()) {
char[] buffer = new char[1024];
int charsRead = reader.read(buffer);
}
// Line processing (memory efficient for large files)
Integer lineCount = source.readLines(new LineProcessor<Integer>() {
private int count = 0;
@Override
public boolean processLine(String line) throws IOException {
count++;
return true; // Continue processing
}
@Override
public Integer getResult() {
return count;
}
});
// Copy operations
CharSink sink = Files.asCharSink(destinationFile, StandardCharsets.UTF_8);
source.copyTo(sink);
// NOTE(review): FileWriter uses the platform default charset (pre-JDK 18) and
// is never closed here; prefer a charset-explicit writer in try-with-resources
Writer writer = new FileWriter("output.txt");
source.copyTo(writer);
// CharSink - writing characters
CharSink sink2 = Files.asCharSink(file, StandardCharsets.UTF_8);
sink2.write("Hello World");
sink2.writeLines(Arrays.asList("Line 1", "Line 2", "Line 3"));
// Open stream for writing
try (Writer out = sink2.openStream()) {
out.write("Content");
}
// Concatenating sources
CharSource combined = CharSource.concat(source1, source2, source3);
String allContent = combined.read();
// Wrapping strings as sources
CharSource stringSource = CharSource.wrap("This is a string source");
List<String> stringLines = stringSource.readLines();

Utilities for working with classpath resources and URLs.
import com.google.common.io.Resources;
import java.net.URL;
// Getting resource URLs
URL configUrl = Resources.getResource("config.properties"); // From classpath root; throws IllegalArgumentException if absent
URL relativeUrl = Resources.getResource(MyClass.class, "relative-config.properties"); // Relative to class
// Reading resources
byte[] resourceBytes = Resources.toByteArray(configUrl);
String resourceContent = Resources.toString(configUrl, StandardCharsets.UTF_8);
List<String> resourceLines = Resources.readLines(configUrl, StandardCharsets.UTF_8);
// Copying resources
// NOTE(review): the FileOutputStream is never closed; wrap it in try-with-resources in real code
Resources.copy(configUrl, new FileOutputStream("local-config.properties"));
// Resource as ByteSource/CharSource
ByteSource resourceByteSource = Resources.asByteSource(configUrl);
CharSource resourceCharSource = Resources.asCharSource(configUrl, StandardCharsets.UTF_8);
// Loading properties from resources
Properties props = new Properties();
try (InputStream in = Resources.getResource("app.properties").openStream()) {
props.load(in);
}
// Utility for resource loading with error handling
public class ResourceLoader {
/** Reads a classpath resource fully as a UTF-8 string. */
public static String loadResourceAsString(String resourcePath) {
try {
URL resource = Resources.getResource(resourcePath);
return Resources.toString(resource, StandardCharsets.UTF_8);
} catch (IllegalArgumentException e) {
// Resources.getResource throws IllegalArgumentException when the resource is absent
throw new RuntimeException("Resource not found: " + resourcePath, e);
} catch (IOException e) {
throw new RuntimeException("Failed to read resource: " + resourcePath, e);
}
}
/** Loads a java.util.Properties file from the classpath. */
public static Properties loadProperties(String resourcePath) {
Properties props = new Properties();
try {
URL resource = Resources.getResource(resourcePath);
try (InputStream in = resource.openStream()) {
props.load(in);
}
return props;
// Narrowed from a blanket catch (Exception) for consistency with
// loadResourceAsString and to avoid masking programming errors
} catch (IllegalArgumentException e) {
throw new RuntimeException("Resource not found: " + resourcePath, e);
} catch (IOException e) {
throw new RuntimeException("Failed to load properties: " + resourcePath, e);
}
}
}

Low-level utilities for working with input and output streams.
import com.google.common.io.ByteStreams;
import com.google.common.io.CharStreams;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
// Byte stream operations
// NOTE(review): the example streams here are never closed; use try-with-resources in real code
InputStream input = new FileInputStream("source.dat");
OutputStream output = new FileOutputStream("destination.dat");
// Copy entire stream
long bytesCopied = ByteStreams.copy(input, output);
// Read operations
byte[] buffer = new byte[1024];
int bytesRead = ByteStreams.read(input, buffer, 0, buffer.length); // Reads up to len bytes; may return fewer only if end of stream is reached
ByteStreams.readFully(input, buffer); // Read exactly buffer.length bytes (EOFException if the stream ends early)
ByteStreams.skipFully(input, 100); // Skip exactly 100 bytes (EOFException if the stream ends early)
// Read entire stream to byte array
byte[] allBytes = ByteStreams.toByteArray(input);
// Exhaust stream (read and discard all data)
ByteStreams.exhaust(input);
// Null output stream (discards all data)
OutputStream nullOut = ByteStreams.nullOutputStream();
// Character stream operations
// NOTE(review): FileReader/FileWriter use the platform default charset (pre-JDK 18)
Reader reader = new FileReader("source.txt");
Writer writer = new FileWriter("destination.txt");
// Copy entire stream
long charsCopied = CharStreams.copy(reader, writer);
// Read operations
String content = CharStreams.toString(reader); // Read entire stream as string
List<String> lines = CharStreams.readLines(reader); // Read as list of lines
CharStreams.skipFully(reader, 50); // Skip exactly 50 characters
// Exhaust stream
CharStreams.exhaust(reader);
// Null writer (discards all data)
Writer nullWriter = CharStreams.nullWriter();
// Limited streams
InputStream limited = ByteStreams.limit(originalInputStream, 1024); // Limit to 1024 bytes

Utilities for proper resource cleanup and exception handling.
import com.google.common.io.Closer;
import com.google.common.io.Closeables;
import com.google.common.io.Flushables;
// Closer - manages multiple resources (pre-Java 7 idiom; try-with-resources is preferred today)
Closer closer = Closer.create();
try {
InputStream in = closer.register(new FileInputStream("input.txt"));
OutputStream out = closer.register(new FileOutputStream("output.txt"));
Reader reader = closer.register(new InputStreamReader(in, StandardCharsets.UTF_8));
Writer writer = closer.register(new OutputStreamWriter(out, StandardCharsets.UTF_8));
// Use resources
CharStreams.copy(reader, writer);
} catch (IOException e) {
throw closer.rethrow(e); // Records the primary exception so later close() failures don't mask it
} finally {
closer.close(); // Closes all registered resources in reverse registration order
}
// Simple resource cleanup
FileInputStream input = null;
try {
input = new FileInputStream("file.txt");
// Use input stream
} finally {
Closeables.closeQuietly(input); // Closes and ignores exceptions (null-safe)
}
// Close with exception propagation
FileOutputStream output = null;
try {
output = new FileOutputStream("file.txt");
// Use output stream
} catch (IOException e) {
Closeables.close(output, true); // swallowIOException=true: close errors are logged, not thrown
throw e;
} catch (RuntimeException e) {
Closeables.close(output, false); // swallowIOException=false: close errors propagate
throw e;
} finally {
Closeables.close(output, true);
}
// Flushing with error handling
Writer writer = new FileWriter("output.txt");
try {
writer.write("data");
Flushables.flush(writer, false); // swallowIOException=false: flush errors propagate
} catch (IOException e) {
Flushables.flushQuietly(writer); // Best effort flush
throw e;
}
}

Enhanced input and output streams with additional capabilities.
import com.google.common.io.CountingInputStream;
import com.google.common.io.CountingOutputStream;
import com.google.common.io.LittleEndianDataInputStream;
import com.google.common.io.LittleEndianDataOutputStream;
// Counting streams - track bytes read/written
InputStream original = new FileInputStream("data.bin");
CountingInputStream counting = new CountingInputStream(original);
byte[] buffer = new byte[1024];
counting.read(buffer);
long bytesRead = counting.getCount(); // Number of bytes read so far
// Counting output stream
OutputStream originalOut = new FileOutputStream("output.bin");
CountingOutputStream countingOut = new CountingOutputStream(originalOut);
// NOTE(review): getBytes() without a Charset uses the platform default; prefer StandardCharsets.UTF_8
countingOut.write("Hello".getBytes());
long bytesWritten = countingOut.getCount(); // Number of bytes written
// Little-endian data streams (java.io.DataInputStream/DataOutputStream are big-endian)
LittleEndianDataInputStream littleEndianIn = new LittleEndianDataInputStream(inputStream);
int value = littleEndianIn.readInt(); // Reads int in little-endian format
long longValue = littleEndianIn.readLong();
float floatValue = littleEndianIn.readFloat();
LittleEndianDataOutputStream littleEndianOut = new LittleEndianDataOutputStream(outputStream);
littleEndianOut.writeInt(42); // Writes int in little-endian format
littleEndianOut.writeLong(12345L);
littleEndianOut.writeFloat(3.14f);

Common patterns for processing data with I/O utilities.
// Batch processing large files: streams the file line by line so the whole
// content never has to fit in memory.
public void processLargeFile(File file) throws IOException {
Files.asCharSource(file, StandardCharsets.UTF_8).readLines(new LineProcessor<Void>() {
private int lineCount = 0;
@Override
public boolean processLine(String line) throws IOException {
lineCount++;
// Transform the line. NOTE: the original called processLine(line) here,
// which resolves to this LineProcessor's own boolean-returning
// processLine(String) and does not compile; the outer transformation
// helper must have a distinct name.
String processed = transformLine(line);
// Write to output or accumulate results
if (processed != null) {
writeToOutput(processed);
}
// Progress reporting
if (lineCount % 10000 == 0) {
System.out.println("Processed " + lineCount + " lines");
}
return true; // Continue processing
}
@Override
public Void getResult() {
System.out.println("Total lines processed: " + lineCount);
return null;
}
});
}
// Safe file operations with backup: writes to a temp file, then moves it into
// place, keeping a .backup copy to restore if anything fails.
public void safeFileWrite(File file, String content) throws IOException {
File backupFile = new File(file.getAbsolutePath() + ".backup");
File tempFile = new File(file.getAbsolutePath() + ".tmp");
try {
// Create backup if original exists
if (file.exists()) {
Files.copy(file, backupFile);
}
// Write to temporary file first
Files.asCharSink(tempFile, StandardCharsets.UTF_8).write(content);
// Move temp file to the final location.
// NOTE(review): Guava's Files.move is not guaranteed to be atomic; for a
// true atomic replace use java.nio.file.Files.move with ATOMIC_MOVE.
Files.move(tempFile, file);
// Clean up backup on success
if (backupFile.exists()) {
backupFile.delete(); // best-effort; return value deliberately ignored
}
} catch (IOException e) {
// Clean up temp file on failure
if (tempFile.exists()) {
tempFile.delete();
}
// Restore from backup if available
if (backupFile.exists() && !file.exists()) {
Files.move(backupFile, file);
}
throw e;
}
}
// Stream transformation pipeline: reads UTF-8 lines from input, transforms each
// via transformLine (defined elsewhere), and writes non-null results to output.
public void transformData(InputStream input, OutputStream output) throws IOException {
Closer closer = Closer.create();
try {
// Create transformation pipeline
Reader reader = closer.register(new InputStreamReader(input, StandardCharsets.UTF_8));
Writer writer = closer.register(new OutputStreamWriter(output, StandardCharsets.UTF_8));
// Process data line by line
BufferedReader buffered = closer.register(new BufferedReader(reader));
// NOTE(review): PrintWriter swallows IOExceptions; call printer.checkError()
// if write failures must be detected.
PrintWriter printer = closer.register(new PrintWriter(writer));
String line;
while ((line = buffered.readLine()) != null) {
String transformed = transformLine(line);
if (transformed != null) {
printer.println(transformed);
}
}
printer.flush();
} catch (IOException e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
// Binary data processing: copies source to sink in 8 KiB chunks, transforming
// each chunk via processBytes (defined elsewhere) and counting bytes both ways.
public void processBinaryData(ByteSource source, ByteSink sink) throws IOException {
try (InputStream in = source.openBufferedStream();
OutputStream out = sink.openBufferedStream()) {
// The counting wrappers need not be closed themselves; closing in/out suffices
CountingInputStream countingIn = new CountingInputStream(in);
CountingOutputStream countingOut = new CountingOutputStream(out);
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = countingIn.read(buffer)) != -1) {
// Process buffer if needed
byte[] processed = processBytes(buffer, 0, bytesRead);
countingOut.write(processed);
}
System.out.println("Processed " + countingIn.getCount() + " bytes");
System.out.println("Wrote " + countingOut.getCount() + " bytes");
}
}

Utilities and patterns for testing I/O operations.
// Testing with temporary files
@Test
public void testFileProcessing() throws IOException {
File tempDir = Files.createTempDir();
File testFile = new File(tempDir, "test.txt");
try {
// Setup test data
List<String> testLines = Arrays.asList("line1", "line2", "line3");
Files.asCharSink(testFile, StandardCharsets.UTF_8).writeLines(testLines);
// Test the processing
processFile(testFile);
// Verify results
List<String> resultLines = Files.readLines(testFile, StandardCharsets.UTF_8);
assertEquals(expected, resultLines);
} finally {
// Cleanup. NOTE: Guava has no deleteRecursively(File); the API is
// MoreFiles.deleteRecursively(Path, RecursiveDeleteOption...).
if (tempDir.exists()) {
MoreFiles.deleteRecursively(tempDir.toPath(), RecursiveDeleteOption.ALLOW_INSECURE);
}
}
}
// Testing with in-memory sources/sinks
@Test
public void testDataTransformation() throws IOException {
String inputData = "input line 1\ninput line 2\n";
CharSource source = CharSource.wrap(inputData);
// NOTE: CharSink has no wrap(StringBuilder) factory; build a small in-memory
// sink on top of CharStreams.asWriter(Appendable) instead.
StringBuilder output = new StringBuilder();
CharSink sink = new CharSink() {
@Override
public Writer openStream() {
return CharStreams.asWriter(output);
}
};
transformData(source, sink);
String result = output.toString();
assertEquals("expected output", result);
}

I/O streams that compute hash codes while reading or writing data.
import com.google.common.io.HashingInputStream;
import com.google.common.io.HashingOutputStream;
import com.google.common.hash.Hashing;
import com.google.common.hash.HashCode;
// Hash data while reading
InputStream originalInput = new FileInputStream("data.bin");
HashingInputStream hashingInput = new HashingInputStream(Hashing.sha256(), originalInput);
byte[] buffer = new byte[1024];
int bytesRead;
while ((bytesRead = hashingInput.read(buffer)) != -1) {
// Process data
}
HashCode inputHash = hashingInput.hash(); // SHA-256 of all data read
originalInput.close();
// Hash data while writing
// NOTE(review): MD5 is shown for illustration only; it is not collision-resistant
// and should not be used for security-sensitive integrity checks
OutputStream originalOutput = new FileOutputStream("output.bin");
HashingOutputStream hashingOutput = new HashingOutputStream(Hashing.md5(), originalOutput);
hashingOutput.write("Hello World".getBytes(StandardCharsets.UTF_8));
hashingOutput.flush();
HashCode outputHash = hashingOutput.hash(); // MD5 of all data written
originalOutput.close();
// Verify data integrity during copy: hash the source while copying, then
// re-hash the destination from disk. (The original compared a
// HashingInputStream against a HashingOutputStream wrapped around the same
// in-memory copy — those are always equal by construction, so nothing on
// disk was actually verified; it also read the hashes before the output
// stream was flushed and closed.)
public void copyWithVerification(File source, File destination) throws IOException {
HashCode sourceHash;
try (InputStream in = new FileInputStream(source);
OutputStream out = new FileOutputStream(destination)) {
HashingInputStream hashingIn = new HashingInputStream(Hashing.sha256(), in);
ByteStreams.copy(hashingIn, out);
sourceHash = hashingIn.hash();
}
// Streams are flushed and closed here; re-read the destination so on-disk
// corruption is actually detected
HashCode destHash = Files.asByteSource(destination).hash(Hashing.sha256());
if (!sourceHash.equals(destHash)) {
throw new IOException("Hash verification failed during copy");
}
}

Callback interfaces for processing data streams.
import com.google.common.io.LineProcessor;
import com.google.common.io.ByteProcessor;
// Line processor that tallies whitespace-separated words across a text file.
public class WordCountProcessor implements LineProcessor<Integer> {
private int total = 0;
@Override
public boolean processLine(String line) throws IOException {
String trimmed = line.trim();
if (!trimmed.isEmpty()) {
total += trimmed.split("\\s+").length;
}
return true; // keep consuming lines until end of input
}
@Override
public Integer getResult() {
return total;
}
}
// Usage
File textFile = new File("document.txt");
Integer totalWords = Files.asCharSource(textFile, StandardCharsets.UTF_8)
.readLines(new WordCountProcessor());
// Byte processor for binary data: accumulates a CRC32 checksum over all chunks.
public class ChecksumProcessor implements ByteProcessor<String> {
// A Hasher is single-use, so each ChecksumProcessor instance can process only one source
private final Hasher hasher = Hashing.crc32().newHasher();
@Override
public boolean processBytes(byte[] buf, int off, int len) throws IOException {
hasher.putBytes(buf, off, len);
return true; // Continue processing
}
@Override
public String getResult() {
// Hex string representation of the final CRC32 value
return hasher.hash().toString();
}
}
// Usage
ByteSource source = Files.asByteSource(new File("data.bin"));
String checksum = source.read(new ChecksumProcessor());

Additional I/O Classes:
- HashingInputStream - InputStream that maintains running hash of data read
- HashingOutputStream - OutputStream that maintains running hash of data written
- LineProcessor&lt;T&gt; - Interface for processing text files line by line
- ByteProcessor&lt;T&gt; - Interface for processing byte arrays with result accumulation

Guava's I/O utilities provide a comprehensive, safe, and efficient way to handle file operations, stream processing, and resource management with proper exception handling and cleanup patterns.
Install with Tessl CLI
npx tessl i tessl/maven-com-google-guava--guava