Java client for the Langfuse API, providing access to observability and analytics features for LLM applications.
The Langfuse Java client provides consistent pagination across all list endpoints using the MetaResponse type. This enables efficient retrieval of large result sets.
Standard pagination metadata included in all paginated responses.
Import: import com.langfuse.client.resources.utils.pagination.types.MetaResponse;
// NOTE: API outline only — method bodies are omitted here for brevity.
// See the generated client class for the full implementation.
/**
 * Pagination metadata for list responses
 */
public final class MetaResponse {
/**
 * Current page number (1-based)
 */
int getPage();
/**
 * Number of items per page
 */
int getLimit();
/**
 * Total number of items given current filters
 */
int getTotalItems();
/**
 * Total number of pages given current limit
 */
int getTotalPages();
// Static factory for constructing instances.
static Builder builder();
}

All list endpoints return responses with data and meta fields:
PaginatedXyz {
List<Xyz> getData(); // Current page of items
MetaResponse getMeta(); // Pagination metadata
}

Examples:
- PaginatedDatasets
- PaginatedSessions
- PaginatedModels
- PaginatedAnnotationQueues
- Traces (with meta)

List requests typically support these parameters:
XyzRequest.builder()
.page(1) // Page number (default: 1, 1-based)
.limit(50) // Items per page (default: varies by endpoint, usually 50)
.build();

import com.langfuse.client.LangfuseClient;
import com.langfuse.client.resources.datasets.types.*;
// Build a client for Langfuse Cloud using the project's public/secret key pair.
LangfuseClient client = LangfuseClient.builder()
.url("https://cloud.langfuse.com")
.credentials("pk-lf-...", "sk-lf-...")
.build();
// Get first page
GetDatasetsRequest request = GetDatasetsRequest.builder()
.page(1)
.limit(10)
.build();
PaginatedDatasets firstPage = client.datasets().list(request);
// Inspect the pagination metadata returned alongside the page data.
System.out.println("Total items: " + firstPage.getMeta().getTotalItems());
System.out.println("Total pages: " + firstPage.getMeta().getTotalPages());
System.out.println("Current page: " + firstPage.getMeta().getPage());
System.out.println("Items on this page: " + firstPage.getData().size());

int currentPage = 1;
int pageSize = 50;
// Loop until the last page reported by the pagination metadata is processed.
while (true) {
GetTracesRequest request = GetTracesRequest.builder()
.page(currentPage)
.limit(pageSize)
.build();
Traces traces = client.trace().list(request);
// Process current page
for (Trace trace : traces.getData()) {
System.out.println("Trace: " + trace.getId());
}
// Check if more pages exist
if (currentPage >= traces.getMeta().getTotalPages()) {
break;
}
currentPage++;
}

import java.util.List;
import java.util.ArrayList;
import java.util.function.Function;
public class PaginationHelper {
/**
* Fetch all items across all pages
*/
public static <T, R> List<T> fetchAll(
Function<Integer, R> fetcher,
Function<R, List<T>> dataExtractor,
Function<R, MetaResponse> metaExtractor
) {
List<T> allItems = new ArrayList<>();
int currentPage = 1;
while (true) {
R response = fetcher.apply(currentPage);
List<T> pageData = dataExtractor.apply(response);
MetaResponse meta = metaExtractor.apply(response);
allItems.addAll(pageData);
if (currentPage >= meta.getTotalPages()) {
break;
}
currentPage++;
}
return allItems;
}
}
// Usage
// Fetch every dataset by letting the helper drive the page loop.
List<Dataset> allDatasets = PaginationHelper.fetchAll(
page -> {
GetDatasetsRequest req = GetDatasetsRequest.builder()
.page(page)
.limit(100)
.build();
return client.datasets().list(req);
},
PaginatedDatasets::getData,
PaginatedDatasets::getMeta
);

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
 * Iterator that lazily pages through traces, fetching each page from the API
 * on demand. Not thread-safe.
 */
public class PaginatedIterator<T> implements Iterator<T> {
    private final LangfuseClient client;
    private final int pageSize;
    private int currentPage = 1;
    // Unknown until the first page's metadata arrives.
    private int totalPages = Integer.MAX_VALUE;
    private List<T> currentData = new ArrayList<>();
    private int currentIndex = 0;

    public PaginatedIterator(LangfuseClient client, int pageSize) {
        this.client = client;
        this.pageSize = pageSize;
        fetchNextPage();
    }

    @Override
    public boolean hasNext() {
        // Advance past exhausted (or empty) pages until an item is found or
        // the last page has been consumed. The original single-step check
        // would terminate early if the server returned an empty mid-sequence
        // page even though later pages existed.
        while (currentIndex >= currentData.size()) {
            if (currentPage >= totalPages) {
                return false;
            }
            currentPage++;
            fetchNextPage();
        }
        return true;
    }

    @Override
    public T next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        return currentData.get(currentIndex++);
    }

    // The API returns List<Trace>; the cast is only safe when T is Trace.
    @SuppressWarnings("unchecked")
    private void fetchNextPage() {
        GetTracesRequest request = GetTracesRequest.builder()
                .page(currentPage)
                .limit(pageSize)
                .build();
        Traces response = client.trace().list(request);
        currentData = (List<T>) response.getData();
        currentIndex = 0;
        totalPages = response.getMeta().getTotalPages();
    }

    /** Adapts this iterator to a sequential {@link Stream}. */
    public Stream<T> stream() {
        // Fully qualified so the snippet compiles without additional imports
        // (Spliterator/Spliterators were referenced but never imported).
        return StreamSupport.stream(
                java.util.Spliterators.spliteratorUnknownSize(
                        this, java.util.Spliterator.ORDERED),
                false
        );
    }
}
// Usage
// Stream every trace transparently; pages are fetched as iteration proceeds.
PaginatedIterator<Trace> iterator = new PaginatedIterator<>(client, 100);
iterator.stream()
.filter(trace -> trace.getName().isPresent())
.forEach(trace -> System.out.println(trace.getName().get()));

Most endpoints default to 50 items per page. Common limits:
// Small page for quick response
.limit(10)
// Default page size
.limit(50)
// Large page for bulk processing
.limit(100)

import com.langfuse.client.LangfuseClient;
import com.langfuse.client.resources.trace.types.*;
import java.util.List;
import java.util.ArrayList;
public class TraceExporter {
    private final LangfuseClient client;

    /** Creates an exporter that reads traces through the given client. */
    public TraceExporter(LangfuseClient client) {
        this.client = client;
    }

    /**
     * Downloads every trace recorded for the given user, page by page,
     * logging progress as each page arrives.
     *
     * @param userId the user whose traces are exported
     * @return all traces for the user, in page order
     */
    public List<Trace> exportAllTraces(String userId) {
        List<Trace> collected = new ArrayList<>();
        int limit = 100;
        System.out.println("Exporting traces for user: " + userId);
        for (int page = 1; ; page++) {
            GetTracesRequest request = GetTracesRequest.builder()
                    .userId(userId)
                    .page(page)
                    .limit(limit)
                    .build();
            Traces batch = client.trace().list(request);
            MetaResponse meta = batch.getMeta();
            collected.addAll(batch.getData());
            System.out.println(String.format(
                    "Fetched page %d/%d (%d traces)",
                    page,
                    meta.getTotalPages(),
                    batch.getData().size()
            ));
            // Stop once the metadata says this was the final page.
            if (page >= meta.getTotalPages()) {
                break;
            }
        }
        System.out.println("Total traces exported: " + collected.size());
        return collected;
    }
}

import java.util.concurrent.*;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class ParallelPagination {
    /**
     * Fetches every page of traces concurrently and returns the combined list.
     *
     * Note: page 1 is requested twice (once to learn the page count, once in
     * the parallel sweep) — acceptable overhead for a one-shot export.
     *
     * @param client the Langfuse client used for all requests
     * @return all traces across every page
     * @throws Exception if any page request fails
     */
    public List<Trace> fetchAllParallel(LangfuseClient client) throws Exception {
        // First, get total pages
        GetTracesRequest initialRequest = GetTracesRequest.builder()
                .page(1)
                .limit(100)
                .build();
        Traces firstPage = client.trace().list(initialRequest);
        int totalPages = firstPage.getMeta().getTotalPages();
        // Fetch all pages in parallel on a bounded pool.
        ExecutorService executor = Executors.newFixedThreadPool(10);
        try {
            List<CompletableFuture<Traces>> futures = IntStream.rangeClosed(1, totalPages)
                    .mapToObj(page -> CompletableFuture.supplyAsync(() -> {
                        GetTracesRequest request = GetTracesRequest.builder()
                                .page(page)
                                .limit(100)
                                .build();
                        return client.trace().list(request);
                    }, executor))
                    .collect(Collectors.toList());
            // Wait for all pages and flatten into a single result list.
            return futures.stream()
                    .map(CompletableFuture::join)
                    .flatMap(traces -> traces.getData().stream())
                    .collect(Collectors.toList());
        } finally {
            // Shut the pool down even if a page request throws; the original
            // leaked the executor on failure.
            executor.shutdown();
        }
    }
}

public List<Trace> searchTraces(String searchTerm) {
List<Trace> results = new ArrayList<>();
int page = 1;
int maxResults = 500; // Limit total results
// Stop once enough matches are collected or pages run out.
while (results.size() < maxResults) {
GetTracesRequest request = GetTracesRequest.builder()
.name(searchTerm)
.page(page)
.limit(100)
.build();
Traces traces = client.trace().list(request);
// An empty page means no further matches regardless of metadata.
if (traces.getData().isEmpty()) {
break;
}
results.addAll(traces.getData());
if (page >= traces.getMeta().getTotalPages() ||
results.size() >= maxResults) {
break;
}
page++;
}
// subList returns a view; the last page may overshoot maxResults.
return results.subList(0, Math.min(results.size(), maxResults));
}

Tips: check meta.getTotalPages() to avoid unnecessary requests, and test data.isEmpty() for early termination.

Combine pagination with filters for efficient queries:
// Server-side filters reduce the number of pages that must be traversed.
GetObservationsRequest request = GetObservationsRequest.builder()
.type(ObservationType.GENERATION)
.fromStartTime("2025-10-01T00:00:00Z")
.toStartTime("2025-10-31T23:59:59Z")
.page(currentPage)
.limit(50)
.build();
ObservationsViews observations = client.observations().getMany(request);

Install with Tessl CLI
npx tessl i tessl/maven-com-langfuse--langfuse-java