CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/maven-dev-langchain4j--langchain4j-bedrock

AWS Bedrock integration for LangChain4j enabling Java applications to interact with various LLM providers through a unified interface

Overview
Eval results
Files

docs/quickstart/streaming.md

Quick Start: Streaming Chat

Stream responses token-by-token for real-time applications.

Basic Streaming

import dev.langchain4j.model.bedrock.BedrockStreamingChatModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.response.ChatResponse; // was missing — onComplete below needs it
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.data.message.UserMessage;
import software.amazon.awssdk.regions.Region;

// Streaming model: tokens are delivered incrementally via callbacks
// instead of a single blocking response.
BedrockStreamingChatModel model = BedrockStreamingChatModel.builder()
    .region(Region.US_EAST_1)
    .modelId("anthropic.claude-3-5-sonnet-20241022-v2:0")
    .build();

// chat() returns immediately; the handler's callbacks fire as the stream progresses.
model.chat(
    ChatRequest.builder()
        .messages(UserMessage.from("Tell me a story"))
        .build(),
    new StreamingChatResponseHandler() {
        @Override
        public void onPartialResponse(String token) {
            // Invoked once per streamed token; print without a newline for live output.
            System.out.print(token);
        }

        @Override
        // NOTE(review): recent LangChain4j releases name this callback
        // onCompleteResponse — verify against the version this doc pins.
        public void onComplete(ChatResponse response) {
            // Invoked once after the final token; response carries usage metadata.
            System.out.println("\n\nDone! Tokens: " + response.tokenUsage());
        }

        @Override
        public void onError(Throwable error) {
            // Invoked if the stream fails; no further callbacks follow.
            System.err.println("Error: " + error.getMessage());
        }
    }
);

With Parameters

import dev.langchain4j.model.bedrock.BedrockChatRequestParameters;

// Default generation settings applied to every request sent through this model.
BedrockChatRequestParameters defaults = BedrockChatRequestParameters.builder()
    .temperature(0.8)       // higher values produce more varied output
    .maxOutputTokens(2048)  // upper bound on generated tokens per response
    .build();

BedrockStreamingChatModel model = BedrockStreamingChatModel.builder()
    .region(Region.US_EAST_1)
    .modelId("anthropic.claude-3-5-sonnet-20241022-v2:0")
    .defaultRequestParameters(defaults)
    .build();

Custom Handler

/**
 * Accumulates streamed tokens into a single string and provides hooks
 * for custom per-token and end-of-stream processing.
 */
class CustomHandler implements StreamingChatResponseHandler {
    // Buffer for the response text assembled token by token.
    private final StringBuilder buffer = new StringBuilder();

    @Override
    public void onPartialResponse(String token) {
        // Collect each token as it arrives.
        buffer.append(token);
        // Custom processing: update UI, log, etc.
    }

    @Override
    public void onComplete(ChatResponse response) {
        // The full text is only available once the stream has finished.
        String complete = buffer.toString();
        // Process complete response
    }

    @Override
    public void onError(Throwable error) {
        // Handle errors
    }
}

model.chat(request, new CustomHandler());

Cancellation

import dev.langchain4j.model.chat.response.StreamingHandle;

// One-element array so the anonymous handler can publish the handle to the
// enclosing scope (locals captured by an anonymous class must be effectively final).
final StreamingHandle[] handle = new StreamingHandle[1];

model.chat(
    request,
    new StreamingChatResponseHandler() {
        @Override
        public StreamingHandle onStart() {
            // Placeholder: the original snippet used a bare comment as the
            // initializer, which does not compile. Replace null with the
            // handle the framework supplies when the stream opens.
            // NOTE(review): confirm the exact onStart contract against the
            // API reference — TODO verify how the handle is obtained.
            StreamingHandle h = null; // provided by framework
            handle[0] = h;
            return h;
        }

        @Override
        public void onPartialResponse(String token) {
            // Invoked once per streamed token.
            System.out.print(token);
        }

        @Override
        public void onComplete(ChatResponse response) {
            System.out.println("\nComplete");
        }

        @Override
        public void onError(Throwable error) {
            System.err.println("Error: " + error.getMessage());
        }
    }
);

// Cancel if needed; guard against the stream never having started.
if (handle[0] != null) {
    handle[0].cancel();
}

Next Steps:

  • API Reference for complete options
  • Error Handling for robust applications
  • Basic Chat for synchronous alternative

Install with Tessl CLI

npx tessl i tessl/maven-dev-langchain4j--langchain4j-bedrock

docs

index.md

README.md

tile.json