CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/maven-com-google-genai--google-genai

Java idiomatic SDK for the Gemini Developer APIs and Vertex AI APIs

Overview
Eval results
Files

chat-sessions.mddocs/

Chat Sessions

Create and manage multi-turn chat sessions with automatic history management. Chat sessions maintain conversation context across multiple message exchanges.

Core Imports

import com.google.genai.Chats;
import com.google.genai.Chat;
import com.google.genai.AsyncChats;
import com.google.genai.AsyncChat;
import com.google.genai.ResponseStream;
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.Content;
import java.util.List;
import java.util.concurrent.CompletableFuture;

Chats Service

package com.google.genai;

public class Chats {
  // Creates a synchronous chat session for the given model, applying `config`
  // (system instruction, temperature, etc.) to every message in the session.
  public Chat create(String model, GenerateContentConfig config);
  // Creates a synchronous chat session with the service's default configuration.
  public Chat create(String model);
}

Chat Class

package com.google.genai;

/**
 * A synchronous multi-turn chat session. Conversation history is appended
 * automatically on every call within the same instance. NOT thread-safe.
 */
public class Chat {
  // Send messages (synchronous) — each call appends the request and the
  // model's reply to the session history.
  public GenerateContentResponse sendMessage(String text);
  public GenerateContentResponse sendMessage(String text, GenerateContentConfig config);
  public GenerateContentResponse sendMessage(Content content);
  public GenerateContentResponse sendMessage(Content content, GenerateContentConfig config);
  public GenerateContentResponse sendMessage(List<Content> contents);
  public GenerateContentResponse sendMessage(List<Content> contents, GenerateContentConfig config);

  // Stream messages — the aggregated response is recorded in history once the
  // returned stream has been consumed/closed.
  public ResponseStream<GenerateContentResponse> sendMessageStream(String text);
  public ResponseStream<GenerateContentResponse> sendMessageStream(String text, GenerateContentConfig config);
  public ResponseStream<GenerateContentResponse> sendMessageStream(Content content);
  public ResponseStream<GenerateContentResponse> sendMessageStream(Content content, GenerateContentConfig config);
  public ResponseStream<GenerateContentResponse> sendMessageStream(List<Content> contents);
  public ResponseStream<GenerateContentResponse> sendMessageStream(List<Content> contents, GenerateContentConfig config);

  // Access conversation history. curated=true returns only the valid turns;
  // curated=false returns the comprehensive history. There is no method to
  // clear history — create a new Chat instance to start fresh.
  public ImmutableList<Content> getHistory(boolean curated);
}

Async Chats Service

package com.google.genai;

public class AsyncChats {
  // Creates an asynchronous chat session for the given model, applying
  // `config` to every message in the session.
  public AsyncChat create(String model, GenerateContentConfig config);
  // Creates an asynchronous chat session with the service's default configuration.
  public AsyncChat create(String model);
}

Async Chat Class

package com.google.genai;

public class AsyncChat {
  // Send messages (asynchronous) — the returned future completes with the
  // model's reply; history is appended automatically, as in the sync Chat.
  public CompletableFuture<GenerateContentResponse> sendMessage(String text);
  public CompletableFuture<GenerateContentResponse> sendMessage(String text, GenerateContentConfig config);
  public CompletableFuture<GenerateContentResponse> sendMessage(Content content);
  public CompletableFuture<GenerateContentResponse> sendMessage(Content content, GenerateContentConfig config);
  public CompletableFuture<GenerateContentResponse> sendMessage(List<Content> contents);
  public CompletableFuture<GenerateContentResponse> sendMessage(List<Content> contents, GenerateContentConfig config);

  // Stream messages (asynchronous) — the future completes with a stream of
  // response chunks; close the stream (try-with-resources) when done.
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(String text);
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(String text, GenerateContentConfig config);
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(Content content);
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(Content content, GenerateContentConfig config);
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(List<Content> contents);
  public CompletableFuture<ResponseStream<GenerateContentResponse>> sendMessageStream(List<Content> contents, GenerateContentConfig config);
}

Basic Usage

Create and Use Chat

import com.google.genai.Client;
import com.google.genai.Chat;

Client client = new Client();

// Create chat session
Chat chat = client.chats.create("gemini-2.0-flash");

// Send first message
GenerateContentResponse response1 = chat.sendMessage("Hello! My name is Alice.");
System.out.println("Bot: " + response1.text());

// Send follow-up (context is maintained)
GenerateContentResponse response2 = chat.sendMessage("What's my name?");
System.out.println("Bot: " + response2.text());
// Expected: "Your name is Alice." (remembers previous message)

Chat with Configuration

import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.Content;
import com.google.genai.types.Part;

// Create system instruction
Content systemInstruction = Content.fromParts(
    Part.fromText("You are a helpful math tutor. Be encouraging and explain step by step.")
);

GenerateContentConfig config = GenerateContentConfig.builder()
    .systemInstruction(systemInstruction)
    .temperature(0.2)
    .build();

Chat chat = client.chats.create("gemini-2.0-flash", config);

GenerateContentResponse response = chat.sendMessage("How do I solve x + 5 = 10?");
System.out.println(response.text());

Multi-Turn Conversations

Extended Conversation

Chat chat = client.chats.create("gemini-2.0-flash");

// Turn 1
chat.sendMessage("I'm planning a trip to Paris.");
// Bot responds with suggestions

// Turn 2
chat.sendMessage("What's the best time to visit?");
// Bot understands context (Paris)

// Turn 3
chat.sendMessage("How many days should I spend there?");
// Bot continues conversation naturally

// Turn 4
chat.sendMessage("What are must-see attractions?");
// Bot provides Paris-specific attractions

Interactive Chat Loop

import java.util.Scanner;

Chat chat = client.chats.create("gemini-2.0-flash");
Scanner scanner = new Scanner(System.in);

System.out.println("Chat started. Type 'exit' to quit.");

while (true) {
    System.out.print("You: ");
    String userInput = scanner.nextLine();

    if ("exit".equalsIgnoreCase(userInput)) {
        break;
    }

    GenerateContentResponse response = chat.sendMessage(userInput);
    System.out.println("Bot: " + response.text());
    System.out.println();
}

scanner.close();

History Management

Note: The Chat class maintains conversation history automatically across sendMessage() and sendMessageStream() calls within the same Chat instance. The accumulated history can be read with getHistory(boolean curated), but there is no method to clear it — to start a new conversation without previous history, create a new Chat instance.

Chat chat1 = client.chats.create("gemini-2.0-flash");

chat1.sendMessage("Tell me about AI");
chat1.sendMessage("What are neural networks?");
chat1.sendMessage("Explain backpropagation");

// Model remembers context from previous messages
// History is maintained automatically

// To start fresh conversation, create a new Chat instance
Chat chat2 = client.chats.create("gemini-2.0-flash");
chat2.sendMessage("Let's talk about something completely different");
// No history from chat1

Streaming Chat

Basic Streaming

Chat chat = client.chats.create("gemini-2.0-flash");

System.out.print("Bot: ");
try (ResponseStream<GenerateContentResponse> stream =
    chat.sendMessageStream("Tell me a story")) {

    for (GenerateContentResponse chunk : stream) {
        System.out.print(chunk.text());
    }
    System.out.println();
}

// Continue conversation
GenerateContentResponse response = chat.sendMessage("Make it shorter");
System.out.println("Bot: " + response.text());

Streaming Interactive Chat

import java.util.Scanner;

Chat chat = client.chats.create("gemini-2.0-flash");
Scanner scanner = new Scanner(System.in);

while (true) {
    System.out.print("You: ");
    String input = scanner.nextLine();

    if ("exit".equalsIgnoreCase(input)) break;

    System.out.print("Bot: ");
    try (ResponseStream<GenerateContentResponse> stream =
        chat.sendMessageStream(input)) {

        for (GenerateContentResponse chunk : stream) {
            System.out.print(chunk.text());
            System.out.flush();
        }
        System.out.println("\n");
    }
}

Multimodal Chat

Chat with Images

import com.google.genai.types.Content;
import com.google.genai.types.Part;

Chat chat = client.chats.create("gemini-2.0-flash");

// First message with image
Content imageContent = Content.fromParts(
    Part.fromText("What's in this image?"),
    Part.fromUri("gs://bucket/image.jpg", "image/jpeg")
);

GenerateContentResponse response1 = chat.sendMessage(imageContent);
System.out.println(response1.text());

// Follow-up without image (model remembers the image)
GenerateContentResponse response2 = chat.sendMessage("What colors are prominent?");
System.out.println(response2.text());

// Another follow-up
GenerateContentResponse response3 = chat.sendMessage("Describe the lighting");
System.out.println(response3.text());

Mixed Media Chat

Chat chat = client.chats.create("gemini-2.0-flash");

// Message 1: Text only
chat.sendMessage("I'm analyzing some data");

// Message 2: With image
Content imageMsg = Content.fromParts(
    Part.fromText("Here's a chart"),
    Part.fromUri("gs://bucket/chart.png", "image/png")
);
chat.sendMessage(imageMsg);

// Message 3: With document
Content docMsg = Content.fromParts(
    Part.fromText("And here's the report"),
    Part.fromUri("gs://bucket/report.pdf", "application/pdf")
);
chat.sendMessage(docMsg);

// Message 4: Follow-up question
GenerateContentResponse response = chat.sendMessage(
    "What are the key findings from the chart and report?"
);
System.out.println(response.text());

Async Chat

Basic Async Chat

import java.util.concurrent.CompletableFuture;

AsyncChat chat = client.async.chats.create("gemini-2.0-flash");

CompletableFuture<GenerateContentResponse> future1 =
    chat.sendMessage("What is machine learning?");

future1.thenAccept(response -> {
    System.out.println("Response 1: " + response.text());

    // Send follow-up
    chat.sendMessage("Give me an example")
        .thenAccept(response2 -> {
            System.out.println("Response 2: " + response2.text());
        });
});

Async Streaming

AsyncChat chat = client.async.chats.create("gemini-2.0-flash");

CompletableFuture<ResponseStream<GenerateContentResponse>> futureStream =
    chat.sendMessageStream("Tell me about space");

futureStream.thenAccept(stream -> {
    try (ResponseStream<GenerateContentResponse> s = stream) {
        System.out.print("Bot: ");
        for (GenerateContentResponse chunk : s) {
            System.out.print(chunk.text());
        }
        System.out.println();
    }
});

Per-Message Configuration

Override Config Per Message

// Create chat with default config
GenerateContentConfig defaultConfig = GenerateContentConfig.builder()
    .temperature(0.7)
    .build();

Chat chat = client.chats.create("gemini-2.0-flash", defaultConfig);

// Send with default config
chat.sendMessage("Tell me something creative");

// Override for specific message
GenerateContentConfig factualConfig = GenerateContentConfig.builder()
    .temperature(0.1)
    .build();

chat.sendMessage("Now give me just facts about the sun", factualConfig);

Thread Safety

Important: Chat and AsyncChat instances are NOT thread-safe. Each chat session manages conversation history and should not be accessed from multiple threads simultaneously.

// Wrong - Don't do this
Chat chat = client.chats.create("gemini-2.0-flash");

// Multiple threads accessing same chat - UNSAFE
new Thread(() -> chat.sendMessage("Hello from thread 1")).start();
new Thread(() -> chat.sendMessage("Hello from thread 2")).start();

// Right - Create separate chat per thread
new Thread(() -> {
    Chat threadChat = client.chats.create("gemini-2.0-flash");
    threadChat.sendMessage("Hello from thread 1");
}).start();

Best Practices

Handle Long Conversations

Chat chat = client.chats.create("gemini-2.0-flash");

int maxTurns = 20;
int turnCount = 0;

while (turnCount < maxTurns) {
    // ... chat interaction ...
    turnCount++;

    // Inspect the accumulated history (curated=true returns only valid turns).
    List<Content> history = chat.getHistory(true);
    // If the history has grown too long, compress it and start fresh.
    if (history.size() > 40) {
        // Ask the model to summarize the conversation so far.
        GenerateContentResponse summary = chat.sendMessage(
            "Summarize our conversation so far in 2-3 sentences"
        );

        // A Chat's history cannot be cleared; instead, start a new session
        // that carries the summary forward as a system instruction.
        GenerateContentConfig config = GenerateContentConfig.builder()
            .systemInstruction(Content.fromParts(
                Part.fromText("Previous conversation summary: " + summary.text())
            ))
            .build();

        chat = client.chats.create("gemini-2.0-flash", config);
    }
}

Save and Restore Conversation

import com.google.common.collect.ImmutableList;

// During conversation
// During conversation
Chat chat = client.chats.create("gemini-2.0-flash");
chat.sendMessage("Hello");
chat.sendMessage("Tell me about AI");

// Save history (curated=false returns the comprehensive history;
// copy defensively so the snapshot is independent of the session).
List<Content> savedHistory = ImmutableList.copyOf(chat.getHistory(false));

// Later, create a new chat
Chat newChat = client.chats.create("gemini-2.0-flash");

// Rebuild context by replaying the saved contents.
// NOTE: this sends the whole saved history as a single user turn — an
// approximation, since the SDK does not support seeding a Chat with history.
if (!savedHistory.isEmpty()) {
    newChat.sendMessage(savedHistory);
}

// Continue conversation
newChat.sendMessage("Continue where we left off");

Error Handling in Chat

import com.google.genai.errors.ApiException;

Chat chat = client.chats.create("gemini-2.0-flash");

try {
    GenerateContentResponse response = chat.sendMessage("Your message");
    System.out.println(response.text());
} catch (ApiException e) {
    if (e.code() == 429) {
        // Rate limited: back off briefly, then retry once.
        System.err.println("Rate limited, waiting...");
        try {
            // Thread.sleep throws checked InterruptedException — it must be
            // handled here or the snippet will not compile.
            Thread.sleep(5000);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();  // preserve interrupt status
        }
        GenerateContentResponse retry = chat.sendMessage("Your message");
        System.out.println(retry.text());
    } else {
        System.err.println("Error: " + e.message());
    }
}

Install with Tessl CLI

npx tessl i tessl/maven-com-google-genai--google-genai

docs

batch-operations.md

caching.md

chat-sessions.md

client-configuration.md

content-generation.md

embeddings-tokens.md

error-handling.md

file-search-stores.md

files-management.md

image-operations.md

index.md

live-sessions.md

model-tuning.md

operations.md

tools-functions.md

types-reference.md

video-generation.md

tile.json