The MongoDB Synchronous Driver for Java, providing a blocking I/O API for database operations
—
Index creation, management, and Atlas Search index operations for optimizing query performance, ensuring efficient data access patterns, and enabling full-text search capabilities.
Core index management operations for creating, listing, and managing database indexes.
/**
 * Creates a single index on the collection.
 * @param keys the index specification as Bson
 * @return the name of the created index (server-derived from the keys unless
 *         a custom name is supplied via IndexOptions)
 */
String createIndex(Bson keys);
/**
 * Creates a single index with options (uniqueness, TTL, custom name, etc.).
 * @param keys the index specification as Bson
 * @param indexOptions options for index creation
 * @return the name of the created index
 */
String createIndex(Bson keys, IndexOptions indexOptions);
/**
 * Creates a single index within the given client session.
 * @param clientSession the session to use for the operation
 * @param keys the index specification as Bson
 * @return the name of the created index
 */
String createIndex(ClientSession clientSession, Bson keys);
/**
 * Creates a single index within the given client session, with options.
 * @param clientSession the session to use for the operation
 * @param keys the index specification as Bson
 * @param indexOptions options for index creation
 * @return the name of the created index
 */
String createIndex(ClientSession clientSession, Bson keys, IndexOptions indexOptions);
/**
 * Creates multiple indexes in a single operation (one server round trip).
 * @param indexes list of IndexModel objects defining the indexes
 * @return list of created index names
 */
List<String> createIndexes(List<IndexModel> indexes);
/**
 * Creates multiple indexes with operation-level options (e.g. a time limit).
 * @param indexes list of IndexModel objects defining the indexes
 * @param createIndexOptions options for the create indexes operation
 * @return list of created index names
 */
List<String> createIndexes(List<IndexModel> indexes, CreateIndexOptions createIndexOptions);
/**
 * Creates multiple indexes within the given client session.
 * @param clientSession the session to use for the operation
 * @param indexes list of IndexModel objects defining the indexes
 * @return list of created index names
 */
List<String> createIndexes(ClientSession clientSession, List<IndexModel> indexes);
/**
 * Drops an index by name.
 * @param indexName the name of the index to drop
 */
void dropIndex(String indexName);
/**
 * Drops an index by its key specification.
 * @param keys the index specification as Bson
 */
void dropIndex(Bson keys);
/**
 * Drops an index by name within the given client session.
 * @param clientSession the session to use for the operation
 * @param indexName the name of the index to drop
 */
void dropIndex(ClientSession clientSession, String indexName);
/**
 * Drops all indexes on the collection except the default _id index.
 */
void dropIndexes();
/**
 * Drops all indexes (except _id) within the given client session.
 * @param clientSession the session to use for the operation
 */
void dropIndexes(ClientSession clientSession);
/**
 * Lists all indexes on the collection.
 * @return ListIndexesIterable for iterating over index information
 */
ListIndexesIterable<Document> listIndexes();
/**
 * Lists all indexes, decoding each index document to a custom type.
 * @param clazz the class to decode each index document to
 * @return ListIndexesIterable for iterating over index information
 */
<TResult> ListIndexesIterable<TResult> listIndexes(Class<TResult> clazz);
/**
 * Lists all indexes within the given client session.
 * @param clientSession the session to use for the operation
 * @return ListIndexesIterable for iterating over index information
 */
ListIndexesIterable<Document> listIndexes(ClientSession clientSession);

Usage Examples:
import com.mongodb.client.model.Indexes;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.IndexModel;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
// Create simple ascending index; the driver returns the index name
String indexName = collection.createIndex(Indexes.ascending("username"));
System.out.println("Created index: " + indexName);
// Create compound index — the driver's multi-field helper is compoundIndex (there is no Indexes.compound)
collection.createIndex(Indexes.compoundIndex(
    Indexes.ascending("category"),
    Indexes.descending("price"),
    Indexes.ascending("name")
));
// Create index with options. expireAfter takes a boxed Long, so use a long literal.
IndexOptions options = new IndexOptions()
    .unique(true)
    .background(true) // deprecated in MongoDB 4.2+
    .name("unique_email_idx")
    .expireAfter(30L, TimeUnit.DAYS);
collection.createIndex(Indexes.ascending("email"), options);
// Create multiple indexes efficiently in one server round trip
List<IndexModel> indexes = Arrays.asList(
    new IndexModel(Indexes.ascending("userId")),
    new IndexModel(Indexes.ascending("timestamp")),
    new IndexModel(Indexes.compoundIndex(
        Indexes.ascending("userId"),
        Indexes.descending("timestamp")
    ), new IndexOptions().name("user_timeline_idx"))
);
List<String> createdIndexes = collection.createIndexes(indexes);
System.out.println("Created indexes: " + createdIndexes);
// List all indexes
for (Document index : collection.listIndexes()) {
    System.out.println("Index: " + index.toJson());
}
// Drop a specific index by name
collection.dropIndex("unique_email_idx");
// Drop an index by its key specification
collection.dropIndex(Indexes.ascending("username"));

Various index types supported by MongoDB for different use cases.
// Single field indexes
Bson ascendingIndex = Indexes.ascending("fieldName");
Bson descendingIndex = Indexes.descending("fieldName");
// Compound indexes (the driver helper is compoundIndex)
Bson compoundIndex = Indexes.compoundIndex(
    Indexes.ascending("field1"),
    Indexes.descending("field2"),
    Indexes.ascending("field3")
);
// Text indexes for full-text search
Bson textIndex = Indexes.text("title");
Bson multiFieldTextIndex = Indexes.compoundIndex(
    Indexes.text("title"),
    Indexes.text("content"),
    Indexes.text("tags")
);
// Geospatial indexes
Bson geo2dIndex = Indexes.geo2d("location");
Bson geo2dsphereIndex = Indexes.geo2dsphere("coordinates");
// Hashed indexes for sharding
Bson hashedIndex = Indexes.hashed("shardKey");
// Wildcard indexes for dynamic schemas use an ascending key on "$**".
// (Indexes.text("$**") would instead create a wildcard TEXT index.)
Bson wildcardIndex = Indexes.ascending("$**");
Bson fieldWildcardIndex = Indexes.ascending("metadata.$**");

Usage Examples:
// Create text index for search functionality
collection.createIndex(Indexes.compoundIndex(
    Indexes.text("title"),
    Indexes.text("content"),
    Indexes.text("tags")
), new IndexOptions().name("content_search_idx"));
// Create geospatial index for location-based queries
collection.createIndex(Indexes.geo2dsphere("location"));
// Query using the geospatial index (x, y, maxDistance, minDistance)
List<Document> nearbyPlaces = collection.find(
    Filters.near("location", -73.9857, 40.7484, 1000.0, 0.0)
).into(new ArrayList<>());
// Create a partial index: only documents matching the filter are indexed
IndexOptions partialOptions = new IndexOptions()
    .partialFilterExpression(Filters.exists("premiumUser", true))
    .name("premium_users_idx");
collection.createIndex(Indexes.ascending("userId"), partialOptions);
// Create TTL index for automatic document expiration.
// expireAfter takes a boxed Long, so pass a long literal.
IndexOptions ttlOptions = new IndexOptions()
    .expireAfter(7L, TimeUnit.DAYS)
    .name("session_ttl_idx");
collection.createIndex(Indexes.ascending("createdAt"), ttlOptions);

Comprehensive index configuration options for performance optimization and specialized use cases.
/**
 * IndexOptions for configuring index creation.
 * Each setter returns the options instance, so calls can be chained fluently.
 */
public class IndexOptions {
    /**
     * Sets whether the index should enforce uniqueness.
     * @param unique true for unique index
     * @return IndexOptions with unique setting
     */
    public IndexOptions unique(boolean unique);
    /**
     * Sets whether the index should be built in the background.
     * NOTE: background index builds are deprecated in MongoDB 4.2+.
     * @param background true for background index build
     * @return IndexOptions with background setting
     */
    public IndexOptions background(boolean background);
    /**
     * Sets whether the index should be sparse (documents that lack the indexed
     * field are omitted from the index).
     * @param sparse true for sparse index
     * @return IndexOptions with sparse setting
     */
    public IndexOptions sparse(boolean sparse);
    /**
     * Sets the name of the index; otherwise the server derives one from the keys.
     * @param name the index name
     * @return IndexOptions with specified name
     */
    public IndexOptions name(String name);
    /**
     * Sets the partial filter expression: only documents matching the filter
     * are included in the index.
     * @param partialFilterExpression the filter expression
     * @return IndexOptions with partial filter
     */
    public IndexOptions partialFilterExpression(Bson partialFilterExpression);
    /**
     * Sets the TTL (time to live) after which indexed documents expire.
     * Note the boxed Long parameter: pass a long literal such as 30L.
     * NOTE(review): TTL expiry assumes the indexed field holds date values —
     * confirm for the target collection.
     * @param expireAfter the time after which documents expire
     * @param timeUnit the time unit
     * @return IndexOptions with TTL setting
     */
    public IndexOptions expireAfter(Long expireAfter, TimeUnit timeUnit);
    /**
     * Sets the collation for string comparisons in the index.
     * @param collation the collation specification
     * @return IndexOptions with collation
     */
    public IndexOptions collation(Collation collation);
    /**
     * Sets the wildcard projection controlling which fields a wildcard ("$**")
     * index covers.
     * @param wildcardProjection the projection specification
     * @return IndexOptions with wildcard projection
     */
    public IndexOptions wildcardProjection(Bson wildcardProjection);
    /**
     * Sets storage-engine-specific options for the index.
     * @param storageEngine storage engine specific options
     * @return IndexOptions with storage engine settings
     */
    public IndexOptions storageEngine(Bson storageEngine);
}

Usage Examples:
// Unique index with custom name and a case-insensitive collation
Collation caseInsensitive = Collation.builder()
    .locale("en")
    .caseLevel(false)
    .build();
IndexOptions uniqueOptions = new IndexOptions()
    .unique(true)
    .name("unique_username_ci")
    .collation(caseInsensitive);
collection.createIndex(Indexes.ascending("username"), uniqueOptions);
// Partial index covering active users only
IndexOptions activeUserOptions = new IndexOptions()
    .partialFilterExpression(Filters.eq("status", "active"))
    .name("active_users_email_idx");
collection.createIndex(Indexes.ascending("email"), activeUserOptions);
// Sparse index for optional fields
IndexOptions sparseOptions = new IndexOptions()
    .sparse(true)
    .name("optional_phone_idx");
collection.createIndex(Indexes.ascending("phoneNumber"), sparseOptions);
// Wildcard index with projection. wildcardProjection applies to a key-pattern
// wildcard index on "$**", and a projection may not mix inclusions and
// exclusions — list only the fields to include (priority is excluded by omission).
IndexOptions wildcardOptions = new IndexOptions()
    .wildcardProjection(new Document()
        .append("metadata.tags", 1)
        .append("metadata.category", 1))
    .name("metadata_wildcard_idx");
collection.createIndex(Indexes.ascending("$**"), wildcardOptions);

Operations for managing Atlas Search indexes for full-text search capabilities.
/**
 * Creates an Atlas Search index with the given name.
 * @param indexName the name of the search index
 * @param definition the search index definition as Bson (e.g. a "mappings" document)
 * @return the name of the created search index
 */
String createSearchIndex(String indexName, Bson definition);
/**
 * Creates an Atlas Search index using the default index name.
 * @param definition the search index definition as Bson
 * @return the name of the created search index
 */
String createSearchIndex(Bson definition);
/**
 * Creates multiple Atlas Search indexes in a single operation.
 * @param searchIndexes list of SearchIndexModel objects
 * @return list of created search index names
 */
List<String> createSearchIndexes(List<SearchIndexModel> searchIndexes);
/**
 * Replaces the definition of an existing Atlas Search index.
 * @param indexName the name of the search index to update
 * @param definition the new search index definition
 */
void updateSearchIndex(String indexName, Bson definition);
/**
 * Drops an Atlas Search index.
 * @param indexName the name of the search index to drop
 */
void dropSearchIndex(String indexName);
/**
 * Lists the Atlas Search indexes defined on the collection.
 * @return ListSearchIndexesIterable for iterating over search indexes
 */
ListSearchIndexesIterable<Document> listSearchIndexes();
/**
 * Lists Atlas Search indexes, decoding each result to a custom type.
 * Note: the sync driver exposes no listSearchIndexes(String) overload; to
 * restrict results to one index, call name(indexName) on the returned iterable.
 * @param resultClass the class to decode each index document to
 * @return ListSearchIndexesIterable for iterating over search indexes
 */
<TResult> ListSearchIndexesIterable<TResult> listSearchIndexes(Class<TResult> resultClass);

Usage Examples:
// Create basic Atlas Search index
Document searchIndexDefinition = new Document()
    .append("mappings", new Document()
        .append("dynamic", true)
        .append("fields", new Document()
            .append("title", new Document()
                .append("type", "string")
                .append("analyzer", "lucene.standard"))
            .append("content", new Document()
                .append("type", "string")
                .append("analyzer", "lucene.english"))
            .append("tags", new Document()
                .append("type", "stringFacet"))));
String searchIndexName = collection.createSearchIndex("content_search", searchIndexDefinition);
System.out.println("Created search index: " + searchIndexName);
// Create search index with an autocomplete field mapping
Document autocompleteDefinition = new Document()
    .append("mappings", new Document()
        .append("fields", new Document()
            .append("title", new Document()
                .append("type", "autocomplete")
                .append("analyzer", "lucene.standard")
                .append("tokenization", "edgeGram")
                .append("minGrams", 2)
                .append("maxGrams", 10))));
collection.createSearchIndex("autocomplete_idx", autocompleteDefinition);
// Create vector search index for semantic search.
// NOTE(review): createSearchIndex(name, definition) creates a default
// "search"-type index; vector definitions belong to a vectorSearch-type index,
// normally created via createSearchIndexes with a SearchIndexModel that sets
// the index type — confirm against the driver/Atlas version in use.
Document vectorDefinition = new Document()
    .append("fields", new Document()
        .append("embedding", new Document()
            .append("type", "vector")
            .append("dimensions", 768)
            .append("similarity", "cosine")));
collection.createSearchIndex("vector_search", vectorDefinition);
// List all search indexes
for (Document searchIndex : collection.listSearchIndexes()) {
    System.out.println("Search Index: " + searchIndex.toJson());
}
// Replace an existing search index definition
Document updatedDefinition = new Document()
    .append("mappings", new Document()
        .append("dynamic", false)
        .append("fields", new Document()
            .append("title", new Document()
                .append("type", "string")
                .append("analyzer", "lucene.keyword"))
            .append("description", new Document()
                .append("type", "string")
                .append("analyzer", "lucene.english"))));
collection.updateSearchIndex("content_search", updatedDefinition);

Interface for querying and filtering index information.
/**
 * Interface for listing indexes with configuration options.
 * Each setter returns the iterable itself, so calls can be chained fluently.
 */
public interface ListIndexesIterable<TResult> extends MongoIterable<TResult> {
    /**
     * Sets the maximum server execution time for the operation.
     * @param maxTime the maximum time
     * @param timeUnit the time unit
     * @return ListIndexesIterable with time limit
     */
    ListIndexesIterable<TResult> maxTime(long maxTime, TimeUnit timeUnit);
    /**
     * Sets the batch size for the underlying cursor.
     * @param batchSize the batch size
     * @return ListIndexesIterable with specified batch size
     */
    ListIndexesIterable<TResult> batchSize(int batchSize);
    /**
     * Attaches a comment to the list indexes operation (useful for tracing
     * the operation in server logs and the profiler).
     * @param comment the comment string
     * @return ListIndexesIterable with comment
     */
    ListIndexesIterable<TResult> comment(String comment);
}

Tools and techniques for monitoring and optimizing index performance.
// Analyze index details via the collStats command
Document indexStats = database.runCommand(new Document("collStats", "myCollection")
    .append("indexDetails", true));
System.out.println("Index statistics: " + indexStats.toJson());
// Get per-index usage statistics with $indexStats.
// The raw aggregate command requires a "cursor" document, or the server rejects it.
Document indexUsage = database.runCommand(new Document("aggregate", "myCollection")
    .append("pipeline", Arrays.asList(
        new Document("$indexStats", new Document())
    ))
    .append("cursor", new Document()));
// Explain the query to verify which index (if any) the planner uses
Document explainResult = collection.find(Filters.eq("userId", 12345))
    .explain(ExplainVerbosity.EXECUTION_STATS);
System.out.println("Query execution plan: " + explainResult.toJson());
// Monitor slow queries and missing indexes via the profiler collection
MongoCollection<Document> profilerCollection = database.getCollection("system.profile");
// Enable profiling of slow operations only.
// Level 1 profiles operations slower than slowms; level 2 would profile everything.
database.runCommand(new Document("profile", 1)
    .append("slowms", 100)
    .append("sampleRate", 1.0));
// Query profiler data for operations that fell back to a full collection scan
List<Document> slowQueries = profilerCollection.find(
    Filters.and(
        Filters.gte("ts", Date.from(Instant.now().minus(1, ChronoUnit.HOURS))),
        Filters.eq("planSummary", "COLLSCAN")
    )
).into(new ArrayList<>());
for (Document slowQuery : slowQueries) {
    System.out.println("Slow query without index: " + slowQuery.toJson());
}

Guidelines for maintaining optimal index performance and managing index lifecycle.
// Regular index maintenance: drop stale indexes, reset plan cache, check health
private void performIndexMaintenance() {
    // Rebuild fragmented indexes (MongoDB handles this automatically in most cases)
    // Remove indexes that report no recent usage
    removeUnusedIndexes();
    // Clear cached query plans so the planner re-evaluates against the current indexes
    // (planCacheClear does not update statistics; it only invalidates cached plans)
    database.runCommand(new Document("planCacheClear", collection.getNamespace().getCollectionName()));
    // Monitor index size and performance
    monitorIndexPerformance();
}
/**
 * Reports indexes with zero recorded accesses over at least thirty days,
 * as candidates for removal. Never considers the mandatory _id index.
 */
private void removeUnusedIndexes() {
    // $indexStats emits one document per index with an "accesses" subdocument:
    // "ops" (operation count, a BSON int64) and "since" (when tracking began)
    List<Document> indexStats = collection.aggregate(Arrays.asList(
        new Document("$indexStats", new Document())
    )).into(new ArrayList<>());
    Date thirtyDaysAgo = Date.from(Instant.now().minus(30, ChronoUnit.DAYS));
    for (Document indexStat : indexStats) {
        String indexName = indexStat.getString("name");
        Document accesses = indexStat.get("accesses", Document.class);
        // "ops" is an int64 on the wire, so getInteger() would throw
        // ClassCastException — read it as a Number instead
        long ops = ((Number) accesses.get("ops")).longValue();
        if (!indexName.equals("_id_") && ops == 0L) {
            // "since" is when the server began tracking, so zero ops with an old
            // "since" means the index has gone unused for at least the window
            Date trackedSince = accesses.getDate("since");
            if (trackedSince.before(thirtyDaysAgo)) {
                System.out.println("Considering removal of unused index: " + indexName);
                // collection.dropIndex(indexName); // Uncomment to actually drop
            }
        }
    }
}
// Index creation strategy for large collections
private void createIndexesForLargeCollection() {
    // Create indexes during low-traffic periods.
    // Note: CreateIndexOptions has no background() setter — background is a
    // per-index IndexOptions flag, and is deprecated in MongoDB 4.2+ anyway.
    CreateIndexOptions options = new CreateIndexOptions()
        .maxTime(2, TimeUnit.HOURS); // abort the build if it exceeds two hours
    List<IndexModel> indexes = Arrays.asList(
        new IndexModel(Indexes.ascending("frequently_queried_field")),
        new IndexModel(Indexes.compoundIndex(
            Indexes.ascending("user_id"),
            Indexes.descending("timestamp")
        ))
    );
    try {
        List<String> created = collection.createIndexes(indexes, options);
        System.out.println("Successfully created indexes: " + created);
    } catch (MongoException e) {
        System.err.println("Index creation failed: " + e.getMessage());
        // Handle index creation failure (e.g. retry later or alert operators)
    }
}
// Index optimization for different query patterns
private void optimizeIndexesForQueryPatterns() {
    // For equality queries: single field ascending index
    collection.createIndex(Indexes.ascending("status"));
    // For range queries: single field index (direction matched to the common sort)
    collection.createIndex(Indexes.descending("createdAt"));
    // For filter + sort: compound index with the equality field first, sort fields after
    collection.createIndex(Indexes.compoundIndex(
        Indexes.ascending("category"),   // Filter field first
        Indexes.descending("priority"),  // Sort field second
        Indexes.descending("createdAt")  // Additional sort field
    ));
    // For text search: text index with per-field weights
    collection.createIndex(Indexes.compoundIndex(
        Indexes.text("title"),
        Indexes.text("content")
    ), new IndexOptions()
        .weights(new Document("title", 10).append("content", 1))
        .name("weighted_text_search"));
}

Install with Tessl CLI
npx tessl i tessl/maven-org-mongodb--mongodb-driver-sync