A fully-featured and blazing-fast Python API client to interact with Algolia's search-as-a-service platform.
—
Quality: Pending — best-practices conformance has not yet been reviewed.
Impact: Pending — no eval scenarios have been run against this package.
Controlled experimentation platform for testing search configurations, UI changes, and business strategies with statistical significance. Run experiments to optimize search performance and user experience.
class AbtestingClient:
    """Async client for Algolia's A/B Testing API.

    Create and manage A/B tests for search optimization.

    All request methods are coroutines and must be awaited. Construct with
    either explicit credentials (``app_id``/``api_key``) or a pre-built
    ``transporter``/``config`` pair.
    """

    def __init__(
        self,
        app_id: Optional[str] = None,
        api_key: Optional[str] = None,
        transporter: Optional[Transporter] = None,
        config: Optional[AbtestingConfig] = None,
    ) -> None:
        """Initialize the client from credentials or a custom transport/config."""
        ...

    async def close(self) -> None:
        """Close the client and release underlying transport resources."""
        ...

    async def set_client_api_key(self, api_key: str) -> None:
        """Replace the API key used for subsequent requests."""
        ...

    async def list_ab_tests(
        self,
        offset: Optional[int] = None,
        limit: Optional[int] = None,
        index_prefix: Optional[str] = None,
        index_suffix: Optional[str] = None,
        request_options: Optional[Union[dict, RequestOptions]] = None,
    ) -> ListABTestsResponse:
        """
        List all A/B tests in the application.

        Parameters:
        - offset: Offset for pagination
        - limit: Maximum number of tests to return
        - index_prefix: Filter by index name prefix
        - index_suffix: Filter by index name suffix
        - request_options: Additional request options

        Returns:
            ListABTestsResponse with test list
        """

    async def get_ab_test(
        self,
        id: int,
        request_options: Optional[Union[dict, RequestOptions]] = None,
    ) -> ABTest:
        """
        Get details of a specific A/B test.

        Parameters:
        - id: Test identifier
        - request_options: Additional request options

        Returns:
            ABTest object with test details
        """

    async def add_ab_test(
        self,
        ab_test_create: Union[ABTestCreate, dict],
        request_options: Optional[Union[dict, RequestOptions]] = None,
    ) -> ABTestResponse:
        """
        Create a new A/B test.

        Parameters:
        - ab_test_create: Test configuration
        - request_options: Additional request options

        Returns:
            ABTestResponse with test creation details
        """

    async def stop_ab_test(
        self,
        id: int,
        request_options: Optional[Union[dict, RequestOptions]] = None,
    ) -> ABTestResponse:
        """
        Stop a running A/B test.

        Parameters:
        - id: Test identifier
        - request_options: Additional request options

        Returns:
            ABTestResponse with stop confirmation
        """

    async def delete_ab_test(
        self,
        id: int,
        request_options: Optional[Union[dict, RequestOptions]] = None,
    ) -> ABTestResponse:
        """
        Delete an A/B test.

        Parameters:
        - id: Test identifier
        - request_options: Additional request options

        Returns:
            ABTestResponse with deletion confirmation
        """


from algoliasearch.abtesting.client import AbtestingClient
# Initialize client
client = AbtestingClient("YOUR_APP_ID", "YOUR_API_KEY")
# Create an A/B test
test_response = await client.add_ab_test({
"name": "Search Ranking Test",
"variants": [
{
"index": "products_variant_a",
"trafficPercentage": 50,
"description": "Default ranking"
},
{
"index": "products_variant_b",
"trafficPercentage": 50,
"description": "Custom ranking with boost"
}
],
"endAt": "2024-12-31T23:59:59Z"
})
print(f"Test created with ID: {test_response.ab_test_id}")class ABTestCreate(BaseModel):
name: str
variants: List[ABTestVariant]
end_at: str
class ABTestVariant(BaseModel):
index: str
traffic_percentage: int
description: Optional[str] = None
class ABTest(BaseModel):
ab_test_id: int
name: str
variants: List[ABTestVariant]
status: str
created_at: str
end_at: Optional[str] = None
class ListABTestsResponse(BaseModel):
ab_tests: List[ABTest]
count: int
total: intInstall with Tessl CLI
npx tessl i tessl/pypi-algoliasearch