or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

ai-integration.md · client-management.md · configuration-options.md · data-types.md · error-handling.md · index.md · query-execution.md · schema-introspection.md · transaction-management.md

docs/ai-integration.md

# AI Integration

EdgeDB AI capabilities for retrieval-augmented generation (RAG) queries, natural language database interactions, and embeddings generation using EdgeDB's integrated AI features.

## Capabilities

### AI Client Creation

Factory functions for creating AI-enabled EdgeDB clients that can process natural language queries using RAG (Retrieval-Augmented Generation).

```python { .api }
def create_ai(client: Client, **kwargs) -> EdgeDBAI:
    """
    Create a synchronous EdgeDB AI client.

    Parameters:
    - client: EdgeDB Client instance (synchronous)
    - **kwargs: AIOptions arguments (model, prompt)

    Returns:
    EdgeDBAI instance for AI-powered RAG operations
    """

def create_async_ai(client: AsyncIOClient, **kwargs) -> AsyncEdgeDBAI:
    """
    Create an asynchronous EdgeDB AI client.

    Parameters:
    - client: AsyncIOClient instance
    - **kwargs: AIOptions arguments (model, prompt)

    Returns:
    AsyncEdgeDBAI instance for async AI RAG operations
    """
```

### AI Client Classes

Main classes for AI-powered database interactions using RAG and embeddings.

```python { .api }
class EdgeDBAI:
    """
    Synchronous EdgeDB AI client.

    Provides RAG (Retrieval-Augmented Generation) query capabilities
    and AI-powered database interactions using EdgeDB's integrated AI features.
    """

    def query_rag(
        self,
        message: str,
        context: Optional[QueryContext] = None
    ) -> str:
        """
        Execute RAG query using natural language message.

        Parameters:
        - message: Natural language query or message
        - context: Query context for AI processing

        Returns:
        RAG response string based on AI interpretation
        """

    def stream_rag(
        self,
        message: str,
        context: Optional[QueryContext] = None
    ) -> Iterator[str]:
        """
        Execute streaming RAG query using natural language message.

        Parameters:
        - message: Natural language query or message
        - context: Query context for AI processing

        Returns:
        Iterator yielding streaming RAG response chunks
        """

    def generate_embeddings(
        self,
        *inputs: str,
        model: str
    ) -> List[float]:
        """
        Generate embeddings for input texts.

        Parameters:
        - *inputs: Text inputs to generate embeddings for
        - model: Embedding model identifier

        Returns:
        List of floating-point embedding values
        """

    def with_config(self, **kwargs) -> EdgeDBAI:
        """
        Create new AI client with modified configuration.

        Parameters:
        - **kwargs: AIOptions parameters to override

        Returns:
        New EdgeDBAI instance with updated configuration
        """

    def with_context(self, **kwargs) -> EdgeDBAI:
        """
        Create new AI client with modified context.

        Parameters:
        - **kwargs: QueryContext parameters to override

        Returns:
        New EdgeDBAI instance with updated context
        """

class AsyncEdgeDBAI:
    """
    Asynchronous EdgeDB AI client.

    Async version of EdgeDBAI with identical method signatures
    but async/await support for RAG and embedding operations.
    """

    async def query_rag(
        self,
        message: str,
        context: Optional[QueryContext] = None
    ) -> str:
        """Async version of EdgeDBAI.query_rag()."""

    async def stream_rag(
        self,
        message: str,
        context: Optional[QueryContext] = None
    ) -> AsyncIterator[str]:
        """Async version of EdgeDBAI.stream_rag()."""

    async def generate_embeddings(
        self,
        *inputs: str,
        model: str
    ) -> List[float]:
        """Async version of EdgeDBAI.generate_embeddings()."""

    def with_config(self, **kwargs) -> AsyncEdgeDBAI:
        """Async version of EdgeDBAI.with_config()."""

    def with_context(self, **kwargs) -> AsyncEdgeDBAI:
        """Async version of EdgeDBAI.with_context()."""
```

### AI Configuration Types

Configuration classes and types for AI query processing and RAG operations.

```python { .api }
class AIOptions:
    """
    AI query configuration options.

    Controls AI model selection and prompt configuration for RAG operations.
    """

    def __init__(self, model: str, prompt: Optional[Prompt] = None):
        """
        Create AI options.

        Parameters:
        - model: AI model identifier
        - prompt: Custom prompt configuration
        """

    def derive(self, kwargs: Dict[str, Any]) -> AIOptions:
        """
        Create derived AIOptions with modified parameters.

        Parameters:
        - kwargs: Parameters to override

        Returns:
        New AIOptions instance with updated parameters
        """

class QueryContext:
    """
    AI query context for RAG processing.

    Provides context information for AI query processing including
    variables, globals, and processing constraints.
    """

    def __init__(
        self,
        query: str = "",
        variables: Optional[Dict[str, Any]] = None,
        globals: Optional[Dict[str, Any]] = None,
        max_object_count: Optional[int] = None
    ):
        """
        Create query context.

        Parameters:
        - query: Query string context
        - variables: Query variables for processing
        - globals: Global variables for context
        - max_object_count: Maximum number of objects to process
        """

    def derive(self, kwargs: Dict[str, Any]) -> QueryContext:
        """
        Create derived QueryContext with modified parameters.

        Parameters:
        - kwargs: Parameters to override

        Returns:
        New QueryContext instance with updated parameters
        """

class Prompt:
    """
    AI prompt configuration.

    Manages prompts and templates for AI RAG query generation.
    """

    name: Optional[str]
    """Named prompt template identifier."""

    id: Optional[str]
    """Prompt identifier."""

    custom: Optional[List[Custom]]
    """Custom prompt conversation history."""

class ChatParticipantRole(Enum):
    """
    Chat participant roles for conversation context.

    Defines roles in AI chat conversations for RAG processing.
    """
    SYSTEM = "system"        # System instructions and context
    USER = "user"            # User queries and requests
    ASSISTANT = "assistant"  # AI assistant responses
    TOOL = "tool"            # Tool/function call results

class Custom(TypedDict):
    """
    Custom chat message for prompt configuration.

    Used in Prompt.custom for conversation history.
    """
    role: ChatParticipantRole
    content: str
```

## Usage Examples

### Basic RAG Queries

```python
import edgedb
from edgedb.ai import create_ai

# Create regular client
client = edgedb.create_client()

# Create AI client with RAG capabilities
ai = create_ai(client, model="gpt-4")

# Natural language RAG queries
response = ai.query_rag("What users have registered in the last month?")
print(f"RAG Response: {response}")

# Get information about database structure
schema_info = ai.query_rag("Describe the schema and main object types")
print(f"Schema Info: {schema_info}")

# Ask about data patterns
patterns = ai.query_rag("What are the most common user activity patterns?")
print(f"Patterns: {patterns}")
```

### Streaming RAG Queries

```python
import edgedb
from edgedb.ai import create_ai

client = edgedb.create_client()
ai = create_ai(client, model="gpt-4")

# Stream RAG response for long queries
message = "Analyze user engagement trends and provide detailed insights"

print("RAG Response (streaming):")
for chunk in ai.stream_rag(message):
    print(chunk, end="", flush=True)
print()  # New line after streaming
```

### Async RAG Queries

```python
import asyncio
import edgedb
from edgedb.ai import create_async_ai

async def main():
    # Create async client
    client = edgedb.create_async_client()

    # Create async AI client
    ai = create_async_ai(client, model="gpt-4")

    # Natural language RAG queries with async/await
    response = await ai.query_rag("Find patterns in user behavior data")
    print(f"Async RAG Response: {response}")

    # Streaming async RAG
    print("Streaming RAG response:")
    async for chunk in ai.stream_rag("Provide detailed analysis of recent data"):
        print(chunk, end="", flush=True)
    print()

    await client.aclose()

asyncio.run(main())
```

### RAG with Custom Context

```python
import edgedb
from edgedb.ai import create_ai, QueryContext

client = edgedb.create_client()
ai = create_ai(client, model="gpt-4")

# Create custom context for RAG queries
context = QueryContext(
    query="user_analysis",
    variables={"timeframe": "last_30_days"},
    globals={"app_name": "my_app"},
    max_object_count=1000
)

# Query with custom context
response = ai.query_rag(
    "Analyze user engagement within the specified timeframe",
    context=context
)
print(f"Contextual RAG Response: {response}")

# Use with_context for modified context
ai_with_context = ai.with_context(
    variables={"timeframe": "last_7_days"},
    max_object_count=500
)

weekly_response = ai_with_context.query_rag("Show recent user activity")
print(f"Weekly Response: {weekly_response}")
```

### Embeddings Generation

```python
import edgedb
from edgedb.ai import create_ai

client = edgedb.create_client()
ai = create_ai(client, model="gpt-4")

# Generate embeddings for text inputs
texts = [
    "User authentication and security",
    "Database query optimization",
    "Real-time data synchronization"
]

embeddings = ai.generate_embeddings(*texts, model="text-embedding-ada-002")
print(f"Generated {len(embeddings)} embedding dimensions")
print(f"First few values: {embeddings[:5]}")

# Use embeddings for similarity search or clustering
# (Integration with vector database operations)
```

### AI Configuration Management

```python
import edgedb
from edgedb.ai import create_ai, AIOptions, Prompt, ChatParticipantRole

client = edgedb.create_client()

# Create AI client with specific configuration
ai_options = AIOptions(
    model="gpt-4",
    prompt=Prompt(
        name="database_analyst",
        custom=[
            {
                "role": ChatParticipantRole.SYSTEM,
                "content": "You are a database analyst specializing in EdgeDB schemas and query optimization."
            },
            {
                "role": ChatParticipantRole.USER,
                "content": "Focus on providing actionable insights about data patterns."
            }
        ]
    )
)

ai = create_ai(client, **ai_options.__dict__)

# Use configured AI client
analysis = ai.query_rag("What optimization opportunities exist in this database?")
print(f"Expert Analysis: {analysis}")

# Create variant with different model
ai_variant = ai.with_config(model="gpt-3.5-turbo")
quick_summary = ai_variant.query_rag("Provide a brief database summary")
print(f"Quick Summary: {quick_summary}")
```

### Error Handling with RAG

```python
import edgedb
from edgedb.ai import create_ai
from edgedb import EdgeDBError

client = edgedb.create_client()
ai = create_ai(client, model="gpt-4")

def safe_rag_query(ai_client, message, max_retries=3):
    """Execute RAG query with error handling and retries."""

    for attempt in range(max_retries):
        try:
            response = ai_client.query_rag(message)
            return response

        except EdgeDBError as e:
            print(f"EdgeDB error on attempt {attempt + 1}: {e}")
            if attempt == max_retries - 1:
                raise
        except Exception as e:
            print(f"AI error on attempt {attempt + 1}: {e}")
            if attempt == max_retries - 1:
                return f"Unable to process query after {max_retries} attempts: {e}"

    return None

# Safe RAG query execution
result = safe_rag_query(ai, "Analyze database performance metrics")
if result:
    print(f"RAG Result: {result}")
else:
    print("Query failed after all retries")
```

### Integration with Traditional Queries

```python
import edgedb
from edgedb.ai import create_ai

client = edgedb.create_client()
ai = create_ai(client, model="gpt-4")

def hybrid_analysis(natural_query: str, fallback_edgeql: str = None):
    """Combine RAG with traditional EdgeQL for comprehensive analysis."""

    # Get RAG insights
    try:
        rag_insights = ai.query_rag(f"Provide insights: {natural_query}")
        print(f"AI Insights: {rag_insights}")
    except Exception as e:
        print(f"RAG query failed: {e}")
        rag_insights = None

    # Execute traditional query if provided
    if fallback_edgeql:
        try:
            data = client.query(fallback_edgeql)
            print(f"Raw data: {len(data)} records")
            return {"insights": rag_insights, "data": data}
        except Exception as e:
            print(f"EdgeQL query failed: {e}")

    return {"insights": rag_insights, "data": None}

# Combined analysis approach
result = hybrid_analysis(
    natural_query="User engagement trends over time",
    fallback_edgeql="SELECT User { name, created_at, last_login } ORDER BY .created_at DESC"
)

if result["insights"]:
    print("Analysis completed with AI insights")
if result["data"]:
    print(f"Supporting data: {len(result['data'])} records")
```