# Runs

Execute assistants on threads and handle tool calls. Runs represent the execution of an assistant on a thread, managing the conversation flow and tool interactions.

## Capabilities

### Create Run

Execute an assistant on a thread.

```python { .api }
def create(
    self,
    thread_id: str,
    *,
    assistant_id: str,
    additional_instructions: str | Omit = omit,
    additional_messages: list[dict] | Omit = omit,
    instructions: str | Omit = omit,
    max_completion_tokens: int | Omit = omit,
    max_prompt_tokens: int | Omit = omit,
    metadata: dict[str, str] | Omit = omit,
    model: str | Omit = omit,
    parallel_tool_calls: bool | Omit = omit,
    response_format: dict | Omit = omit,
    stream: bool | Omit = omit,
    temperature: float | Omit = omit,
    tool_choice: str | dict | Omit = omit,
    tools: list[dict] | Omit = omit,
    top_p: float | Omit = omit,
    truncation_strategy: dict | Omit = omit,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Run:
    """
    Run an assistant on a thread.

    Args:
        thread_id: The thread ID.
        assistant_id: The assistant ID.
        additional_instructions: Append to assistant instructions.
        additional_messages: Add messages before run.
        instructions: Override assistant instructions.
        max_completion_tokens: Maximum completion tokens.
        max_prompt_tokens: Maximum prompt tokens.
        metadata: Key-value pairs (max 16).
        model: Override assistant model.
        parallel_tool_calls: Enable parallel tool calls.
        response_format: Override response format.
        stream: Enable streaming.
        temperature: Sampling temperature.
        tool_choice: Tool choice configuration.
        tools: Override assistant tools.
        top_p: Nucleus sampling.
        truncation_strategy: Message truncation config.

    Returns:
        Run: Created run.
    """
```

Usage examples:

```python
from openai import OpenAI

client = OpenAI()

# Basic run
run = client.beta.threads.runs.create(
    thread_id="thread_abc123",
    assistant_id="asst_abc123"
)

print(f"Run ID: {run.id}")
print(f"Status: {run.status}")

# With additional instructions
run = client.beta.threads.runs.create(
    thread_id="thread_abc123",
    assistant_id="asst_abc123",
    additional_instructions="Be concise."
)

# Override model
run = client.beta.threads.runs.create(
    thread_id="thread_abc123",
    assistant_id="asst_abc123",
    model="gpt-4-turbo"
)

# With streaming
stream = client.beta.threads.runs.create(
    thread_id="thread_abc123",
    assistant_id="asst_abc123",
    stream=True
)

for event in stream:
    print(event)
```


### Retrieve Run

Get run status and details.

```python { .api }
def retrieve(
    self,
    thread_id: str,
    run_id: str,
    *,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Run:
    """Get run details."""
```

Usage example:

```python
run = client.beta.threads.runs.retrieve(
    thread_id="thread_abc123",
    run_id="run_abc123"
)

print(f"Status: {run.status}")
print(f"Model: {run.model}")
```

### Update Run

Update a run's metadata.

```python { .api }
def update(
    self,
    run_id: str,
    *,
    thread_id: str,
    metadata: dict[str, str] | Omit = omit,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Run:
    """
    Modify a run's metadata.

    Args:
        run_id: The run ID to update.
        thread_id: The thread ID containing the run.
        metadata: Set of up to 16 key-value pairs for storing additional information.
            Keys max 64 characters, values max 512 characters.
        extra_headers: Additional HTTP headers.
        extra_query: Additional query parameters.
        extra_body: Additional JSON fields.
        timeout: Request timeout in seconds.

    Returns:
        Run: Updated run object.
    """
```

Usage example:

```python
# Update run metadata
run = client.beta.threads.runs.update(
    run_id="run_abc123",
    thread_id="thread_abc123",
    metadata={
        "user_id": "user-456",
        "priority": "high"
    }
)

print(f"Updated metadata: {run.metadata}")
```

### List Runs

List runs for a thread.

```python { .api }
def list(
    self,
    thread_id: str,
    *,
    after: str | Omit = omit,
    before: str | Omit = omit,
    limit: int | Omit = omit,
    order: Literal["asc", "desc"] | Omit = omit,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SyncCursorPage[Run]:
    """List runs for a thread."""
```

### Cancel Run

Cancel an in-progress run.

```python { .api }
def cancel(
    self,
    thread_id: str,
    run_id: str,
    *,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Run:
    """Cancel a run."""
```

Usage example:

```python
run = client.beta.threads.runs.cancel(
    thread_id="thread_abc123",
    run_id="run_abc123"
)

print(f"Status: {run.status}")  # "cancelling" or "cancelled"
```

### Submit Tool Outputs

Submit results of tool calls back to the run.

```python { .api }
def submit_tool_outputs(
    self,
    thread_id: str,
    run_id: str,
    *,
    tool_outputs: list[dict],
    stream: bool | Omit = omit,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Run:
    """
    Submit tool call results.

    Args:
        thread_id: The thread ID.
        run_id: The run ID.
        tool_outputs: List of tool outputs.
            [{"tool_call_id": "call_abc", "output": "result"}]
        stream: Enable streaming.

    Returns:
        Run: Updated run.
    """
```

Usage example:

```python
# Check for required actions
run = client.beta.threads.runs.retrieve(
    thread_id="thread_abc123",
    run_id="run_abc123"
)

if run.status == "requires_action":
    tool_calls = run.required_action.submit_tool_outputs.tool_calls

    # Execute tool calls
    tool_outputs = []
    for tool_call in tool_calls:
        function_name = tool_call.function.name
        arguments = tool_call.function.arguments

        # Call your function
        result = execute_function(function_name, arguments)

        tool_outputs.append({
            "tool_call_id": tool_call.id,
            "output": str(result)
        })

    # Submit outputs
    run = client.beta.threads.runs.submit_tool_outputs(
        thread_id="thread_abc123",
        run_id="run_abc123",
        tool_outputs=tool_outputs
    )
```

### Polling Helpers

Wait for run completion with automatic polling.

```python { .api }
def create_and_poll(
    self,
    thread_id: str,
    *,
    assistant_id: str,
    poll_interval_ms: int = 1000,
    **kwargs
) -> Run:
    """Create run and poll until completion."""

def poll(
    self,
    thread_id: str,
    run_id: str,
    *,
    poll_interval_ms: int = 1000,
) -> Run:
    """Poll run until completion."""

def submit_tool_outputs_and_poll(
    self,
    thread_id: str,
    run_id: str,
    *,
    tool_outputs: list[dict],
    poll_interval_ms: int = 1000,
) -> Run:
    """Submit tool outputs and poll until completion."""
```

Usage examples:

```python
# Create and wait for completion
run = client.beta.threads.runs.create_and_poll(
    thread_id="thread_abc123",
    assistant_id="asst_abc123"
)

print(f"Final status: {run.status}")

# Poll existing run
run = client.beta.threads.runs.poll(
    thread_id="thread_abc123",
    run_id="run_abc123"
)

# Submit and wait
run = client.beta.threads.runs.submit_tool_outputs_and_poll(
    thread_id="thread_abc123",
    run_id="run_abc123",
    tool_outputs=[{"tool_call_id": "call_abc", "output": "42"}]
)
```

### Streaming

Stream run events in real-time.

```python { .api }
def stream(
    self,
    thread_id: str,
    *,
    assistant_id: str,
    **kwargs
) -> AssistantStreamManager:
    """Stream run events with event handler."""

def create_and_stream(
    self,
    thread_id: str,
    *,
    assistant_id: str,
    **kwargs
) -> AssistantStreamManager:
    """Create run and stream events."""
```

Usage example:

```python
from openai import AssistantEventHandler

# Define event handler
class EventHandler(AssistantEventHandler):
    def on_text_created(self, text):
        print(f"\nassistant > ", end="", flush=True)

    def on_text_delta(self, delta, snapshot):
        print(delta.value, end="", flush=True)

    def on_tool_call_created(self, tool_call):
        print(f"\nassistant > {tool_call.type}\n", flush=True)

# Stream events
with client.beta.threads.runs.stream(
    thread_id="thread_abc123",
    assistant_id="asst_abc123",
    event_handler=EventHandler()
) as stream:
    stream.until_done()
```

### Submit Tool Outputs with Streaming

Submit tool outputs and stream the run to completion in real-time.

```python { .api }
def submit_tool_outputs_stream(
    self,
    *,
    tool_outputs: list[dict],
    run_id: str,
    thread_id: str,
    event_handler: AssistantEventHandler | None = None,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> AssistantStreamManager[AssistantEventHandler]:
    """
    Submit tool outputs and stream the run to terminal state.

    Helper method that submits tool call results and streams run events in real-time
    until the run reaches a terminal state. More information on Run lifecycles:
    https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps

    Args:
        tool_outputs: List of tool call outputs.
            [{"tool_call_id": "call_abc", "output": "result"}]
        run_id: The run ID requiring tool outputs.
        thread_id: The thread ID containing the run.
        event_handler: Optional custom event handler for processing stream events.
        extra_headers: Additional HTTP headers.
        extra_query: Additional query parameters.
        extra_body: Additional JSON fields.
        timeout: Request timeout in seconds.

    Returns:
        AssistantStreamManager: Stream manager for handling assistant events.
    """
```

Usage example:

```python
from openai import AssistantEventHandler

# Stream tool output submission
with client.beta.threads.runs.submit_tool_outputs_stream(
    thread_id="thread_abc123",
    run_id="run_abc123",
    tool_outputs=[
        {"tool_call_id": "call_abc", "output": "42"},
        {"tool_call_id": "call_def", "output": "Hello"}
    ]
) as stream:
    for event in stream:
        if event.event == 'thread.run.completed':
            print("Run completed!")

# With custom event handler
class ToolOutputHandler(AssistantEventHandler):
    def on_text_delta(self, delta, snapshot):
        print(delta.value, end='', flush=True)

    def on_run_step_done(self, run_step):
        print(f"\nStep {run_step.id} completed")

with client.beta.threads.runs.submit_tool_outputs_stream(
    thread_id="thread_abc123",
    run_id="run_abc123",
    tool_outputs=[{"tool_call_id": "call_abc", "output": "result"}],
    event_handler=ToolOutputHandler()
) as stream:
    stream.until_done()
```

### Run Steps

View detailed steps of a run.

```python { .api }
def retrieve(
    self,
    thread_id: str,
    run_id: str,
    step_id: str,
    *,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> RunStep:
    """Retrieve run step details."""

def list(
    self,
    thread_id: str,
    run_id: str,
    *,
    after: str | Omit = omit,
    before: str | Omit = omit,
    limit: int | Omit = omit,
    order: Literal["asc", "desc"] | Omit = omit,
    extra_headers: dict[str, str] | None = None,
    extra_query: dict[str, object] | None = None,
    extra_body: dict[str, object] | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SyncCursorPage[RunStep]:
    """List run steps."""
```

Usage example:

```python
# List steps
steps = client.beta.threads.runs.steps.list(
    thread_id="thread_abc123",
    run_id="run_abc123"
)

for step in steps:
    print(f"Step: {step.type}")
    print(f"Status: {step.status}")
```

## Types

```python { .api }
from typing import Literal
from pydantic import BaseModel

class Run(BaseModel):
    """Assistant run."""
    id: str
    assistant_id: str
    cancelled_at: int | None
    completed_at: int | None
    created_at: int
    expires_at: int
    failed_at: int | None
    incomplete_details: dict | None
    instructions: str
    last_error: dict | None
    max_completion_tokens: int | None
    max_prompt_tokens: int | None
    metadata: dict[str, str] | None
    model: str
    object: Literal["thread.run"]
    parallel_tool_calls: bool
    required_action: RequiredAction | None
    response_format: dict | None
    started_at: int | None
    status: Literal[
        "queued", "in_progress", "requires_action",
        "cancelling", "cancelled", "failed",
        "completed", "incomplete", "expired"
    ]
    thread_id: str
    tool_choice: dict | str
    tools: list[dict]
    truncation_strategy: dict | None
    usage: Usage | None
    temperature: float | None
    top_p: float | None

class RequiredAction(BaseModel):
    """Required action for run."""
    type: Literal["submit_tool_outputs"]
    submit_tool_outputs: SubmitToolOutputs

class SubmitToolOutputs(BaseModel):
    """Tool outputs to submit."""
    tool_calls: list[ToolCall]

class ToolCall(BaseModel):
    """Tool call from assistant."""
    id: str
    type: Literal["function"]
    function: Function

class Function(BaseModel):
    """Function call details."""
    name: str
    arguments: str  # JSON string

class Usage(BaseModel):
    """Token usage."""
    completion_tokens: int
    prompt_tokens: int
    total_tokens: int

class RunStep(BaseModel):
    """Individual run step."""
    id: str
    assistant_id: str
    cancelled_at: int | None
    completed_at: int | None
    created_at: int
    expired_at: int | None
    failed_at: int | None
    last_error: dict | None
    metadata: dict[str, str] | None
    object: Literal["thread.run.step"]
    run_id: str
    status: Literal[
        "in_progress", "cancelled", "failed",
        "completed", "expired"
    ]
    step_details: StepDetails
    thread_id: str
    type: Literal["message_creation", "tool_calls"]
    usage: Usage | None

class StepDetails(BaseModel):
    """Step details (message or tool calls)."""
    type: Literal["message_creation", "tool_calls"]
    message_creation: dict | None
    tool_calls: list[dict] | None
```

## Complete Example with Tool Calls

```python
from openai import OpenAI
import json

client = OpenAI()

# 1. Create assistant with function
assistant = client.beta.assistants.create(
    name="Weather Assistant",
    instructions="Help with weather queries.",
    model="gpt-4",
    tools=[{
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get weather for a location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string"}
                },
                "required": ["location"]
            }
        }
    }]
)

# 2. Create thread and message
thread = client.beta.threads.create()

client.beta.threads.messages.create(
    thread_id=thread.id,
    role="user",
    content="What's the weather in San Francisco?"
)

# 3. Run assistant
run = client.beta.threads.runs.create_and_poll(
    thread_id=thread.id,
    assistant_id=assistant.id
)

# 4. Handle tool calls
if run.status == "requires_action":
    tool_calls = run.required_action.submit_tool_outputs.tool_calls

    tool_outputs = []
    for tool_call in tool_calls:
        if tool_call.function.name == "get_weather":
            args = json.loads(tool_call.function.arguments)
            # Call actual weather API
            result = {"temp": 72, "condition": "sunny"}

            tool_outputs.append({
                "tool_call_id": tool_call.id,
                "output": json.dumps(result)
            })

    # Submit and wait
    run = client.beta.threads.runs.submit_tool_outputs_and_poll(
        thread_id=thread.id,
        run_id=run.id,
        tool_outputs=tool_outputs
    )

# 5. Get final response
messages = client.beta.threads.messages.list(thread_id=thread.id)
print(messages.data[0].content[0].text.value)
```

## Async Usage

```python
import asyncio
from openai import AsyncOpenAI

async def run_assistant():
    client = AsyncOpenAI()

    run = await client.beta.threads.runs.create(
        thread_id="thread_abc123",
        assistant_id="asst_abc123"
    )

    while run.status not in ["completed", "failed"]:
        await asyncio.sleep(1)
        run = await client.beta.threads.runs.retrieve(
            thread_id="thread_abc123",
            run_id=run.id
        )

    return run

run = asyncio.run(run_assistant())
```