pypi-openai

Description
Official Python library for the OpenAI API providing chat completions, embeddings, audio, images, and more
Author
tessl
Last updated

How to use

npx @tessl/cli registry install tessl/pypi-openai@1.106.0

assistants.md docs/

1
# Assistants API
2
3
Build AI assistants with persistent conversations, file access, function calling, and code interpretation capabilities using the beta assistants framework. Note: OpenAI has deprecated the Assistants API in favor of the newer Responses API — plan new development accordingly.
4
5
## Capabilities
6
7
### Assistant Management
8
9
Create, configure, and manage AI assistants with specific instructions and capabilities.
10
11
```python { .api }
12
def create(
13
self,
14
*,
15
model: str,
16
description: str | NotGiven = NOT_GIVEN,
17
instructions: str | NotGiven = NOT_GIVEN,
18
name: str | NotGiven = NOT_GIVEN,
19
tools: List[AssistantToolUnionParam] | NotGiven = NOT_GIVEN,
20
tool_resources: ToolResourcesParam | NotGiven = NOT_GIVEN,
21
metadata: Optional[object] | NotGiven = NOT_GIVEN,
22
temperature: float | NotGiven = NOT_GIVEN,
23
top_p: float | NotGiven = NOT_GIVEN,
24
response_format: AssistantResponseFormatParam | NotGiven = NOT_GIVEN
25
) -> Assistant: ...
26
27
def list(
28
self,
29
*,
30
after: str | NotGiven = NOT_GIVEN,
31
before: str | NotGiven = NOT_GIVEN,
32
limit: int | NotGiven = NOT_GIVEN,
33
order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN
34
) -> SyncCursorPage[Assistant]: ...
35
36
def retrieve(
37
self,
38
assistant_id: str
39
) -> Assistant: ...
40
41
def update(
42
self,
43
assistant_id: str,
44
*,
45
model: str | NotGiven = NOT_GIVEN,
46
name: str | NotGiven = NOT_GIVEN,
47
description: str | NotGiven = NOT_GIVEN,
48
instructions: str | NotGiven = NOT_GIVEN,
49
tools: List[AssistantToolUnionParam] | NotGiven = NOT_GIVEN,
50
tool_resources: ToolResourcesParam | NotGiven = NOT_GIVEN,
51
metadata: Optional[object] | NotGiven = NOT_GIVEN,
52
temperature: float | NotGiven = NOT_GIVEN,
53
top_p: float | NotGiven = NOT_GIVEN,
54
response_format: AssistantResponseFormatParam | NotGiven = NOT_GIVEN
55
) -> Assistant: ...
56
57
def delete(
58
self,
59
assistant_id: str
60
) -> AssistantDeleted: ...
61
```
62
63
Usage examples:
64
65
```python
66
from openai import OpenAI
67
68
client = OpenAI()
69
70
# Create a basic assistant
71
assistant = client.beta.assistants.create(
72
name="Math Tutor",
73
instructions="You are a personal math tutor. Write and run code to answer math questions.",
74
tools=[{"type": "code_interpreter"}],
75
model="gpt-4-turbo"
76
)
77
78
print(f"Created assistant: {assistant.id}")
79
print(f"Name: {assistant.name}")
80
81
# Create assistant with file search
82
assistant = client.beta.assistants.create(
83
name="Research Assistant",
84
instructions="You are a helpful research assistant. Use the provided documents to answer questions.",
85
tools=[{"type": "file_search"}],
86
model="gpt-4-turbo"
87
)
88
89
# Create assistant with function calling
90
assistant = client.beta.assistants.create(
91
name="Weather Assistant",
92
instructions="You help users get weather information. Use the get_weather function when needed.",
93
tools=[
94
{
95
"type": "function",
96
"function": {
97
"name": "get_weather",
98
"description": "Get current weather for a location",
99
"parameters": {
100
"type": "object",
101
"properties": {
102
"location": {
103
"type": "string",
104
"description": "City name"
105
}
106
},
107
"required": ["location"]
108
}
109
}
110
}
111
],
112
model="gpt-4-turbo"
113
)
114
115
# List assistants
116
assistants = client.beta.assistants.list(limit=10)
117
118
print("Your assistants:")
119
for assistant in assistants:
120
print(f" {assistant.id}: {assistant.name}")
121
122
# Update assistant
123
updated_assistant = client.beta.assistants.update(
124
assistant.id,
125
instructions="You are an advanced math tutor specializing in calculus and linear algebra.",
126
tools=[{"type": "code_interpreter"}]
127
)
128
129
# Delete assistant
130
deletion_result = client.beta.assistants.delete(assistant.id)
131
print(f"Assistant deleted: {deletion_result.deleted}")
132
```
133
134
### Thread Management
135
136
Manage conversation threads for persistent multi-turn conversations with assistants.
137
138
```python { .api }
139
def create(
140
self,
141
*,
142
messages: List[ThreadMessageParam] | NotGiven = NOT_GIVEN,
143
tool_resources: ToolResourcesParam | NotGiven = NOT_GIVEN,
144
metadata: Optional[object] | NotGiven = NOT_GIVEN
145
) -> Thread: ...
146
147
def retrieve(
148
self,
149
thread_id: str
150
) -> Thread: ...
151
152
def update(
153
self,
154
thread_id: str,
155
*,
156
tool_resources: ToolResourcesParam | NotGiven = NOT_GIVEN,
157
metadata: Optional[object] | NotGiven = NOT_GIVEN
158
) -> Thread: ...
159
160
def delete(
161
self,
162
thread_id: str
163
) -> ThreadDeleted: ...
164
```
165
166
Usage examples:
167
168
```python
169
# Create empty thread
170
thread = client.beta.threads.create()
171
print(f"Created thread: {thread.id}")
172
173
# Create thread with initial messages
174
thread = client.beta.threads.create(
175
messages=[
176
{
177
"role": "user",
178
"content": "I need help with calculus. Can you explain derivatives?"
179
}
180
]
181
)
182
183
print(f"Thread with initial message: {thread.id}")
184
185
# Create thread with file attachments
186
thread = client.beta.threads.create(
187
messages=[
188
{
189
"role": "user",
190
"content": "Please analyze this data file",
191
"attachments": [
192
{
193
"file_id": "file-abc123",
194
"tools": [{"type": "code_interpreter"}]
195
}
196
]
197
}
198
]
199
)
200
201
# Update thread metadata
202
updated_thread = client.beta.threads.update(
203
thread.id,
204
metadata={"user_id": "user123", "session": "2024-01"}
205
)
206
207
# Retrieve thread
208
thread_info = client.beta.threads.retrieve(thread.id)
209
print(f"Thread metadata: {thread_info.metadata}")
210
211
# Delete thread
212
deletion_result = client.beta.threads.delete(thread.id)
213
print(f"Thread deleted: {deletion_result.deleted}")
214
```
215
216
### Message Management
217
218
Add, retrieve, and manage messages within conversation threads.
219
220
```python { .api }
221
def create(
222
self,
223
thread_id: str,
224
*,
225
role: Literal["user", "assistant"],
226
content: Union[str, List[MessageContentPartParam]],
227
attachments: Optional[List[AttachmentParam]] | NotGiven = NOT_GIVEN,
228
metadata: Optional[object] | NotGiven = NOT_GIVEN
229
) -> ThreadMessage: ...
230
231
def list(
232
self,
233
thread_id: str,
234
*,
235
after: str | NotGiven = NOT_GIVEN,
236
before: str | NotGiven = NOT_GIVEN,
237
limit: int | NotGiven = NOT_GIVEN,
238
order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
239
run_id: str | NotGiven = NOT_GIVEN
240
) -> SyncCursorPage[ThreadMessage]: ...
241
242
def retrieve(
243
self,
244
thread_id: str,
245
message_id: str
246
) -> ThreadMessage: ...
247
248
def update(
249
self,
250
thread_id: str,
251
message_id: str,
252
*,
253
metadata: Optional[object] | NotGiven = NOT_GIVEN
254
) -> ThreadMessage: ...
255
```
256
257
Usage examples:
258
259
```python
260
# Add user message to thread
261
message = client.beta.threads.messages.create(
262
thread_id=thread.id,
263
role="user",
264
content="What is the derivative of x^2?"
265
)
266
267
print(f"Added message: {message.id}")
268
269
# Add message with file attachment
270
message = client.beta.threads.messages.create(
271
thread_id=thread.id,
272
role="user",
273
content="Please analyze this dataset",
274
attachments=[
275
{
276
"file_id": "file-abc123",
277
"tools": [{"type": "code_interpreter"}]
278
}
279
]
280
)
281
282
# Add message with image
283
message = client.beta.threads.messages.create(
284
thread_id=thread.id,
285
role="user",
286
content=[
287
{
288
"type": "text",
289
"text": "What do you see in this image?"
290
},
291
{
292
"type": "image_file",
293
"image_file": {"file_id": "file-image123"}
294
}
295
]
296
)
297
298
# List messages in thread
299
messages = client.beta.threads.messages.list(
300
thread_id=thread.id,
301
order="desc",
302
limit=20
303
)
304
305
print("Thread conversation:")
306
for message in reversed(list(messages)):
307
role = message.role
308
content = message.content[0].text.value if message.content and message.content[0].type == "text" else ""
309
print(f"{role}: {content}")
310
311
# Get specific message
312
message = client.beta.threads.messages.retrieve(
313
thread_id=thread.id,
314
message_id=message.id
315
)
316
317
print(f"Message details: {message.content}")
318
```
319
320
### Run Management
321
322
Execute assistants on threads and manage conversation runs with streaming support.
323
324
```python { .api }
325
def create(
326
self,
327
thread_id: str,
328
*,
329
assistant_id: str,
330
model: str | NotGiven = NOT_GIVEN,
331
instructions: str | NotGiven = NOT_GIVEN,
332
additional_instructions: str | NotGiven = NOT_GIVEN,
333
additional_messages: List[ThreadMessageParam] | NotGiven = NOT_GIVEN,
334
tools: List[AssistantToolUnionParam] | NotGiven = NOT_GIVEN,
335
metadata: Optional[object] | NotGiven = NOT_GIVEN,
336
temperature: float | NotGiven = NOT_GIVEN,
337
top_p: float | NotGiven = NOT_GIVEN,
338
stream: Optional[bool] | NotGiven = NOT_GIVEN,
339
max_prompt_tokens: int | NotGiven = NOT_GIVEN,
340
max_completion_tokens: int | NotGiven = NOT_GIVEN,
341
truncation_strategy: TruncationStrategyParam | NotGiven = NOT_GIVEN,
342
tool_choice: AssistantToolChoiceParam | NotGiven = NOT_GIVEN,
343
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
344
response_format: AssistantResponseFormatParam | NotGiven = NOT_GIVEN
345
) -> Run | Stream[AssistantStreamEvent]: ...
346
347
def retrieve(
348
self,
349
thread_id: str,
350
run_id: str
351
) -> Run: ...
352
353
def update(
354
self,
355
thread_id: str,
356
run_id: str,
357
*,
358
metadata: Optional[object] | NotGiven = NOT_GIVEN
359
) -> Run: ...
360
361
def list(
362
self,
363
thread_id: str,
364
*,
365
after: str | NotGiven = NOT_GIVEN,
366
before: str | NotGiven = NOT_GIVEN,
367
limit: int | NotGiven = NOT_GIVEN,
368
order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN
369
) -> SyncCursorPage[Run]: ...
370
371
def cancel(
372
self,
373
thread_id: str,
374
run_id: str
375
) -> Run: ...
376
377
def submit_tool_outputs(
378
self,
379
thread_id: str,
380
run_id: str,
381
*,
382
tool_outputs: List[ToolOutputParam],
383
stream: Optional[bool] | NotGiven = NOT_GIVEN
384
) -> Run | Stream[AssistantStreamEvent]: ...
385
```
386
387
Usage examples:
388
389
```python
390
import time
391
392
# Create and run assistant
393
run = client.beta.threads.runs.create(
394
thread_id=thread.id,
395
assistant_id=assistant.id
396
)
397
398
print(f"Started run: {run.id}")
399
print(f"Status: {run.status}")
400
401
# Poll run until completion
402
def wait_for_run_completion(thread_id: str, run_id: str):
403
"""Wait for run to complete"""
404
405
while True:
406
run = client.beta.threads.runs.retrieve(
407
thread_id=thread_id,
408
run_id=run_id
409
)
410
411
print(f"Run status: {run.status}")
412
413
if run.status in ["completed", "failed", "cancelled", "expired"]:
414
return run
415
elif run.status == "requires_action":
416
print("Run requires action (function calls)")
417
return run
418
419
time.sleep(1)
420
421
# Wait for completion
422
completed_run = wait_for_run_completion(thread.id, run.id)
423
424
if completed_run.status == "completed":
425
# Get assistant's response
426
messages = client.beta.threads.messages.list(thread_id=thread.id)
427
428
latest_message = messages.data[0]
429
if latest_message.role == "assistant":
430
print(f"Assistant: {latest_message.content[0].text.value}")
431
432
# Handle function calling
433
def handle_function_calls(thread_id: str, run_id: str):
434
"""Handle required function calls"""
435
436
run = client.beta.threads.runs.retrieve(
437
thread_id=thread_id,
438
run_id=run_id
439
)
440
441
if run.status == "requires_action":
442
tool_calls = run.required_action.submit_tool_outputs.tool_calls
443
tool_outputs = []
444
445
for tool_call in tool_calls:
446
function_name = tool_call.function.name
447
function_args = tool_call.function.arguments
448
449
# Call your function
450
if function_name == "get_weather":
451
import json
452
args = json.loads(function_args)
453
# Your weather function implementation
454
result = f"Weather in {args['location']}: Sunny, 75°F"
455
else:
456
result = "Function not implemented"
457
458
tool_outputs.append({
459
"tool_call_id": tool_call.id,
460
"output": result
461
})
462
463
# Submit function outputs
464
run = client.beta.threads.runs.submit_tool_outputs(
465
thread_id=thread_id,
466
run_id=run_id,
467
tool_outputs=tool_outputs
468
)
469
470
return run
471
472
# Example with function calling
473
weather_assistant = client.beta.assistants.create(
474
name="Weather Helper",
475
instructions="Get weather information when requested",
476
tools=[
477
{
478
"type": "function",
479
"function": {
480
"name": "get_weather",
481
"description": "Get weather for a location",
482
"parameters": {
483
"type": "object",
484
"properties": {
485
"location": {"type": "string"}
486
},
487
"required": ["location"]
488
}
489
}
490
}
491
],
492
model="gpt-4-turbo"
493
)
494
495
# Add weather question
496
client.beta.threads.messages.create(
497
thread_id=thread.id,
498
role="user",
499
content="What's the weather like in San Francisco?"
500
)
501
502
# Run with function calling
503
run = client.beta.threads.runs.create(
504
thread_id=thread.id,
505
assistant_id=weather_assistant.id
506
)
507
508
# Handle function calls
509
completed_run = wait_for_run_completion(thread.id, run.id)
510
511
if completed_run.status == "requires_action":
512
run = handle_function_calls(thread.id, run.id)
513
final_run = wait_for_run_completion(thread.id, run.id)
514
```
515
516
### Streaming Runs
517
518
Handle real-time streaming responses from assistant runs for better user experience.
519
520
```python { .api }
521
def create(
522
self,
523
thread_id: str,
524
*,
525
assistant_id: str,
526
stream: Literal[True],
527
# ... other parameters
528
) -> Stream[AssistantStreamEvent]: ...
529
```
530
531
Usage examples:
532
533
```python
534
# Stream assistant response
535
stream = client.beta.threads.runs.create(
536
thread_id=thread.id,
537
assistant_id=assistant.id,
538
stream=True
539
)
540
541
print("Streaming assistant response:")
542
543
for event in stream:
544
if event.event == "thread.message.delta":
545
if event.data.delta.content and hasattr(event.data.delta.content[0], 'text'):
546
print(event.data.delta.content[0].text.value, end="", flush=True)
547
elif event.event == "thread.run.completed":
548
print("\nRun completed")
549
break
550
elif event.event == "thread.run.failed":
551
print(f"\nRun failed: {event.data.last_error}")
552
break
553
554
# Advanced streaming handler
555
class AssistantStreamHandler:
556
def __init__(self):
557
self.current_message = ""
558
self.tool_calls = []
559
560
def handle_event(self, event):
561
if event.event == "thread.message.delta":
562
# Handle message content updates
563
delta = event.data.delta
564
if delta.content:
565
for content in delta.content:
566
if hasattr(content, 'text') and content.text:
567
if content.text.value:
568
self.current_message += content.text.value
569
print(content.text.value, end="", flush=True)
570
571
elif event.event == "thread.run.requires_action":
572
# Handle function calls
573
self.tool_calls = event.data.required_action.submit_tool_outputs.tool_calls
574
print(f"\nFunction call required: {len(self.tool_calls)} calls")
575
576
elif event.event == "thread.run.completed":
577
print("\n✓ Run completed")
578
579
elif event.event == "thread.run.failed":
580
print(f"\n✗ Run failed: {event.data.last_error}")
581
582
# Use streaming handler
583
handler = AssistantStreamHandler()
584
585
stream = client.beta.threads.runs.create(
586
thread_id=thread.id,
587
assistant_id=assistant.id,
588
stream=True
589
)
590
591
for event in stream:
592
handler.handle_event(event)
593
594
# Handle any required tool calls
595
if handler.tool_calls:
596
# Process tool calls and submit outputs
597
# (implementation depends on your specific functions)
598
pass
599
```
600
601
### Vector Stores and File Search
602
603
Use vector stores for efficient file search and retrieval augmented generation (RAG).
604
605
```python { .api }
606
# Vector store management
607
def create_vector_store(
608
self,
609
*,
610
file_ids: List[str] | NotGiven = NOT_GIVEN,
611
name: str | NotGiven = NOT_GIVEN,
612
expires_after: ExpiresAfterParam | NotGiven = NOT_GIVEN,
613
chunking_strategy: ChunkingStrategyParam | NotGiven = NOT_GIVEN,
614
metadata: Optional[object] | NotGiven = NOT_GIVEN
615
) -> VectorStore: ...
616
```
617
618
Usage examples:
619
620
```python
621
# Upload documents for file search
622
with open("knowledge_base.pdf", "rb") as f:
623
file1 = client.files.create(file=f, purpose="assistants")
624
625
with open("documentation.txt", "rb") as f:
626
file2 = client.files.create(file=f, purpose="assistants")
627
628
# Create vector store
629
vector_store = client.vector_stores.create(
630
name="Knowledge Base",
631
file_ids=[file1.id, file2.id]
632
)
633
634
print(f"Created vector store: {vector_store.id}")
635
636
# Create assistant with file search
637
search_assistant = client.beta.assistants.create(
638
name="Document Assistant",
639
instructions="Use the uploaded documents to answer questions accurately. Always cite your sources.",
640
model="gpt-4-turbo",
641
tools=[{"type": "file_search"}],
642
tool_resources={
643
"file_search": {
644
"vector_store_ids": [vector_store.id]
645
}
646
}
647
)
648
649
# Use file search in conversation
650
thread = client.beta.threads.create(
651
messages=[
652
{
653
"role": "user",
654
"content": "What information do you have about machine learning algorithms?"
655
}
656
]
657
)
658
659
run = client.beta.threads.runs.create(
660
thread_id=thread.id,
661
assistant_id=search_assistant.id
662
)
663
664
# Wait for response with file citations
665
completed_run = wait_for_run_completion(thread.id, run.id)
666
667
if completed_run.status == "completed":
668
messages = client.beta.threads.messages.list(thread_id=thread.id)
669
670
assistant_message = messages.data[0]
671
672
# Display content with citations
673
for content in assistant_message.content:
674
if hasattr(content, 'text'):
675
print(content.text.value)
676
677
# Show citations if available
678
if content.text.annotations:
679
print("\nSources:")
680
for annotation in content.text.annotations:
681
if hasattr(annotation, 'file_citation'):
682
citation = annotation.file_citation
683
print(f"- {citation.file_id}: {citation.quote}")
684
```
685
686
## Types
687
688
### Core Response Types
689
690
```python { .api }
691
class Assistant(BaseModel):
692
id: str
693
created_at: int
694
description: Optional[str]
695
instructions: Optional[str]
696
metadata: Optional[Dict[str, str]]
697
model: str
698
name: Optional[str]
699
object: Literal["assistant"]
700
tools: List[AssistantTool]
701
tool_resources: Optional[ToolResources]
702
temperature: Optional[float]
703
top_p: Optional[float]
704
response_format: Optional[AssistantResponseFormat]
705
706
class Thread(BaseModel):
707
id: str
708
created_at: int
709
metadata: Optional[Dict[str, str]]
710
object: Literal["thread"]
711
tool_resources: Optional[ToolResources]
712
713
class ThreadMessage(BaseModel):
714
id: str
715
assistant_id: Optional[str]
716
attachments: Optional[List[Attachment]]
717
completed_at: Optional[int]
718
content: List[MessageContent]
719
created_at: int
720
incomplete_at: Optional[int]
721
incomplete_details: Optional[MessageIncompleteDetails]
722
metadata: Optional[Dict[str, str]]
723
object: Literal["thread.message"]
724
role: Literal["user", "assistant"]
725
run_id: Optional[str]
726
status: Literal["in_progress", "incomplete", "completed"]
727
thread_id: str
728
729
class Run(BaseModel):
730
id: str
731
assistant_id: str
732
cancelled_at: Optional[int]
733
completed_at: Optional[int]
734
created_at: int
735
expires_at: Optional[int]
736
failed_at: Optional[int]
737
incomplete_details: Optional[RunIncompleteDetails]
738
instructions: str
739
last_error: Optional[LastError]
740
max_completion_tokens: Optional[int]
741
max_prompt_tokens: Optional[int]
742
metadata: Optional[Dict[str, str]]
743
model: str
744
object: Literal["thread.run"]
745
parallel_tool_calls: bool
746
required_action: Optional[RequiredAction]
747
response_format: Optional[AssistantResponseFormat]
748
started_at: Optional[int]
749
status: RunStatus
750
temperature: Optional[float]
751
thread_id: str
752
tool_choice: Optional[AssistantToolChoice]
753
tools: List[AssistantTool]
754
tool_resources: Optional[ToolResources]
755
top_p: Optional[float]
756
truncation_strategy: Optional[TruncationStrategy]
757
usage: Optional[RunUsage]
758
```
759
760
### Parameter Types
761
762
```python { .api }
763
# Assistant creation parameters
764
AssistantCreateParams = TypedDict('AssistantCreateParams', {
765
'model': Required[str],
766
'description': NotRequired[str],
767
'instructions': NotRequired[str],
768
'name': NotRequired[str],
769
'tools': NotRequired[List[AssistantToolUnionParam]],
770
'tool_resources': NotRequired[ToolResourcesParam],
771
'metadata': NotRequired[Optional[object]],
772
'temperature': NotRequired[float],
773
'top_p': NotRequired[float],
774
'response_format': NotRequired[AssistantResponseFormatParam],
775
}, total=False)
776
777
# Thread message parameters
778
ThreadMessageParam = TypedDict('ThreadMessageParam', {
779
'role': Required[Literal["user", "assistant"]],
780
'content': Required[Union[str, List[MessageContentPartParam]]],
781
'attachments': NotRequired[Optional[List[AttachmentParam]]],
782
'metadata': NotRequired[Optional[object]],
783
}, total=False)
784
785
# Run creation parameters
786
RunCreateParams = TypedDict('RunCreateParams', {
787
'assistant_id': Required[str],
788
'model': NotRequired[str],
789
'instructions': NotRequired[str],
790
'additional_instructions': NotRequired[str],
791
'additional_messages': NotRequired[List[ThreadMessageParam]],
792
'tools': NotRequired[List[AssistantToolUnionParam]],
793
'metadata': NotRequired[Optional[object]],
794
'temperature': NotRequired[float],
795
'top_p': NotRequired[float],
796
'stream': NotRequired[Optional[bool]],
797
'max_prompt_tokens': NotRequired[int],
798
'max_completion_tokens': NotRequired[int],
799
'truncation_strategy': NotRequired[TruncationStrategyParam],
800
'tool_choice': NotRequired[AssistantToolChoiceParam],
801
'parallel_tool_calls': NotRequired[bool],
802
'response_format': NotRequired[AssistantResponseFormatParam],
803
}, total=False)
804
```
805
806
### Tool Types
807
808
```python { .api }
809
# Assistant tools
810
AssistantToolUnionParam = Union[
811
CodeInterpreterToolParam,
812
FileSearchToolParam,
813
FunctionToolParam
814
]
815
816
class CodeInterpreterToolParam(TypedDict, total=False):
817
type: Required[Literal["code_interpreter"]]
818
819
class FileSearchToolParam(TypedDict, total=False):
820
type: Required[Literal["file_search"]]
821
file_search: FileSearchParam
822
823
class FunctionToolParam(TypedDict, total=False):
824
type: Required[Literal["function"]]
825
function: Required[FunctionDefinition]
826
827
# Tool resources
828
class ToolResourcesParam(TypedDict, total=False):
829
code_interpreter: CodeInterpreterResourceParam
830
file_search: FileSearchResourceParam
831
832
class CodeInterpreterResourceParam(TypedDict, total=False):
833
file_ids: List[str]
834
835
class FileSearchResourceParam(TypedDict, total=False):
836
vector_store_ids: List[str]
837
vector_stores: List[VectorStoreParam]
838
```
839
840
### Status and Event Types
841
842
```python { .api }
843
# Run status enumeration
844
RunStatus = Literal[
845
"queued",
846
"in_progress",
847
"requires_action",
848
"cancelling",
849
"cancelled",
850
"failed",
851
"completed",
852
"incomplete",
853
"expired"
854
]
855
856
# Stream event types
857
AssistantStreamEvent = Union[
858
ThreadRunEvent,
859
ThreadMessageEvent,
860
ThreadMessageDeltaEvent,
861
RunStepEvent,
862
RunStepDeltaEvent,
863
ErrorEvent
864
]
865
866
# Required action for function calls
867
class RequiredAction(BaseModel):
868
submit_tool_outputs: RequiredActionSubmitToolOutputs
869
type: Literal["submit_tool_outputs"]
870
871
class RequiredActionSubmitToolOutputs(BaseModel):
872
tool_calls: List[RequiredActionFunctionToolCall]
873
874
class ToolOutputParam(TypedDict, total=False):
875
tool_call_id: Required[str]
876
output: Required[str]
877
```
878
879
### Content Types
880
881
```python { .api }
882
# Message content types
883
MessageContent = Union[
884
MessageContentImageFile,
885
MessageContentImageUrl,
886
MessageContentText,
887
MessageContentRefusal
888
]
889
890
class MessageContentText(BaseModel):
891
text: MessageContentTextObject
892
type: Literal["text"]
893
894
class MessageContentTextObject(BaseModel):
895
annotations: List[MessageContentTextAnnotation]
896
value: str
897
898
# Annotations for citations
899
MessageContentTextAnnotation = Union[
900
MessageContentTextAnnotationFileCitation,
901
MessageContentTextAnnotationFilePath
902
]
903
904
class MessageContentTextAnnotationFileCitation(BaseModel):
905
end_index: int
906
file_citation: MessageContentTextAnnotationFileCitationObject
907
start_index: int
908
text: str
909
type: Literal["file_citation"]
910
911
# Attachment types
912
class AttachmentParam(TypedDict, total=False):
913
file_id: Required[str]
914
tools: Required[List[AssistantToolUnionParam]]
915
```
916
917
## Best Practices
918
919
### Assistant Design
920
921
- Write clear, specific instructions for consistent behavior
922
- Use appropriate tools for the assistant's intended purpose
923
- Set reasonable temperature values (0.1-0.7 for most tasks)
924
- Include examples in instructions for complex tasks
925
- Test assistants thoroughly before deployment
926
927
### Thread Management
928
929
- Create separate threads for different conversation topics
930
- Use metadata to track thread context and user information
931
- Clean up old threads periodically to manage costs
932
- Consider thread limits and conversation length
933
934
### File and Vector Store Usage
935
936
- Organize files logically in vector stores
937
- Use descriptive names for files and vector stores
938
- Monitor file storage usage and costs
939
- Update vector stores when source documents change
940
- Implement proper file access controls
941
942
### Function Calling
943
944
- Design functions with clear, specific purposes
945
- Provide detailed function descriptions and parameter schemas
946
- Handle function errors gracefully
947
- Validate function inputs and outputs
948
- Test function calling workflows thoroughly
949
950
### Production Considerations
951
952
- Implement proper error handling for all assistant operations
953
- Monitor usage and costs for assistants and threads
954
- Use streaming for better user experience with long responses
955
- Implement rate limiting and abuse prevention
956
- Keep assistant instructions and tools up to date
957
- Plan for assistant versioning and updates