# Copyright (c) Microsoft. All rights reserved.

import asyncio

from pydantic import BaseModel

from semantic_kernel.agents import AssistantAgentThread, AzureAssistantAgent
"""
The following sample demonstrates how to create an OpenAI
assistant using either Azure OpenAI or OpenAI and leverage the
assistant's ability to returned structured outputs, based on a user-defined
Pydantic model. This could also be a non-Pydantic model. Use the convenience
method on the OpenAIAssistantAgent class to configure the response format,
as shown below.
Note, you may specify your own JSON Schema. You'll need to make sure it is correct
if not using the convenience method, per the following format:
json_schema = {
"type": "json_schema",
"json_schema": {
"schema": {
"properties": {
"response": {"title": "Response", "type": "string"},
"items": {"items": {"type": "string"}, "title": "Items", "type": "array"},
},
"required": ["response", "items"],
"title": "ResponseModel",
"type": "object",
"additionalProperties": False,
},
"name": "ResponseModel",
"strict": True,
},
}
# Create the assistant definition
definition = await client.beta.assistants.create(
model=model,
name="Assistant",
instructions="You are a helpful assistant answering questions about the world in one sentence.",
response_format=json_schema,
)
"""


# Define a Pydantic model that represents the structured output from the OpenAI service
class ResponseModel(BaseModel):
    response: str
    items: list[str]
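

# A minimal illustration: once the structured-output response format is in effect, the
# assistant's reply content is a JSON string that ResponseModel can validate directly.
# The payload below is a hypothetical example of such a reply, not real service output.
_example_reply = '{"response": "The sky is blue because of Rayleigh scattering.", "items": ["Rayleigh scattering"]}'
_example = ResponseModel.model_validate_json(_example_reply)
assert _example.items == ["Rayleigh scattering"]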


async def main():
    # Create the client using Azure OpenAI resources and configuration
    client, model = AzureAssistantAgent.setup_resources()

    # Create the assistant definition
    definition = await client.beta.assistants.create(
        model=model,
        name="Assistant",
        instructions="You are a helpful assistant answering questions about the world in one sentence.",
        response_format=AzureAssistantAgent.configure_response_format(ResponseModel),
    )

    # Create the AzureAssistantAgent instance using the client and the assistant definition
    agent = AzureAssistantAgent(
        client=client,
        definition=definition,
    )

    # Create a new thread for use with the assistant
    # If no thread is provided, a new thread will be
    # created and returned with the initial response
    thread: AssistantAgentThread = None

    user_inputs = ["Why is the sky blue?"]

    try:
        for user_input in user_inputs:
            print(f"# User: '{user_input}'")
            async for response in agent.invoke(messages=user_input, thread=thread):
                # The response content is JSON matching ResponseModel, so it can be
                # validated with the model_validate_json method
                response_model = ResponseModel.model_validate_json(str(response.content))
                print(f"# {response.role}: {response_model}")
                thread = response.thread
    finally:
        # Clean up the thread (if one was created) and the assistant definition
        await thread.delete() if thread else None
        await client.beta.assistants.delete(agent.id)
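

# To target OpenAI instead of Azure OpenAI, the docstring above points at the
# OpenAIAssistantAgent convenience method. A hedged sketch, assuming OpenAIAssistantAgent
# exposes the same setup_resources/configure_response_format helpers in your installed
# semantic-kernel version (verify the method names before relying on this):
#
#   from semantic_kernel.agents import OpenAIAssistantAgent
#
#   client, model = OpenAIAssistantAgent.setup_resources()
#   definition = await client.beta.assistants.create(
#       model=model,
#       name="Assistant",
#       instructions="You are a helpful assistant answering questions about the world in one sentence.",
#       response_format=OpenAIAssistantAgent.configure_response_format(ResponseModel),
#   )
#   agent = OpenAIAssistantAgent(client=client, definition=definition)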


if __name__ == "__main__":
    asyncio.run(main())