Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -670,6 +670,7 @@ def _prepare_options(
tool_choice = options.get("tool_choice")
tools = options.get("tools")
response_format = options.get("response_format")
tool_resources = options.get("tool_resources")

if max_tokens is not None:
run_options["max_completion_tokens"] = max_tokens
Expand All @@ -683,6 +684,9 @@ def _prepare_options(
if allow_multiple_tool_calls is not None:
run_options["parallel_tool_calls"] = allow_multiple_tool_calls

if tool_resources is not None:
run_options["tool_resources"] = tool_resources

tool_mode = validate_tool_mode(tool_choice)
tool_definitions: list[MutableMapping[str, Any]] = []
# Always include tools if provided, regardless of tool_choice
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
import contextlib

from agent_framework import Agent, Content
from agent_framework import Agent
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

Expand All @@ -22,7 +23,7 @@
# Helper functions


async def create_vector_store(client: AzureOpenAIResponsesClient) -> tuple[str, Content]:
async def create_vector_store(client: AzureOpenAIResponsesClient) -> tuple[str, str]:
"""Create a vector store with sample documents."""
file = await client.client.files.create(
file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."), purpose="assistants"
Expand All @@ -35,13 +36,15 @@ async def create_vector_store(client: AzureOpenAIResponsesClient) -> tuple[str,
if result.last_error is not None:
raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")

return file.id, Content.from_hosted_vector_store(vector_store_id=vector_store.id)
return file.id, vector_store.id


async def delete_vector_store(client: AzureOpenAIResponsesClient, file_id: str, vector_store_id: str) -> None:
    """Best-effort teardown of the sample's vector store and uploaded file.

    Each deletion is wrapped in its own ``contextlib.suppress(Exception)`` so
    that a failure deleting the vector store does not prevent the file
    deletion from being attempted (and vice versa). Cleanup errors are
    deliberately swallowed — this is demo teardown, not production logic.

    Args:
        client: Azure OpenAI Responses client whose underlying SDK client
            owns the resources.
        file_id: ID of the uploaded file to delete.
        vector_store_id: ID of the vector store to delete.
    """
    with contextlib.suppress(Exception):
        await client.client.vector_stores.delete(vector_store_id=vector_store_id)
    with contextlib.suppress(Exception):
        await client.client.files.delete(file_id=file_id)


async def main() -> None:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,12 @@ async def main() -> None:

# Create web search tool with location context
web_search_tool = client.get_web_search_tool(
user_location={"city": "Seattle", "country": "US"},
web_search_options={
"user_location": {
"type": "approximate",
"approximate": {"city": "Seattle", "country": "US"},
},
},
)

agent = Agent(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import asyncio

from agent_framework import Agent, Content
from agent_framework import Agent
from agent_framework.openai import OpenAIResponsesClient

"""
Expand All @@ -15,7 +15,7 @@
# Helper functions


async def create_vector_store(client: OpenAIResponsesClient) -> tuple[str, Content]:
async def create_vector_store(client: OpenAIResponsesClient) -> tuple[str, str]:
"""Create a vector store with sample documents."""
file = await client.client.files.create(
file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."), purpose="user_data"
Expand All @@ -28,7 +28,7 @@ async def create_vector_store(client: OpenAIResponsesClient) -> tuple[str, Conte
if result.last_error is not None:
raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")

return file.id, Content.from_hosted_vector_store(vector_store_id=vector_store.id)
return file.id, vector_store.id


async def delete_vector_store(client: OpenAIResponsesClient, file_id: str, vector_store_id: str) -> None:
Expand All @@ -53,14 +53,14 @@ async def main() -> None:
)

if stream:
print("Assistant: ", end="")
print("Agent: ", end="")
async for chunk in agent.run(message, stream=True):
if chunk.text:
print(chunk.text, end="")
print("")
else:
response = await agent.run(message)
print(f"Assistant: {response}")
print(f"Agent: {response}")
await delete_vector_store(client, file_id, vector_store_id)


Expand Down