Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-18 23:28:53 +00:00)
feat(responses)!: improve responses + conversations implementations (#3810)
This PR updates the Conversation item-related types and improves a couple of critical parts of the implementation:

- It creates a streaming output item for the final assistant message output by the model. Until now we only added content parts and included that message in the final response.
- It rewrites the conversation update code completely to account for items other than messages (tool calls, outputs, etc.).

## Test Plan

Used the test script from https://github.com/llamastack/llama-stack-client-python/pull/281 for this:

```
TEST_API_BASE_URL=http://localhost:8321/v1 \
  pytest tests/integration/test_agent_turn_step_events.py::test_client_side_function_tool -xvs
```
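For context, here is a minimal client-side sketch of what the new per-item streaming behavior is expected to look like. It assumes an OpenAI-compatible Responses endpoint served by Llama Stack at http://localhost:8321/v1 and uses the standard OpenAI Responses streaming event names (`response.output_item.added`, `response.output_text.delta`, `response.output_item.done`); the model id and credentials are placeholders, and the snippet is illustrative rather than part of this PR:

```python
# Illustrative sketch only (not part of this PR): with the final assistant message
# now emitted as its own streaming output item, a client should see item-level
# events in addition to the content-part deltas that were already streamed.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1", api_key="none")  # placeholder endpoint/key

stream = client.responses.create(
    model="llama3.2:3b",  # placeholder model id
    input="Say hello",
    stream=True,
)

for event in stream:
    if event.type == "response.output_item.added":
        print("output item started:", event.item.type)
    elif event.type == "response.output_text.delta":
        print("text delta:", event.delta)
    elif event.type == "response.output_item.done":
        print("output item finished:", event.item.type)
```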
parent: add8cd801b
commit: e9b4278a51

129 changed files with 86266 additions and 903 deletions
```diff
@@ -16,21 +16,11 @@ def new_vector_store(openai_client, name, embedding_model, embedding_dimension):
             openai_client.vector_stores.delete(vector_store_id=vector_store.id)

     # Create a new vector store
-    # OpenAI SDK client uses extra_body for non-standard parameters
-    from openai import OpenAI
-
-    if isinstance(openai_client, OpenAI):
-        # OpenAI SDK client - use extra_body
-        vector_store = openai_client.vector_stores.create(
-            name=name,
-            extra_body={"embedding_model": embedding_model, "embedding_dimension": embedding_dimension},
-        )
-    else:
-        # LlamaStack client - direct parameter
-        vector_store = openai_client.vector_stores.create(
-            name=name, embedding_model=embedding_model, embedding_dimension=embedding_dimension
-        )
+    vector_store = openai_client.vector_stores.create(
+        name=name,
+        extra_body={"embedding_model": embedding_model, "embedding_dimension": embedding_dimension},
+    )

     return vector_store
```
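For reference, a minimal sketch of why the unified call works: the OpenAI Python SDK forwards anything passed in `extra_body` as extra fields in the request payload, so provider-specific parameters like `embedding_model` and `embedding_dimension` still reach the Llama Stack server without an isinstance check. The base URL, API key, store name, and embedding model below are placeholders, not values taken from this diff:

```python
# Minimal sketch, assuming a Llama Stack server exposing the OpenAI-compatible
# API at http://localhost:8321/v1. Fields in extra_body are merged into the JSON
# body of the request, which is how non-standard vector store parameters are
# passed through the standard OpenAI SDK.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1", api_key="none")  # placeholder endpoint/key

vector_store = client.vector_stores.create(
    name="my-test-store",  # placeholder name
    extra_body={
        "embedding_model": "all-MiniLM-L6-v2",  # placeholder embedding model
        "embedding_dimension": 384,
    },
)
print(vector_store.id)
```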