Merge remote-tracking branch 'upstream/main' into api-pkg

Signed-off-by: Charlie Doern <cdoern@redhat.com>
Charlie Doern 2025-11-12 13:53:31 -05:00
commit d6b915ce0a
48 changed files with 1990 additions and 425 deletions


@@ -115,7 +115,15 @@ def openai_client(base_url, api_key, provider):
         client = LlamaStackAsLibraryClient(config, skip_logger_removal=True)
         return client
 
-    return OpenAI(
+    client = OpenAI(
         base_url=base_url,
         api_key=api_key,
+        max_retries=0,
+        timeout=30.0,
     )
+    yield client
+    # Cleanup: close HTTP connections
+    try:
+        client.close()
+    except Exception:
+        pass
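The hunk above converts the client fixture from a plain return into a yield fixture so teardown can close pooled HTTP connections, and it bounds retries and request time. A minimal standalone sketch of that pattern follows; it assumes pytest fixtures named `base_url` and `api_key` and omits the `LlamaStackAsLibraryClient` branch present in the real fixture.

```python
import pytest
from openai import OpenAI


@pytest.fixture
def openai_client(base_url, api_key):
    # Simplified sketch: no retries and a 30s request timeout keep a wedged
    # connection from stalling the whole test session.
    client = OpenAI(
        base_url=base_url,
        api_key=api_key,
        max_retries=0,
        timeout=30.0,
    )
    yield client
    # Teardown runs after the dependent tests finish: close the client so its
    # pooled httpx connections cannot leak into later tests.
    try:
        client.close()
    except Exception:
        pass
```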


@@ -65,8 +65,14 @@ class TestConversationResponses:
         conversation_items = openai_client.conversations.items.list(conversation.id)
         assert len(conversation_items.data) >= 4  # 2 user + 2 assistant messages
 
+    @pytest.mark.timeout(60, method="thread")
     def test_conversation_context_loading(self, openai_client, text_model_id):
-        """Test that conversation context is properly loaded for responses."""
+        """Test that conversation context is properly loaded for responses.
+
+        Note: 60s timeout added due to CI-specific deadlock in pytest/OpenAI client/httpx
+        after running 25+ tests. Hangs before first HTTP request is made. Works fine locally.
+        Investigation needed: connection pool exhaustion or event loop state issue.
+        """
         conversation = openai_client.conversations.create(
             items=[
                 {"type": "message", "role": "user", "content": "My name is Alice. I like to eat apples."},