Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-11 19:56:03 +00:00)
fix(mypy): resolve OpenAI SDK NotGiven/Omit type mismatches
Refactor embeddings API calls to avoid the NotGiven/Omit type incompatibility by conditionally building a kwargs dict that contains only non-None parameters.

- openai_mixin.py: Build kwargs conditionally for embeddings.create()
- gemini.py: Apply the same pattern and add the `Any` import

This approach avoids `type: ignore` comments by not passing NOT_GIVEN sentinel values, which conflict with the Omit type annotations in the OpenAI SDK.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
parent 382900d7a8
commit 7e37790647

2 changed files with 25 additions and 24 deletions
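To illustrate the approach described in the commit message, here is a minimal sketch of the before/after pattern. It is not the adapter code from this commit: `embeddings_before` and `embeddings_after` are hypothetical names, the module-level `client` stands in for the adapter's `self.client`, and it assumes `OPENAI_API_KEY` is set in the environment. The point is simply that optional parameters are added to a `dict[str, Any]` only when they are set, so no NOT_GIVEN sentinel ever has to satisfy an Omit annotation.

```python
# Minimal sketch of the pattern; not the adapter code in the diff below.
from typing import Any

from openai import NOT_GIVEN, AsyncOpenAI

client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set in the environment


async def embeddings_before(model: str, text: str, dimensions: int | None):
    # Old style: always pass the optional parameter, with NOT_GIVEN as a
    # placeholder. If the installed SDK version annotates the parameter with
    # Omit rather than NotGiven, mypy rejects the sentinel.
    return await client.embeddings.create(
        model=model,
        input=text,
        dimensions=dimensions if dimensions is not None else NOT_GIVEN,
    )


async def embeddings_after(model: str, text: str, dimensions: int | None):
    # New style: include only the parameters that are actually set and unpack
    # the dict, so no sentinel value is passed at all.
    kwargs: dict[str, Any] = {"model": model, "input": text}
    if dimensions is not None:
        kwargs["dimensions"] = dimensions
    return await client.embeddings.create(**kwargs)
```

The same kwargs-building shape appears twice in the diff below, once in gemini.py and once in openai_mixin.py.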
gemini.py:

@@ -4,6 +4,8 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
+from typing import Any
+
 from openai import NOT_GIVEN
 
 from llama_stack.apis.inference import (
@@ -37,22 +39,21 @@ class GeminiInferenceAdapter(OpenAIMixin):
         Override embeddings method to handle Gemini's missing usage statistics.
         Gemini's embedding API doesn't return usage information, so we provide default values.
         """
-        # Prepare request parameters
-        request_params = {
+        # Build kwargs conditionally to avoid NotGiven/Omit type mismatch
+        kwargs: dict[str, Any] = {
             "model": await self._get_provider_model_id(params.model),
             "input": params.input,
-            "encoding_format": params.encoding_format if params.encoding_format is not None else NOT_GIVEN,
-            "dimensions": params.dimensions if params.dimensions is not None else NOT_GIVEN,
-            "user": params.user if params.user is not None else NOT_GIVEN,
         }
+        if params.encoding_format is not None:
+            kwargs["encoding_format"] = params.encoding_format
+        if params.dimensions is not None:
+            kwargs["dimensions"] = params.dimensions
+        if params.user is not None:
+            kwargs["user"] = params.user
+        if params.model_extra:
+            kwargs["extra_body"] = params.model_extra
 
-        # Add extra_body if present
-        extra_body = params.model_extra
-        if extra_body:
-            request_params["extra_body"] = extra_body
-
-        # Call OpenAI embeddings API with properly typed parameters
-        response = await self.client.embeddings.create(**request_params)
+        response = await self.client.embeddings.create(**kwargs)
 
         data = []
         for i, embedding_data in enumerate(response.data):
openai_mixin.py:

@@ -351,22 +351,22 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel):
         """
         Direct OpenAI embeddings API call.
         """
-        # Prepare request parameters
-        request_params = {
+        # Build kwargs conditionally to avoid NotGiven/Omit type mismatch
+        # The OpenAI SDK uses Omit in signatures but NOT_GIVEN has type NotGiven
+        kwargs: dict[str, Any] = {
             "model": await self._get_provider_model_id(params.model),
             "input": params.input,
-            "encoding_format": params.encoding_format if params.encoding_format is not None else NOT_GIVEN,
-            "dimensions": params.dimensions if params.dimensions is not None else NOT_GIVEN,
-            "user": params.user if params.user is not None else NOT_GIVEN,
         }
+        if params.encoding_format is not None:
+            kwargs["encoding_format"] = params.encoding_format
+        if params.dimensions is not None:
+            kwargs["dimensions"] = params.dimensions
+        if params.user is not None:
+            kwargs["user"] = params.user
+        if params.model_extra:
+            kwargs["extra_body"] = params.model_extra
 
-        # Add extra_body if present
-        extra_body = params.model_extra
-        if extra_body:
-            request_params["extra_body"] = extra_body
-
-        # Call OpenAI embeddings API with properly typed parameters
-        response = await self.client.embeddings.create(**request_params)
+        response = await self.client.embeddings.create(**kwargs)
 
         data = []
         for i, embedding_data in enumerate(response.data):
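Why the kwargs-dict shape satisfies mypy without `type: ignore` can be seen in a small self-contained illustration. Everything below is a stand-in that only mimics the shape of the SDK signatures; `Omit`, `NotGiven`, and `create()` are not the real openai package. A sentinel of one type does not pass the per-argument check against a default of another type, while unpacking a `dict[str, Any]` is not checked per parameter.

```python
# Stand-ins that mimic the shape of the SDK signatures; not the real openai package.
from typing import Any


class Omit:
    pass


class NotGiven:
    pass


NOT_GIVEN = NotGiven()


def create(*, model: str, dimensions: int | Omit = Omit()) -> None:
    print(model, dimensions)


def old_style(model: str, dimensions: int | None) -> None:
    # mypy: argument "dimensions" has incompatible type "int | NotGiven",
    # expected "int | Omit" -- the suppression below is exactly the kind of
    # comment the commit avoids.
    create(model=model, dimensions=dimensions if dimensions is not None else NOT_GIVEN)  # type: ignore[arg-type]


def new_style(model: str, dimensions: int | None) -> None:
    # Only set keys are added, so no sentinel value is ever passed.
    kwargs: dict[str, Any] = {"model": model}
    if dimensions is not None:
        kwargs["dimensions"] = dimensions
    create(**kwargs)  # unpacking a dict[str, Any] passes the check


new_style("example-model", None)
new_style("example-model", 256)
```

Without the `type: ignore` comment, mypy reports an arg-type error on the old-style call and accepts the new-style one.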