From 7e377906478a0f9f5000a9ef099f83df94894713 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Mon, 27 Oct 2025 23:12:59 -0700
Subject: [PATCH] fix(mypy): resolve OpenAI SDK NotGiven/Omit type mismatches
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Refactor embeddings API calls to avoid the NotGiven/Omit type incompatibility
by conditionally building the kwargs dict with only non-None parameters.

- openai_mixin.py: Build kwargs conditionally for embeddings.create()
- gemini.py: Apply same pattern + add Any import

This approach avoids type: ignore comments by not passing NOT_GIVEN
sentinel values that conflict with Omit type annotations in the OpenAI SDK.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .../remote/inference/gemini/gemini.py         | 25 ++++++++++---------
 .../providers/utils/inference/openai_mixin.py | 24 +++++++++---------
 2 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/src/llama_stack/providers/remote/inference/gemini/gemini.py b/src/llama_stack/providers/remote/inference/gemini/gemini.py
index 27fea8b32..f96693ec4 100644
--- a/src/llama_stack/providers/remote/inference/gemini/gemini.py
+++ b/src/llama_stack/providers/remote/inference/gemini/gemini.py
@@ -4,6 +4,8 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
+from typing import Any
+
 from openai import NOT_GIVEN
 
 from llama_stack.apis.inference import (
@@ -37,22 +39,21 @@ class GeminiInferenceAdapter(OpenAIMixin):
         Override embeddings method to handle Gemini's missing usage statistics.
         Gemini's embedding API doesn't return usage information, so we provide default values.
         """
-        # Prepare request parameters
-        request_params = {
+        # Build kwargs conditionally to avoid NotGiven/Omit type mismatch
+        kwargs: dict[str, Any] = {
             "model": await self._get_provider_model_id(params.model),
             "input": params.input,
-            "encoding_format": params.encoding_format if params.encoding_format is not None else NOT_GIVEN,
-            "dimensions": params.dimensions if params.dimensions is not None else NOT_GIVEN,
-            "user": params.user if params.user is not None else NOT_GIVEN,
         }
+        if params.encoding_format is not None:
+            kwargs["encoding_format"] = params.encoding_format
+        if params.dimensions is not None:
+            kwargs["dimensions"] = params.dimensions
+        if params.user is not None:
+            kwargs["user"] = params.user
+        if params.model_extra:
+            kwargs["extra_body"] = params.model_extra
 
-        # Add extra_body if present
-        extra_body = params.model_extra
-        if extra_body:
-            request_params["extra_body"] = extra_body
-
-        # Call OpenAI embeddings API with properly typed parameters
-        response = await self.client.embeddings.create(**request_params)
+        response = await self.client.embeddings.create(**kwargs)
 
         data = []
         for i, embedding_data in enumerate(response.data):
diff --git a/src/llama_stack/providers/utils/inference/openai_mixin.py b/src/llama_stack/providers/utils/inference/openai_mixin.py
index bbd3d2e10..7c8c4159a 100644
--- a/src/llama_stack/providers/utils/inference/openai_mixin.py
+++ b/src/llama_stack/providers/utils/inference/openai_mixin.py
@@ -351,22 +351,22 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel):
         """
         Direct OpenAI embeddings API call.
""" - # Prepare request parameters - request_params = { + # Build kwargs conditionally to avoid NotGiven/Omit type mismatch + # The OpenAI SDK uses Omit in signatures but NOT_GIVEN has type NotGiven + kwargs: dict[str, Any] = { "model": await self._get_provider_model_id(params.model), "input": params.input, - "encoding_format": params.encoding_format if params.encoding_format is not None else NOT_GIVEN, - "dimensions": params.dimensions if params.dimensions is not None else NOT_GIVEN, - "user": params.user if params.user is not None else NOT_GIVEN, } + if params.encoding_format is not None: + kwargs["encoding_format"] = params.encoding_format + if params.dimensions is not None: + kwargs["dimensions"] = params.dimensions + if params.user is not None: + kwargs["user"] = params.user + if params.model_extra: + kwargs["extra_body"] = params.model_extra - # Add extra_body if present - extra_body = params.model_extra - if extra_body: - request_params["extra_body"] = extra_body - - # Call OpenAI embeddings API with properly typed parameters - response = await self.client.embeddings.create(**request_params) + response = await self.client.embeddings.create(**kwargs) data = [] for i, embedding_data in enumerate(response.data):