fix ollama registry

This commit is contained in:
Dinesh Yeduguru 2024-11-13 12:03:43 -08:00
parent 96e7ef646f
commit a019011326
2 changed files with 19 additions and 1 deletion

View file

@ -282,6 +282,21 @@ class OllamaInferenceAdapter(Inference, ModelRegistryHelper, ModelsProtocolPriva
) -> EmbeddingsResponse:
raise NotImplementedError()
async def register_model(self, model: Model) -> Model:
    """Register *model*, verifying Ollama actually serves it.

    First runs the base registry helper's registration (which resolves the
    provider resource id), then cross-checks that id against the models
    Ollama reports.

    Raises:
        ValueError: if Ollama does not report the resolved model id.
    """
    # Let the parent class perform its registration/validation first.
    model = await super().register_model(model)

    # NOTE(review): client.ps() reports *running* models — confirm this is
    # the intended availability check rather than the full local list.
    response = await self.client.ps()
    available_models = [entry["model"] for entry in response["models"]]

    if model.provider_resource_id not in available_models:
        raise ValueError(
            f"Model '{model.provider_resource_id}' is not available in Ollama. "
            f"Available models: {', '.join(available_models)}"
        )
    return model
async def convert_message_to_dict_for_ollama(message: Message) -> List[dict]:
async def _convert_content(content) -> dict:

View file

@ -54,7 +54,10 @@ class ModelRegistryHelper(ModelsProtocolPrivate):
raise ValueError(f"Unknown model: `{identifier}`")
def get_llama_model(self, provider_model_id: str) -> str:
    """Map a provider-specific model id to its llama model identifier.

    Args:
        provider_model_id: the provider's identifier for the model.

    Returns:
        The mapped llama model identifier, or None when no mapping is
        known for *provider_model_id*.
    """
    # Bug fix: the original miss branch was a bare `None` expression
    # (a no-op statement, not `return None`); use dict.get for an
    # explicit None on a missing key.
    return self.provider_id_to_llama_model_map.get(provider_model_id)
async def register_model(self, model: Model) -> Model:
model.provider_resource_id = self.get_provider_model_id(