fix(dev): fix vllm inference recording (await models.list)

Matthew Farrellee 2025-09-23 10:33:55 -04:00
parent a7f9ce9a3a
commit 3ea30c0a9c


@@ -504,7 +504,7 @@ class VLLMInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin, Inference, ModelsPro
         except ValueError:
             pass  # Ignore statically unknown model, will check live listing
         try:
-            res = await self.client.models.list()
+            res = self.client.models.list()
         except APIConnectionError as e:
             raise ValueError(
                 f"Failed to connect to vLLM at {self.config.url}. Please check if vLLM is running and accessible at that URL."