chore: allow passing a CA cert to remote vllm
The `tls_verify` option can now also receive a path to a certificate file if the endpoint requires it.

Signed-off-by: Sébastien Han <seb@redhat.com>
parent 9623d5d230
commit f52b97843b

2 changed files with 16 additions and 4 deletions
@@ -313,7 +313,7 @@ class VLLMInferenceAdapter(Inference, ModelsProtocolPrivate):
         return AsyncOpenAI(
             base_url=self.config.url,
             api_key=self.config.api_token,
-            http_client=None if self.config.tls_verify else httpx.AsyncClient(verify=False),
+            http_client=httpx.AsyncClient(verify=self.config.tls_verify),
         )
 
     async def completion(
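
For context on why the one-line change above is sufficient: httpx's `verify` argument accepts a boolean (use or skip certificate verification) as well as a string path to a PEM CA bundle, so forwarding `self.config.tls_verify` unchanged covers both the old boolean behaviour and the new certificate-path behaviour. A minimal sketch of the three modes, assuming a placeholder endpoint URL and CA path:

```python
import asyncio

import httpx


async def check(verify: bool | str) -> None:
    # httpx accepts verify=True (system CA store), verify=False (no
    # verification), or a path to a PEM CA bundle for custom endpoints.
    async with httpx.AsyncClient(verify=verify) as client:
        # Placeholder URL; substitute your vLLM server's address.
        resp = await client.get("https://vllm.example.com/v1/models")
        print(f"verify={verify!r} -> HTTP {resp.status_code}")


asyncio.run(check(True))
asyncio.run(check(False))
asyncio.run(check("/etc/certs/vllm-ca.pem"))  # placeholder CA path
```

With this change, a `tls_verify` value in the adapter config can therefore hold either a boolean or the path to the CA certificate used to verify the remote vLLM endpoint.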