mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
fix: use custom_llm_provider from kwargs if provided
This commit is contained in:
parent
0cfc3e822c
commit
71b99b640d
1 changed file with 3 additions and 1 deletion
|
@@ -3244,7 +3244,9 @@ async def aembedding(*args, **kwargs) -> EmbeddingResponse:
     func_with_context = partial(ctx.run, func)

     _, custom_llm_provider, _, _ = get_llm_provider(
-        model=model, api_base=kwargs.get("api_base", None)
+        model=model,
+        custom_llm_provider=kwargs.get("custom_llm_provider", None),
+        api_base=kwargs.get("api_base", None),
     )

     # Await normally
Loading…
Add table
Add a link
Reference in a new issue