Mirror of https://github.com/BerriAI/litellm.git
Fix ollama api_base to enable remote url
commit ef69eefcdb (parent 2ae489c506)
1 changed file with 7 additions and 0 deletions
@@ -2759,6 +2759,12 @@ def embedding(
             model_response=EmbeddingResponse(),
         )
     elif custom_llm_provider == "ollama":
+        api_base = (
+            litellm.api_base
+            or api_base
+            or get_secret("OLLAMA_API_BASE")
+            or "http://localhost:11434"
+        )
         ollama_input = None
         if isinstance(input, list) and len(input) > 1:
             raise litellm.BadRequestError(
@@ -2779,6 +2785,7 @@ def embedding(

         if aembedding == True:
             response = ollama.ollama_aembeddings(
+                api_base=api_base,
                 model=model,
                 prompt=ollama_input,
                 encoding=encoding,
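For context, the change makes the Ollama embedding path resolve api_base from litellm.api_base, the api_base argument, or the OLLAMA_API_BASE environment variable before falling back to http://localhost:11434, and then passes it through to ollama.ollama_aembeddings. A minimal usage sketch of the remote-host case follows; the model name and host URL are illustrative assumptions, not part of this commit:

    import litellm

    # With this fix, an explicit api_base (or the OLLAMA_API_BASE env var)
    # should point Ollama embeddings at a remote server instead of localhost.
    response = litellm.embedding(
        model="ollama/nomic-embed-text",          # illustrative model name
        input=["hello world"],                    # single-item list; multi-item lists raise BadRequestError here
        api_base="http://my-ollama-host:11434",   # illustrative remote Ollama URL
    )
    print(response)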