ishaan-jaff 2023-09-26 10:00:56 -07:00
parent 6eea9da4ab
commit d6bc20d5be
2 changed files with 130 additions and 0 deletions

@@ -44,6 +44,7 @@ from .llms import ollama
from .llms import cohere
from .llms import petals
from .llms import oobabooga
from .llms import palm
import tiktoken
from concurrent.futures import ThreadPoolExecutor
from typing import Callable, List, Optional, Dict
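
The second changed file in this commit (not shown in this view; presumably the new litellm/llms/palm.py module imported above) would define the palm.completion handler called below. A minimal sketch of what such a handler might look like, assuming the legacy google.generativeai PaLM SDK; the signature is taken from the call site in the next hunk, while the body and the prompt-flattening logic are assumptions, not code from this commit:

import google.generativeai as palm_sdk


def completion(
    model,
    messages,
    model_response,
    print_verbose,
    optional_params,
    litellm_params,
    logger_fn,
    encoding,
    api_key,
    logging_obj,
):
    # configure the SDK with the key resolved in main.completion()
    palm_sdk.configure(api_key=api_key)

    # naive flattening of the OpenAI-style message list into one prompt;
    # a real handler would map roles and turns more carefully
    prompt = "\n".join(m["content"] for m in messages)

    # a real handler would also forward optional_params (temperature, etc.)
    response = palm_sdk.generate_text(prompt=prompt)

    # populate litellm's ModelResponse-style object in place
    model_response["choices"][0]["message"]["content"] = response.result
    model_response["model"] = model
    return model_response
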
@@ -792,6 +793,32 @@ def completion(
            )
            return response
        response = model_response
    elif custom_llm_provider == "palm":
        api_key = (
            api_key
            or get_secret("PALM_API_KEY")
            or litellm.api_key
        )
        model_response = palm.completion(
            model=model,
            messages=messages,
            model_response=model_response,
            print_verbose=print_verbose,
            optional_params=optional_params,
            litellm_params=litellm_params,
            logger_fn=logger_fn,
            encoding=encoding,
            api_key=api_key,
            logging_obj=logging
        )
if "stream_tokens" in optional_params and optional_params["stream_tokens"] == True:
# don't try to access stream object,
response = CustomStreamWrapper(
model_response, model, custom_llm_provider="palm", logging_obj=logging
)
return response
response = model_response
    elif model in litellm.vertex_chat_models or model in litellm.vertex_code_chat_models:
        try:
            import vertexai
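
From the caller's side, the new provider would be reached through litellm.completion like any other. A short usage sketch; the "palm/chat-bison" model string is an assumption based on the custom_llm_provider routing above, not confirmed by this diff:

import os

import litellm

# PALM_API_KEY is what the new branch resolves via get_secret()
os.environ["PALM_API_KEY"] = "..."  # hypothetical placeholder

# "palm/chat-bison" is an assumed model string; the diff only shows that
# custom_llm_provider == "palm" routes the call to palm.completion()
response = litellm.completion(
    model="palm/chat-bison",
    messages=[{"role": "user", "content": "Hello from litellm"}],
)
print(response)
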