Fixes to bedrock

Krrish Dholakia 2023-10-02 16:09:06 -07:00
parent 8c48af11c2
commit d49a36004c
5 changed files with 6 additions and 6 deletions

View file

@@ -1,4 +1,4 @@
-import json
+import json, copy
 from enum import Enum
 import time
 from typing import Callable
@@ -85,7 +85,6 @@ def completion(
     encoding,
     logging_obj,
     optional_params=None,
-    stream=False,
     litellm_params=None,
     logger_fn=None,
 ):
@@ -99,10 +98,12 @@ def completion(
     model = model
     provider = model.split(".")[0]
     prompt = convert_messages_to_prompt(messages, provider)
+    inference_params = copy.deepcopy(optional_params)
+    stream = inference_params.pop("stream", False)
     if provider == "anthropic":
         data = json.dumps({
             "prompt": prompt,
-            **optional_params
+            **inference_params
         })
     elif provider == "ai21":
         data = json.dumps({
@@ -112,7 +113,7 @@
     else: # amazon titan
         data = json.dumps({
             "inputText": prompt,
-            "textGenerationConfig": optional_params,
+            "textGenerationConfig": inference_params,
         })
     ## LOGGING
     logging_obj.pre_call(
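
The two hunks above share one idea: `stream` controls how litellm reads the response, so it should not leak into the JSON body sent to Bedrock, and the provider params are deep-copied so popping keys never mutates the caller's dict. A minimal standalone sketch of that pattern (the function name `build_request_body` and the example values are illustrative, not litellm's API):

import copy
import json

def build_request_body(provider, prompt, optional_params):
    # Deep-copy so popping keys never mutates the dict the caller handed in.
    inference_params = copy.deepcopy(optional_params)
    # "stream" tells the client how to read the response; it is not a model
    # parameter, so strip it before serializing the provider payload.
    stream = inference_params.pop("stream", False)
    if provider == "anthropic":
        body = json.dumps({"prompt": prompt, **inference_params})
    else:  # amazon titan style
        body = json.dumps({"inputText": prompt, "textGenerationConfig": inference_params})
    return body, stream

body, stream = build_request_body(
    "anthropic",
    "\n\nHuman: hi\n\nAssistant:",
    {"max_tokens_to_sample": 256, "stream": True},
)
assert "stream" not in json.loads(body)   # control flag kept out of the Bedrock payload
assert stream is True                     # streaming decision preserved for the client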

View file

@@ -1003,7 +1003,6 @@ def completion(
             logger_fn=logger_fn,
             encoding=encoding,
             logging_obj=logging,
-            stream=stream,
         )
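
This hunk is the caller-side half of the change: `stream` is no longer passed to the Bedrock handler as a separate keyword, because the handler now pops it out of `optional_params` itself. A rough sketch of the resulting contract, assuming the handler signature shown above (names and values not visible in the diff, such as the return value, are illustrative):

import copy

def bedrock_completion(model, messages, encoding=None, logging_obj=None,
                       optional_params=None, litellm_params=None, logger_fn=None):
    # No `stream` keyword any more: the handler derives it from optional_params.
    inference_params = copy.deepcopy(optional_params or {})
    stream = inference_params.pop("stream", False)
    return {"model": model, "stream": stream, "inference_params": inference_params}

# The caller forwards stream inside optional_params instead of as its own argument.
result = bedrock_completion(
    model="anthropic.claude-v2",
    messages=[{"role": "user", "content": "hi"}],
    optional_params={"max_tokens_to_sample": 64, "stream": True},
)
assert result["stream"] is True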

View file

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.812"
+version = "0.1.813"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"