forked from phoenix/litellm-mirror
bug fixes
commit d2f8a7b2ae (parent 122c993e6f)
6 changed files with 3 additions and 3 deletions
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -349,7 +349,6 @@ def test_completion_nlp_cloud_streaming():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-


 #### Test Function calling + streaming ####

 def test_completion_openai_with_functions():
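For context, the streaming tests around this hunk follow a simple pattern: call litellm.completion with stream=True, iterate over the returned chunks, and fail the test if anything raises. A minimal sketch of that pattern (the test name, model, and prompt below are placeholders, not taken from the diff):

import pytest
import litellm

def test_streaming_smoke():
    # Placeholder test illustrating the try/except + pytest.fail pattern shown above.
    try:
        response = litellm.completion(
            model="gpt-3.5-turbo",  # placeholder model name
            messages=[{"role": "user", "content": "hello"}],
            stream=True,
        )
        for chunk in response:
            print(chunk)  # each chunk is a partial completion delta
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")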
@@ -1086,7 +1086,7 @@ def get_llm_provider(model: str, custom_llm_provider: Optional[str] = None):
     elif model in litellm.ai21_models:
         custom_llm_provider = "ai21"
     ## together_ai
-    elif model in litellm.together_ai_models or "togethercomputer":
+    elif model in litellm.together_ai_models:
         custom_llm_provider = "together_ai"
     ## aleph_alpha
     elif model in litellm.aleph_alpha_models:
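The one-line change above fixes a Python truthiness bug: the old condition parses as (model in litellm.together_ai_models) or "togethercomputer", and because a non-empty string literal is always truthy, every model that reached this branch was routed to "together_ai" regardless of its name. A minimal standalone demonstration of the pitfall (the model names below are illustrative, not taken from the library):

# Illustrative stand-in for litellm.together_ai_models
together_ai_models = ["togethercomputer/llama-2-70b-chat"]

model = "command-nightly"  # a model that should NOT be routed to together_ai

# Buggy check: the non-empty string "togethercomputer" makes the whole expression truthy.
print(bool(model in together_ai_models or "togethercomputer"))  # True

# Fixed check: only the membership test decides the routing.
print(bool(model in together_ai_models))  # False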
@@ -2435,6 +2435,7 @@ class CustomStreamWrapper:
         chunk = chunk.decode("utf-8")
         data_json = json.loads(chunk)
         try:
+            print(f"data json: {data_json}")
             return data_json["generated_text"]
         except:
             raise ValueError(f"Unable to parse response. Original response: {chunk}")
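The added print is a debug trace inside one of CustomStreamWrapper's chunk handlers, which decodes each raw streaming chunk, parses it as JSON, and returns its generated_text field, raising ValueError when the payload has an unexpected shape. A self-contained sketch of that parsing step, with a hypothetical function name and a fabricated sample chunk (neither appears in the diff):

import json

def parse_generated_text(chunk: bytes) -> str:
    # Hypothetical standalone version of the handler logic shown in the hunk above.
    text = chunk.decode("utf-8")
    data_json = json.loads(text)
    try:
        print(f"data json: {data_json}")  # debug trace added by this commit
        return data_json["generated_text"]
    except KeyError:  # the original uses a bare except
        raise ValueError(f"Unable to parse response. Original response: {text}")

# Example usage with a made-up chunk payload:
print(parse_generated_text(b'{"generated_text": "Hello from the stream"}'))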
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.682"
+version = "0.1.683"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"