fix(huggingface_restapi.py): fix huggingface response format

This commit is contained in:
Krrish Dholakia 2023-11-21 09:20:27 -08:00
parent a534928337
commit 6892fd8b51
4 changed files with 3 additions and 3 deletions

BIN
dist/litellm-1.3.3.dev1-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/litellm-1.3.3.dev1.tar.gz vendored Normal file

Binary file not shown.

View file

@@ -406,7 +406,7 @@ class Huggingface(BaseLLM):
try:
completion_response = response.json()
if isinstance(completion_response, dict):
completion_response: List[Dict[str, Any]] = [{"generated_text": content}]
completion_response: List[Dict[str, Any]] = [{"generated_text": completion_response}]
except:
import traceback
raise HuggingfaceError(

View file

@@ -159,7 +159,7 @@ def test_completion_gpt4_vision():
pass
except Exception as e:
pytest.fail(f"Error occurred: {e}")
test_completion_gpt4_vision()
# test_completion_gpt4_vision()
def test_completion_perplexity_api():
try:
@@ -283,7 +283,7 @@ def hf_test_completion_tgi():
print(response)
except Exception as e:
pytest.fail(f"Error occurred: {e}")
# hf_test_completion_tgi()
hf_test_completion_tgi()
def hf_test_completion_tgi_stream():
try: