bug fixes for response object

Krrish Dholakia 2023-08-17 20:24:38 -07:00
parent c20ba023c2
commit 6cd4c5531e
5 changed files with 3 additions and 2 deletions

@@ -12,6 +12,7 @@ from .integrations.aispend import AISpendLogger
 from .integrations.berrispend import BerriSpendLogger
 from .integrations.supabase import Supabase
 from openai.error import OpenAIError as OriginalError
+from openai.openai_object import OpenAIObject
 from .exceptions import AuthenticationError, InvalidRequestError, RateLimitError, ServiceUnavailableError, OpenAIError
 from typing import List, Dict, Union
 ####### ENVIRONMENT VARIABLES ###################
@@ -87,7 +88,7 @@ class Choices:
         result = f"{{\n 'finish_reason': '{self.finish_reason}',\n 'index': {self.index},\n 'message': {self.message}\n}}"
         return result
-class ModelResponse:
+class ModelResponse(dict):
     def __init__(self):
         self.choices: List[Choices] = [Choices()]
         self.created: str = None
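
The functional change in this file is ModelResponse becoming a dict subclass. Below is a minimal sketch of that pattern; the Message/Choices stubs and the key mirroring in __init__ are illustrative assumptions rather than litellm's code, since the diff only shows the base-class change. The point is that a dict subclass lets callers read the response either as attributes or as keys.

# Minimal sketch, not litellm's actual implementation: subclassing dict lets a
# response object support both attribute access and dict-style access.
from typing import List, Optional

class Message:
    def __init__(self, content: str = "default", role: str = "assistant"):
        self.content = content
        self.role = role

class Choices:
    def __init__(self, finish_reason: str = "stop", index: int = 0,
                 message: Optional[Message] = None):
        self.finish_reason = finish_reason
        self.index = index
        self.message = message if message is not None else Message()

class ModelResponse(dict):
    def __init__(self):
        super().__init__()
        self.choices: List[Choices] = [Choices()]
        self.created: Optional[str] = None
        # Assumption for illustration: mirror the attributes as dict keys so
        # callers that index into the response (response["choices"]) also work.
        self["choices"] = self.choices
        self["created"] = self.created

response = ModelResponse()
print(response.choices[0].finish_reason)      # attribute access -> "stop"
print(response["choices"][0].finish_reason)   # dict-style access -> "stop"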

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.421"
+version = "0.1.422"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"