mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00

have response type inherit from openaiobject

This commit is contained in:
parent 3d1934a829
commit ee20e6a1e6

6 changed files with 60 additions and 55 deletions
Binary file not shown.
Binary file not shown.
23  litellm/tests/test_model_response_typing/server.py  Normal file
@@ -0,0 +1,23 @@
+# #### What this tests ####
+# # This tests if the litellm model response type is returnable in a flask app
+
+# import sys, os
+# import traceback
+# from flask import Flask, request, jsonify, abort, Response
+# sys.path.insert(0, os.path.abspath('../../..')) # Adds the parent directory to the system path
+
+# import litellm
+# from litellm import completion
+
+# litellm.set_verbose = False
+
+# app = Flask(__name__)
+
+# @app.route('/')
+# def hello():
+#     data = request.json
+#     return completion(**data)
+
+# if __name__ == '__main__':
+#     from waitress import serve
+#     serve(app, host='localhost', port=8080, threads=10)
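The route returns completion(**data) directly, so it only works if the response object is something Flask can serialize. A minimal sketch of the property being relied on, assuming Flask >= 1.1 (the route and payload below are illustrative, not from the commit): Flask JSON-serializes any dict, including dict subclasses such as OpenAIObject, returned from a view.

# Sketch: Flask (>= 1.1) turns a dict return value into a JSON response.
# Because the new ModelResponse is a dict subclass (via OpenAIObject),
# returning it from a view works the same way.
from flask import Flask

app = Flask(__name__)

@app.route('/demo')
def demo():
    # any dict (or dict subclass) is serialized to application/json
    return {"choices": [{"message": {"role": "assistant", "content": "hi"}}]}

if __name__ == '__main__':
    app.run(port=8080)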
14  litellm/tests/test_model_response_typing/test.py  Normal file
@@ -0,0 +1,14 @@
+# import requests, json
+
+# BASE_URL = 'http://localhost:8080'
+
+# def test_hello_route():
+#     data = {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hey, how's it going?"}]}
+#     headers = {'Content-Type': 'application/json'}
+#     response = requests.get(BASE_URL, headers=headers, data=json.dumps(data))
+#     print(response.text)
+#     assert response.status_code == 200
+#     print("Hello route test passed!")
+
+# if __name__ == '__main__':
+#     test_hello_route()
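One quirk worth noting: the test sends its JSON payload in the body of a GET request. Flask's request.json parses the body regardless of HTTP method, so this works, though it is unconventional. A hedged equivalent using requests' json= shortcut, assuming the server above is running on localhost:8080:

# Same request as test_hello_route(), using requests' json= shortcut,
# which serializes the payload and sets Content-Type automatically.
# The server above only registers GET on '/', so GET it stays.
import requests

data = {"model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "hey, how's it going?"}]}
response = requests.get('http://localhost:8080', json=data)
assert response.status_code == 200
print(response.json())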
@@ -51,67 +51,35 @@ local_cache = {}
 # 'usage': {'prompt_tokens': 18, 'completion_tokens': 23, 'total_tokens': 41}
 # }

-class Message:
-    def __init__(self):
-        self.content: str = "default"
-        self.role: str = "assistant"
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __setitem__(self, key, value):
-        setattr(self, key, value)
-
-    def __iter__(self):
-        return iter(vars(self))
-
-    def __str__(self):
-        result = f"{{\n 'role': '{self.role}',\n 'content': \"{self.content}\"\n}}"
-        return result
-
-class Choices:
-    def __init__(self):
-        self.finish_reason: str = "stop"
-        self.index: int = 0
-        self.message: Message = Message()
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __setitem__(self, key, value):
-        setattr(self, key, value)
-
-    def __iter__(self):
-        return iter(vars(self))
-
-    def __str__(self):
-        result = f"{{\n 'finish_reason': '{self.finish_reason}',\n 'index': {self.index},\n 'message': {self.message}\n}}"
-        return result
-
-class ModelResponse(dict):
-    def __init__(self):
-        self.choices: List[Choices] = [Choices()]
-        self.created: str = None
-        self.model: str = None
-        self.usage: Dict[str, Union[int, None]] = {
+class Message(OpenAIObject):
+    def __init__(self, content="default", role="assistant", **params):
+        super(Message, self).__init__(**params)
+        self.content = content
+        self.role = role
+
+class Choices(OpenAIObject):
+    def __init__(self, finish_reason="stop", index=0, message=Message(), **params):
+        super(Choices, self).__init__(**params)
+        self.finish_reason = finish_reason
+        self.index = index
+        self.message = message
+
+class ModelResponse(OpenAIObject):
+    def __init__(self, choices=None, created=None, model=None, usage=None, **params):
+        super(ModelResponse, self).__init__(**params)
+        self.choices = choices if choices else [Choices()]
+        self.created = created
+        self.model = model
+        self.usage = usage if usage else {
             "prompt_tokens": None,
             "completion_tokens": None,
             "total_tokens": None
         }

-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __setitem__(self, key, value):
-        setattr(self, key, value)
-
-    def __iter__(self):
-        return iter(vars(self))
-
-    def __str__(self):
-        choices_str = ",\n".join(str(choice) for choice in self.choices)
-        result = f"{{\n 'choices': [\n{choices_str}\n ],\n 'created': {self.created},\n 'model': '{self.model}',\n 'usage': {self.usage}\n}}"
-        return result
+    def to_dict_recursive(self):
+        d = super().to_dict_recursive()
+        d['choices'] = [choice.to_dict_recursive() for choice in self.choices]
+        return d

 ############################################################
 def print_verbose(print_statement):
     if litellm.set_verbose:
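The mechanics behind this change, as a hedged sketch: the old ModelResponse(dict) stored its fields as instance attributes, so the underlying dict stayed empty and anything that serialized it as a dict (Flask, json.dumps) saw nothing. OpenAIObject, from the pre-1.0 openai package, is a dict subclass that routes attribute writes into the dict itself. The class names below are illustrative, not from the commit:

# Why inheriting from OpenAIObject fixes serialization. OldStyle mirrors
# the removed ModelResponse(dict); NewStyle mirrors the new approach.
import json
from openai.openai_object import OpenAIObject  # openai < 1.0

class OldStyle(dict):
    def __init__(self):
        self.model = "gpt-3.5-turbo"  # plain attribute: invisible to the dict

class NewStyle(OpenAIObject):
    def __init__(self, **params):
        super().__init__(**params)
        self.model = "gpt-3.5-turbo"  # OpenAIObject stores this as a dict key

print(json.dumps(OldStyle()))  # {} - the field is lost
print(json.dumps(NewStyle()))  # {"model": "gpt-3.5-turbo"}

OpenAIObject also provides dict-style and attribute-style access interchangeably (response["model"] and response.model), which is why the hand-written __getitem__/__setitem__/__iter__/__str__ plumbing could be deleted.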
pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.422"
+version = "0.1.424"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"