refactor(ai21, aleph-alpha, ollama): making ai21, aleph-alpha, ollama compatible with openai v1 sdk

This commit is contained in:
Krrish Dholakia 2023-11-11 17:49:13 -08:00
parent c6ce3fedcd
commit ae35c13015
3 changed files with 7 additions and 2 deletions

View file

@@ -11,7 +11,7 @@ class AI21Error(Exception):
def __init__(self, status_code, message): def __init__(self, status_code, message):
self.status_code = status_code self.status_code = status_code
self.message = message self.message = message
self.request = httpx.Request(method="POST", url="https://api.replicate.com/v1/deployments") self.request = httpx.Request(method="POST", url="https://api.ai21.com/studio/v1/")
self.response = httpx.Response(status_code=status_code, request=self.request) self.response = httpx.Response(status_code=status_code, request=self.request)
super().__init__( super().__init__(
self.message self.message
@@ -152,7 +152,6 @@ def completion(
original_response=response.text, original_response=response.text,
additional_args={"complete_input_dict": data}, additional_args={"complete_input_dict": data},
) )
print_verbose(f"raw model_response: {response.text}")
## RESPONSE OBJECT ## RESPONSE OBJECT
completion_response = response.json() completion_response = response.json()
if "error" in completion_response: if "error" in completion_response:

View file

@@ -6,11 +6,14 @@ import time
from typing import Callable, Optional from typing import Callable, Optional
import litellm import litellm
from litellm.utils import ModelResponse, Choices, Message from litellm.utils import ModelResponse, Choices, Message
import httpx
class AlephAlphaError(Exception): class AlephAlphaError(Exception):
def __init__(self, status_code, message): def __init__(self, status_code, message):
self.status_code = status_code self.status_code = status_code
self.message = message self.message = message
self.request = httpx.Request(method="POST", url="https://api.aleph-alpha.com/complete")
self.response = httpx.Response(status_code=status_code, request=self.request)
super().__init__( super().__init__(
self.message self.message
) # Call the base class constructor with the parameters it needs ) # Call the base class constructor with the parameters it needs

View file

@@ -3,6 +3,7 @@ import json
import traceback import traceback
from typing import Optional from typing import Optional
import litellm import litellm
import httpx
try: try:
from async_generator import async_generator, yield_ # optional dependency from async_generator import async_generator, yield_ # optional dependency
@@ -14,6 +15,8 @@ class OllamaError(Exception):
def __init__(self, status_code, message): def __init__(self, status_code, message):
self.status_code = status_code self.status_code = status_code
self.message = message self.message = message
self.request = httpx.Request(method="POST", url="http://localhost:11434")
self.response = httpx.Response(status_code=status_code, request=self.request)
super().__init__( super().__init__(
self.message self.message
) # Call the base class constructor with the parameters it needs ) # Call the base class constructor with the parameters it needs