feat(proxy_server.py): return litellm version in response headers

Krrish Dholakia 2024-05-08 16:00:08 -07:00
parent 80378966a0
commit 6575143460
50 changed files with 260 additions and 140 deletions
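Only the import-annotation hunks are reproduced below; the headline change itself (attaching the litellm version to proxy responses) is not part of this excerpt. As a rough, non-authoritative sketch of how such a header could be set in a FastAPI-based proxy, assuming an `x-litellm-version` header name and an `importlib.metadata` lookup (both assumptions for illustration, not the commit's actual code):

```python
# Sketch only: not the commit's actual implementation.
from importlib import metadata

from fastapi import FastAPI, Request

app = FastAPI()

try:
    # Resolve the installed litellm version once at startup.
    LITELLM_VERSION = metadata.version("litellm")
except metadata.PackageNotFoundError:
    LITELLM_VERSION = "unknown"


@app.middleware("http")
async def add_version_header(request: Request, call_next):
    # Attach the version to every response; the header name
    # "x-litellm-version" is assumed here for illustration.
    response = await call_next(request)
    response.headers["x-litellm-version"] = LITELLM_VERSION
    return response
```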

@@ -1,5 +1,5 @@
 from typing import Optional, Union, Any
-import types, requests
+import types, requests # type: ignore
 from .base import BaseLLM
 from litellm.utils import (
     ModelResponse,
@@ -12,7 +12,7 @@ from litellm.utils import (
 from typing import Callable, Optional, BinaryIO
 from litellm import OpenAIConfig
 import litellm, json
-import httpx
+import httpx # type: ignore
 from .custom_httpx.azure_dall_e_2 import CustomHTTPTransport, AsyncCustomHTTPTransport
 from openai import AzureOpenAI, AsyncAzureOpenAI
 import uuid