forked from phoenix/litellm-mirror
add AnthropicConfig
This commit is contained in:
parent
b3b1ff6882
commit
83a722a34b
1 changed files with 37 additions and 0 deletions
|
@ -8,6 +8,7 @@ import httpx
|
||||||
|
|
||||||
import litellm
|
import litellm
|
||||||
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
|
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
|
||||||
|
from litellm.llms.anthropic.chat.transformation import AnthropicConfig
|
||||||
from litellm.llms.vertex_ai_and_google_ai_studio.gemini.vertex_and_google_ai_studio_gemini import (
|
from litellm.llms.vertex_ai_and_google_ai_studio.gemini.vertex_and_google_ai_studio_gemini import (
|
||||||
VertexLLM,
|
VertexLLM,
|
||||||
)
|
)
|
||||||
|
@ -23,6 +24,9 @@ class PassThroughEndpointLogging:
|
||||||
"predict",
|
"predict",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Anthropic
|
||||||
|
self.TRACKED_ANTHROPIC_ROUTES = ["/messages"]
|
||||||
|
|
||||||
async def pass_through_async_success_handler(
|
async def pass_through_async_success_handler(
|
||||||
self,
|
self,
|
||||||
httpx_response: httpx.Response,
|
httpx_response: httpx.Response,
|
||||||
|
@ -45,6 +49,17 @@ class PassThroughEndpointLogging:
|
||||||
cache_hit=cache_hit,
|
cache_hit=cache_hit,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
)
|
)
|
||||||
|
elif self.is_anthropic_route(url_route):
|
||||||
|
await self.anthropic_passthrough_handler(
|
||||||
|
httpx_response=httpx_response,
|
||||||
|
logging_obj=logging_obj,
|
||||||
|
url_route=url_route,
|
||||||
|
result=result,
|
||||||
|
start_time=start_time,
|
||||||
|
end_time=end_time,
|
||||||
|
cache_hit=cache_hit,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
standard_logging_response_object = StandardPassThroughResponseObject(
|
standard_logging_response_object = StandardPassThroughResponseObject(
|
||||||
response=httpx_response.text
|
response=httpx_response.text
|
||||||
|
@ -76,6 +91,12 @@ class PassThroughEndpointLogging:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def is_anthropic_route(self, url_route: str):
    """Return True when *url_route* is a tracked Anthropic passthrough route.

    Matching is a plain substring test: the route is considered Anthropic
    if any entry of ``self.TRACKED_ANTHROPIC_ROUTES`` occurs anywhere in
    *url_route* (e.g. ``"/messages"`` matches ``"/v1/messages"``).
    """
    return any(tracked in url_route for tracked in self.TRACKED_ANTHROPIC_ROUTES)
|
||||||
|
|
||||||
def extract_model_from_url(self, url: str) -> str:
|
def extract_model_from_url(self, url: str) -> str:
|
||||||
pattern = r"/models/([^:]+)"
|
pattern = r"/models/([^:]+)"
|
||||||
match = re.search(pattern, url)
|
match = re.search(pattern, url)
|
||||||
|
@ -83,6 +104,22 @@ class PassThroughEndpointLogging:
|
||||||
return match.group(1)
|
return match.group(1)
|
||||||
return "unknown"
|
return "unknown"
|
||||||
|
|
||||||
|
async def anthropic_passthrough_handler(
    self,
    httpx_response: httpx.Response,
    logging_obj: LiteLLMLoggingObj,
    url_route: str,
    result: str,
    start_time: datetime,
    end_time: datetime,
    cache_hit: bool,
    **kwargs,
):
    """Success handler for Anthropic passthrough responses.

    Intended to transform the raw Anthropic response into an
    OpenAI-compatible response and build a standard logging object so the
    downstream logging pipeline can process it. Currently a stub — no
    transformation or logging is performed yet.
    """
    # NOTE(review): placeholder implementation; returns None like the
    # original `pass` body until the transformation logic lands.
    return None
|
||||||
|
|
||||||
async def vertex_passthrough_handler(
|
async def vertex_passthrough_handler(
|
||||||
self,
|
self,
|
||||||
httpx_response: httpx.Response,
|
httpx_response: httpx.Response,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue