fix linting

This commit is contained in:
parent 8ede32f91d
commit e09db615aa

1 changed file with 9 additions and 7 deletions
@@ -111,18 +111,20 @@ class PassThroughEndpointLogging:
             model = self.extract_model_from_url(url_route)
             _json_response = httpx_response.json()
 
-            litellm_model_response = await transform_vertex_response_to_openai(
-                response=_json_response,
-                model=model,
-                model_response=litellm.EmbeddingResponse(),
+            litellm_embedding_response: litellm.EmbeddingResponse = (
+                await transform_vertex_response_to_openai(
+                    response=_json_response,
+                    model=model,
+                    model_response=litellm.EmbeddingResponse(),
+                )
             )
 
-            litellm_model_response.model = model
-            logging_obj.model = litellm_model_response.model
+            litellm_embedding_response.model = model
+            logging_obj.model = litellm_embedding_response.model
             logging_obj.model_call_details["model"] = logging_obj.model
 
             await logging_obj.async_success_handler(
-                result=litellm_model_response,
+                result=litellm_embedding_response,
                 start_time=start_time,
                 end_time=end_time,
                 cache_hit=cache_hit,
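For context, the change renames the transformed pass-through result and annotates it explicitly as an embedding response before it is used for logging. Below is a minimal, self-contained sketch of that code path; EmbeddingResponse, LoggingObj, transform_response, and handle_embedding_passthrough are hypothetical stand-ins for the litellm objects, not the library's actual classes or signatures.

import asyncio
from dataclasses import dataclass, field
from typing import Any, Dict, Optional


@dataclass
class EmbeddingResponse:
    # Stand-in for litellm.EmbeddingResponse; only the field this path touches.
    model: Optional[str] = None


@dataclass
class LoggingObj:
    # Stand-in for the proxy logging object used by the pass-through handler.
    model: Optional[str] = None
    model_call_details: Dict[str, Any] = field(default_factory=dict)

    async def async_success_handler(self, result: EmbeddingResponse, **kwargs: Any) -> None:
        print(f"logged success for model={result.model}")


async def transform_response(response: dict, model: str) -> EmbeddingResponse:
    # Stand-in for the transform step (transform_vertex_response_to_openai in the diff).
    return EmbeddingResponse()


async def handle_embedding_passthrough(raw: dict, model: str, logging_obj: LoggingObj) -> None:
    # Annotate the transformed result explicitly, mirroring the commit's fix,
    # so the rest of the handler works with a single concrete response type.
    embedding_response: EmbeddingResponse = await transform_response(raw, model)

    embedding_response.model = model
    logging_obj.model = embedding_response.model
    logging_obj.model_call_details["model"] = logging_obj.model

    await logging_obj.async_success_handler(result=embedding_response)


if __name__ == "__main__":
    asyncio.run(handle_embedding_passthrough({"predictions": []}, "text-embedding-model", LoggingObj()))

The explicit annotation makes the type of the variable unambiguous for the linter, so the later `.model` assignment and the `result=` argument to the success handler are checked against one concrete response type rather than an inferred one.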