Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix: initial commit for litellm_proxy support with CRUD Endpoints
Commit 7fff83e441, parent 6b04b48b17
6 changed files with 197 additions and 9 deletions
@@ -20,8 +20,7 @@ from litellm.types.integrations.argilla import ArgillaItem
 from litellm.types.llms.openai import AllMessageValues, ChatCompletionRequest
 from litellm.types.utils import (
     AdapterCompletionStreamWrapper,
-    EmbeddingResponse,
-    ImageResponse,
+    LLMResponseTypes,
     ModelResponse,
     ModelResponseStream,
     StandardCallbackDynamicParams,
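The hunk above swaps the explicit EmbeddingResponse/ImageResponse imports for the LLMResponseTypes alias from litellm.types.utils. Below is a minimal sketch of what such an alias plausibly looks like; its exact members are an assumption inferred from the Union it replaces in the next hunk, not the library's actual definition.

# Hedged sketch only: the real LLMResponseTypes lives in litellm/types/utils.py
# and may include additional members beyond the ones assumed here.
from typing import Union

from litellm.types.utils import EmbeddingResponse, ImageResponse, ModelResponse

LLMResponseTypes = Union[ModelResponse, EmbeddingResponse, ImageResponse]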
@@ -223,7 +222,7 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac
         self,
         data: dict,
         user_api_key_dict: UserAPIKeyAuth,
-        response: Union[Any, ModelResponse, EmbeddingResponse, ImageResponse],
+        response: LLMResponseTypes,
     ) -> Any:
         pass
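This second hunk narrows the response parameter of the proxy success hook from an ad-hoc Union to LLMResponseTypes. The sketch below shows how a CustomLogger subclass might override that hook under the new annotation; the method name async_post_call_success_hook, the import paths, and the logging logic are assumptions based on the signature shown in the diff, not part of this commit.

# Hedged sketch of a proxy callback using the updated annotation; only the
# parameter list and the LLMResponseTypes type come from the diff above.
from typing import Any

from litellm.integrations.custom_logger import CustomLogger
from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.utils import LLMResponseTypes, ModelResponse


class AuditLogger(CustomLogger):
    async def async_post_call_success_hook(
        self,
        data: dict,
        user_api_key_dict: UserAPIKeyAuth,
        response: LLMResponseTypes,
    ) -> Any:
        # Narrow the union before touching chat-completion-specific fields.
        if isinstance(response, ModelResponse):
            print(f"model={response.model} key_alias={user_api_key_dict.key_alias}")
        return response

Narrowing with isinstance keeps the handler safe when the same hook fires for embedding or image responses covered by the alias.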