Mirror of https://github.com/BerriAI/litellm.git
Synced 2025-04-25 18:54:30 +00:00
refactor create_litellm_proxy_request_started_span
This commit is contained in:
parent f4b7b49e9a
commit de97cda445

4 changed files with 33 additions and 15 deletions
@@ -33,9 +33,12 @@ from litellm.types.utils import (
 if TYPE_CHECKING:
     from opentelemetry.trace import Span as _Span

+    from litellm.integrations.opentelemetry import OpenTelemetry
+
     Span = _Span
 else:
     Span = Any
+    OpenTelemetry = Any


 def showwarning(message, category, filename, lineno, file=None, line=None):
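The hunk above uses the standard typing.TYPE_CHECKING guard: the OpenTelemetry class is imported only while a static type checker analyses the file, and the name falls back to Any at runtime, so the proxy takes no import cost and no hard dependency. A minimal, self-contained sketch of the pattern, reusing the names from the diff (not the full litellm module):

from typing import TYPE_CHECKING, Any, Optional

if TYPE_CHECKING:
    # Only evaluated by static type checkers (mypy/pyright); never executed at runtime.
    from litellm.integrations.opentelemetry import OpenTelemetry
else:
    # Runtime fallback so the annotation below still resolves to a real object.
    OpenTelemetry = Any

# The checker sees Optional[OpenTelemetry]; the interpreter sees Optional[Any].
open_telemetry_logger: Optional[OpenTelemetry] = None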
@@ -777,7 +780,7 @@ disable_spend_logs = False
 jwt_handler = JWTHandler()
 prompt_injection_detection_obj: Optional[_OPTIONAL_PromptInjectionDetection] = None
 store_model_in_db: bool = False
-open_telemetry_logger: Optional[Any] = None
+open_telemetry_logger: Optional[OpenTelemetry] = None
 ### INITIALIZE GLOBAL LOGGING OBJECT ###
 proxy_logging_obj = ProxyLogging(
     user_api_key_cache=user_api_key_cache, premium_user=premium_user
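The second hunk only tightens the module-level annotation: with open_telemetry_logger typed as Optional[Any], a checker accepts any attribute access on the logger, whereas Optional[OpenTelemetry] lets it verify real usage. A hypothetical illustration with a stand-in class (FakeOTel and flush are invented for the example, not litellm APIs):

from typing import Any, Optional


class FakeOTel:
    """Stand-in for the real integration class, used only for illustration."""

    def flush(self) -> None:
        pass


loose: Optional[Any] = FakeOTel()
strict: Optional[FakeOTel] = FakeOTel()

if strict is not None:
    strict.flush()        # OK: flush() is declared on the annotated class
    # strict.flsuh()      # typo: rejected by mypy/pyright (attribute not defined)

if loose is not None:
    # loose.flsuh()       # the same typo passes silently, because Any disables checking
    pass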