Revert "Disable telemetry in library client for now"

This reverts commit 176ebddf47.
Dinesh Yeduguru 2024-12-10 08:47:18 -08:00
parent 686f8d5b8d
commit f969b561ea

llama_stack/distribution/library_client.py

@@ -24,7 +24,7 @@ from termcolor import cprint
 from llama_stack.distribution.build import print_pip_install_help
 from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
-from llama_stack.distribution.datatypes import Api  # noqa
+from llama_stack.distribution.datatypes import Api
 from llama_stack.distribution.resolver import ProviderRegistry
 from llama_stack.distribution.server.endpoints import get_all_api_endpoints
 from llama_stack.distribution.stack import (
@@ -32,12 +32,11 @@ from llama_stack.distribution.stack import (
     get_stack_run_config_from_template,
     replace_env_vars,
 )
-from llama_stack.providers.utils.telemetry.tracing import (  # noqa
-    end_trace,  # noqa
-    setup_logger,  # noqa
-    start_trace,  # noqa
-)  # noqa
-
+from llama_stack.providers.utils.telemetry.tracing import (
+    end_trace,
+    setup_logger,
+    start_trace,
+)
 
 T = TypeVar("T")
@@ -248,8 +247,8 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             return False
 
         # Set up telemetry logger similar to server.py
-        # if Api.telemetry in self.impls:
-        #     setup_logger(self.impls[Api.telemetry])
+        if Api.telemetry in self.impls:
+            setup_logger(self.impls[Api.telemetry])
 
         console = Console()
         console.print(f"Using config [blue]{self.config_path_or_template_name}[/blue]:")
@@ -287,7 +286,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
     async def _call_non_streaming(
         self, path: str, body: dict = None, cast_to: Any = None
     ):
-        # await start_trace(path, {"__location__": "library_client"})
+        await start_trace(path, {"__location__": "library_client"})
         try:
             func = self.endpoint_impls.get(path)
             if not func:
@@ -296,11 +295,10 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             body = self._convert_body(path, body)
             return convert_pydantic_to_json_value(await func(**body), cast_to)
         finally:
-            pass
-            # await end_trace()
+            await end_trace()
 
     async def _call_streaming(self, path: str, body: dict = None, cast_to: Any = None):
-        # await start_trace(path, {"__location__": "library_client"})
+        await start_trace(path, {"__location__": "library_client"})
         try:
             func = self.endpoint_impls.get(path)
             if not func:
@@ -310,8 +308,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             async for chunk in await func(**body):
                 yield convert_pydantic_to_json_value(chunk, cast_to)
         finally:
-            pass
-            # await end_trace()
+            await end_trace()
 
     def _convert_body(self, path: str, body: Optional[dict] = None) -> dict:
         if not body:
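
For context, the change being restored brackets every library-client request in a start_trace()/end_trace() pair and hooks the telemetry provider into setup_logger(), mirroring what server.py does. Below is a minimal, self-contained sketch of that bracketing pattern; the local start_trace/end_trace stubs and fake_endpoint are hypothetical stand-ins for the real helpers imported from llama_stack.providers.utils.telemetry.tracing.

import asyncio
from typing import Any


async def start_trace(path: str, attributes: dict) -> None:
    # Stand-in for llama_stack.providers.utils.telemetry.tracing.start_trace
    print(f"trace start: {path} {attributes}")


async def end_trace() -> None:
    # Stand-in for llama_stack.providers.utils.telemetry.tracing.end_trace
    print("trace end")


async def fake_endpoint(**body: Any) -> dict:
    # Hypothetical endpoint implementation, used only for this sketch
    return {"ok": True, **body}


async def call_non_streaming(path: str, body: dict) -> dict:
    # Same shape as _call_non_streaming above: open the trace before
    # dispatching, and close it in `finally` even if the call raises.
    await start_trace(path, {"__location__": "library_client"})
    try:
        return await fake_endpoint(**body)
    finally:
        await end_trace()


asyncio.run(call_non_streaming("/inference/chat_completion", {"model": "demo"}))

The streaming path follows the same shape; because _call_streaming is an async generator, its finally block (and thus end_trace()) only runs once the consumer exhausts or closes the stream.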