Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-27 18:50:41 +00:00)
Disable telemetry in library client for now
This commit is contained in:
parent baae4f7b51
commit 176ebddf47

1 changed file with 15 additions and 12 deletions
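For orientation, a minimal sketch of how this library client is typically driven (the module path and the "ollama" template name are assumptions for illustration, not taken from this diff). The `initialize()` method is where the telemetry logger was being set up before this change:

# Hypothetical usage sketch; module path and template name are assumptions.
import asyncio

from llama_stack.distribution.library_client import AsyncLlamaStackAsLibraryClient

async def main() -> None:
    client = AsyncLlamaStackAsLibraryClient("ollama")
    # initialize() resolves providers; it previously also called
    # setup_logger(self.impls[Api.telemetry]), which this commit comments out.
    await client.initialize()

asyncio.run(main())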
@@ -24,7 +24,7 @@ from termcolor import cprint

 from llama_stack.distribution.build import print_pip_install_help
 from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
-from llama_stack.distribution.datatypes import Api
+from llama_stack.distribution.datatypes import Api  # noqa
 from llama_stack.distribution.resolver import ProviderRegistry
 from llama_stack.distribution.server.endpoints import get_all_api_endpoints
 from llama_stack.distribution.stack import (
@@ -32,11 +32,12 @@ from llama_stack.distribution.stack import (
     get_stack_run_config_from_template,
     replace_env_vars,
 )
-from llama_stack.providers.utils.telemetry.tracing import (
-    end_trace,
-    setup_logger,
-    start_trace,
-)
+
+from llama_stack.providers.utils.telemetry.tracing import (  # noqa
+    end_trace,  # noqa
+    setup_logger,  # noqa
+    start_trace,  # noqa
+)  # noqa

 T = TypeVar("T")

@@ -247,8 +248,8 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             return False

         # Set up telemetry logger similar to server.py
-        if Api.telemetry in self.impls:
-            setup_logger(self.impls[Api.telemetry])
+        # if Api.telemetry in self.impls:
+        #     setup_logger(self.impls[Api.telemetry])

         console = Console()
         console.print(f"Using config [blue]{self.config_path_or_template_name}[/blue]:")
@@ -286,7 +287,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
     async def _call_non_streaming(
         self, path: str, body: dict = None, cast_to: Any = None
     ):
-        await start_trace(path, {"__location__": "library_client"})
+        # await start_trace(path, {"__location__": "library_client"})
         try:
             func = self.endpoint_impls.get(path)
             if not func:
@@ -295,10 +296,11 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             body = self._convert_body(path, body)
             return convert_pydantic_to_json_value(await func(**body), cast_to)
         finally:
-            await end_trace()
+            pass
+            # await end_trace()

     async def _call_streaming(self, path: str, body: dict = None, cast_to: Any = None):
-        await start_trace(path, {"__location__": "library_client"})
+        # await start_trace(path, {"__location__": "library_client"})
         try:
             func = self.endpoint_impls.get(path)
             if not func:
@@ -308,7 +310,8 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             async for chunk in await func(**body):
                 yield convert_pydantic_to_json_value(chunk, cast_to)
         finally:
-            await end_trace()
+            pass
+            # await end_trace()

     def _convert_body(self, path: str, body: Optional[dict] = None) -> dict:
         if not body:
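The calls being disabled follow a wrap-the-call tracing pattern. A minimal standalone sketch of that pattern, using the real start_trace/end_trace helpers from the import block above (the traced_call wrapper itself is hypothetical, written only to show the shape of what was removed):

from typing import Any, Callable

from llama_stack.providers.utils.telemetry.tracing import end_trace, start_trace

async def traced_call(path: str, func: Callable, **body: Any) -> Any:
    # Open a trace span named after the endpoint path, mirroring what
    # _call_non_streaming and _call_streaming did before this commit.
    await start_trace(path, {"__location__": "library_client"})
    try:
        return await func(**body)
    finally:
        # Always close the span, even if the endpoint call raises.
        await end_trace()

Note the change comments out start_trace and end_trace in place (leaving a bare pass in the finally block) rather than deleting the try/finally structure, so telemetry can be re-enabled later by uncommenting each call site.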