Update llama_stack/distribution/server/server.py

Add a logger message per request.

Co-authored-by: Sébastien Han <seb@redhat.com>
Parent: b7072c3c67
Commit: ca2c46a6e3
1 changed file with 1 addition and 0 deletions
@@ -257,6 +257,7 @@ class TracingMiddleware:
             _, _, trace_path = find_matching_endpoint(scope.get("method", "GET"), path, self.endpoint_impls)
         except ValueError:
             # If no matching endpoint is found, pass through to FastAPI
+            logger.debug(f"No matching endpoint found for path: {path}, falling back to FastAPI")
             return await self.app(scope, receive, send)
 
         trace_context = await start_trace(trace_path, {"__location__": "server", "raw_path": path})
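For context, below is a minimal, self-contained sketch of how an ASGI tracing middleware of this shape handles the fallback path that the new debug line instruments. Only the try/except body mirrors the diff above; the constructor, the stub implementations of find_matching_endpoint and start_trace, and the rest of __call__ are assumptions for illustration, not the real server module.

    # Sketch only: stubs stand in for the real helpers referenced in the diff.
    import logging

    logger = logging.getLogger(__name__)


    def find_matching_endpoint(method, path, endpoint_impls):
        # Stub: the real helper resolves a registered route for the request
        # and raises ValueError when no endpoint matches the path.
        for route, trace_path in endpoint_impls.get(method, {}).items():
            if route == path:
                return None, route, trace_path
        raise ValueError(f"No endpoint for {method} {path}")


    async def start_trace(trace_path, attributes):
        # Stub: the real helper opens a trace span for the request.
        logger.debug(f"start_trace({trace_path!r}, {attributes!r})")
        return {"trace_path": trace_path, **attributes}


    class TracingMiddleware:
        def __init__(self, app, endpoint_impls):
            self.app = app
            self.endpoint_impls = endpoint_impls

        async def __call__(self, scope, receive, send):
            if scope.get("type") != "http":
                return await self.app(scope, receive, send)

            path = scope.get("path", "")
            try:
                _, _, trace_path = find_matching_endpoint(
                    scope.get("method", "GET"), path, self.endpoint_impls
                )
            except ValueError:
                # The added line: log the fallback once per unmatched request
                # at debug level, then hand the request straight to FastAPI.
                logger.debug(f"No matching endpoint found for path: {path}, falling back to FastAPI")
                return await self.app(scope, receive, send)

            # Trace teardown after the response is elided in this sketch.
            await start_trace(trace_path, {"__location__": "server", "raw_path": path})
            return await self.app(scope, receive, send)

Logging the fallback at debug level keeps per-request overhead negligible while making unmatched paths visible when the log level is lowered for troubleshooting.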