Mirror of https://github.com/meta-llama/llama-stack.git (synced 2026-01-01 23:00:01 +00:00)
fix tracing and add tests
parent 714c09cd53
commit 518a5f898c
3 changed files with 170 additions and 16 deletions
@@ -14,19 +14,19 @@ def preserve_contexts_async_generator(
     gen: AsyncGenerator[T, None], context_vars: List[ContextVar]
 ) -> AsyncGenerator[T, None]:
     """
-    Wraps an async generator to preserve both tracing and headers context variables across iterations.
-    This is needed because we start a new asyncio event loop for each request, and we need to preserve the context
-    across the event loop boundary.
+    Wraps an async generator to preserve context variables across iterations.
+    This is needed because we start a new asyncio event loop for each streaming request,
+    and we need to preserve the context across the event loop boundary.
     """
-    context_values = [context_var.get() for context_var in context_vars]
 
     async def wrapper():
         while True:
-            for context_var, context_value in zip(context_vars, context_values, strict=False):
-                _ = context_var.set(context_value)
             try:
                 item = await gen.__anext__()
+                context_values = {context_var.name: context_var.get() for context_var in context_vars}
                 yield item
+                for context_var in context_vars:
+                    _ = context_var.set(context_values[context_var.name])
             except StopAsyncIteration:
                 break
 
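A minimal sketch of how the patched wrapper behaves, assuming the capture-after-__anext__ / restore-after-yield logic shown in the hunk above. The function is re-declared locally so the snippet runs on its own, and the names CURRENT_TRACE_CONTEXT, producer, and the span values are illustrative only; this is not the test file added by this commit.

# Hypothetical usage sketch, not the actual test added by this commit.
import asyncio
from contextvars import ContextVar
from typing import AsyncGenerator, List, TypeVar

T = TypeVar("T")

# Illustrative context variable standing in for e.g. a tracing span id.
CURRENT_TRACE_CONTEXT: ContextVar = ContextVar("trace_context", default=None)


def preserve_contexts_async_generator(
    gen: AsyncGenerator[T, None], context_vars: List[ContextVar]
) -> AsyncGenerator[T, None]:
    async def wrapper():
        while True:
            try:
                item = await gen.__anext__()
                # Capture the values the inner generator just set or updated ...
                context_values = {context_var.name: context_var.get() for context_var in context_vars}
                yield item
                # ... and re-apply them once the consumer resumes the wrapper.
                for context_var in context_vars:
                    _ = context_var.set(context_values[context_var.name])
            except StopAsyncIteration:
                break

    return wrapper()


async def main() -> None:
    async def producer():
        for i in range(3):
            CURRENT_TRACE_CONTEXT.set(f"span-{i}")  # producer updates the context per item
            yield i

    wrapped = preserve_contexts_async_generator(producer(), [CURRENT_TRACE_CONTEXT])
    async for item in wrapped:
        # The consumer observes the value the producer set for this item.
        assert CURRENT_TRACE_CONTEXT.get() == f"span-{item}"


asyncio.run(main())

The restore step after yield is what carries the values across the event loop boundary the docstring describes: when the consumer drives the wrapper again, the values captured from the producing side are re-applied before the next __anext__() call on the inner generator.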