Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-14 17:16:09 +00:00)
fix: llama stack run starter in conda (#2679)
# What does this PR do?

`llama stack run starter` in a conda environment fails with '--config is required for venv and conda environments' because the distribution name is forwarded as --template, and start_stack.sh does not process the --template flag.

## Test Plan

`llama stack run starter`
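To illustrate the failure mode described above, here is a minimal Python stand-in, not llama-stack's actual code: it assumes, as the description says, that the launcher script only understands --config, so a value forwarded as --template is dropped and the required-config check fails. Names such as `launch` and the example config filename are hypothetical.

```python
# Hypothetical stand-in for start_stack.sh's argument handling (illustration only).
import argparse
import sys


def launch(argv):
    parser = argparse.ArgumentParser(prog="start_stack")
    parser.add_argument("--config", help="path to a run config YAML")
    # Unknown flags such as --template are silently dropped, mirroring a
    # launcher that never processes them.
    args, _unknown = parser.parse_known_args(argv)
    if args.config is None:
        sys.exit("--config is required for venv and conda environments")
    print(f"starting stack with config {args.config}")


if __name__ == "__main__":
    launch(["--template", "starter"])  # fails: --config is required ...
    # launch(["--config", "starter-run.yaml"])  # hypothetical working invocation
```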
This commit is contained in:
parent 7915551eee
commit 780b4c6eea
2 changed files with 40 additions and 34 deletions
@@ -197,7 +197,7 @@ def llama_stack_client(request, provider_data):
         server_process = start_llama_stack_server(config_name)
 
         # Wait for server to be ready
-        if not wait_for_server_ready(base_url, timeout=30, process=server_process):
+        if not wait_for_server_ready(base_url, timeout=120, process=server_process):
             print("Server failed to start within timeout")
             server_process.terminate()
             raise RuntimeError(
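For context on the timeout bump in the hunk above, a readiness helper like `wait_for_server_ready` typically polls a health endpoint until the deadline passes. The sketch below is an assumption about its behavior, not the test suite's actual implementation; the `/v1/health` path and the polling interval are guesses, while the `base_url`, `timeout`, and `process` parameters follow the call shown in the diff.

```python
# Rough sketch of a readiness poll with the same call signature as in the diff.
import time

import requests


def wait_for_server_ready(base_url, timeout=120, process=None, interval=1.0):
    deadline = time.time() + timeout
    while time.time() < deadline:
        # Give up early if the server process has already exited.
        if process is not None and process.poll() is not None:
            return False
        try:
            # Assumed health endpoint; the real route may differ.
            if requests.get(f"{base_url}/v1/health", timeout=2).status_code == 200:
                return True
        except requests.ConnectionError:
            pass  # Server not accepting connections yet; keep polling.
        time.sleep(interval)
    return False
```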