Mirror of https://github.com/meta-llama/llama-stack.git
ci: error when a pipefails
The CI was failing but the error was eaten by the pipe. Now we run the task with pipefail.

Signed-off-by: Sébastien Han <seb@redhat.com>
parent 4bca4af3e4
commit e0cee4fd75

2 changed files with 8 additions and 4 deletions
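The commit message says the error was "eaten by the pipe": without `pipefail`, a shell pipeline's exit status is that of its last command, so a failing test command piped into something like `tee` still reports success. A minimal sketch of the difference, run through Python's `subprocess` (the `false | tee` pipeline is illustrative; the actual workflow change is not shown in this diff):

```python
import subprocess

# Without pipefail, the pipeline's status is the LAST command's (tee -> 0),
# so the failure of `false` is invisible to CI.
no_pipefail = subprocess.run(["bash", "-c", "false | tee /dev/null"])
print(no_pipefail.returncode)  # 0 -- the error is eaten by the pipe

# With `set -o pipefail`, any failing command fails the whole pipeline,
# so CI sees the real exit status.
with_pipefail = subprocess.run(["bash", "-c", "set -o pipefail; false | tee /dev/null"])
print(with_pipefail.returncode)  # 1 -- the failure propagates
```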
```diff
@@ -42,7 +42,7 @@ def start_llama_stack_server(config_name: str) -> subprocess.Popen:
     process = subprocess.Popen(
         cmd,
         stdout=devnull,  # redirect stdout to devnull to prevent deadlock
-        stderr=devnull,  # redirect stderr to devnull to prevent deadlock
+        stderr=subprocess.PIPE,  # keep stderr to see errors
         text=True,
         env={**os.environ, "LLAMA_STACK_LOG_FILE": "server.log"},
     )
```
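Note the asymmetry the inline comments describe: stdout stays on `devnull`, presumably because the server's regular output is large enough to fill an unread pipe, while stderr is switched to `subprocess.PIPE` so errors can be surfaced later. A minimal sketch, with a hypothetical noisy command, of the deadlock that the `devnull` redirect avoids:

```python
import subprocess

# A child that writes more than the OS pipe buffer (~64 KiB on Linux) to an
# unread PIPE blocks until someone drains it.
noisy = subprocess.Popen(
    ["bash", "-c", "yes | head -c 1000000"],  # hypothetical: ~1 MB on stdout
    stdout=subprocess.PIPE,
    text=True,
)
# noisy.wait()                # would deadlock: nothing drains stdout
out, _ = noisy.communicate()  # communicate() drains the pipe, so it is safe
print(len(out))               # 1000000
```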
```diff
@@ -57,6 +57,7 @@ def wait_for_server_ready(base_url: str, timeout: int = 30, process: subprocess.
     while time.time() - start_time < timeout:
         if process and process.poll() is not None:
             print(f"Server process terminated with return code: {process.returncode}")
+            print(f"Server stderr: {process.stderr.read()}")
             return False
 
         try:
```
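For context, a sketch of the polling loop this hunk sits in, reconstructed from the context lines above. The completed signature, the `requests`-based health check, and the `/health` endpoint are assumptions, not taken from the diff:

```python
import subprocess
import time

import requests  # assumed HTTP client for the readiness probe


def wait_for_server_ready(
    base_url: str,
    timeout: int = 30,
    process: subprocess.Popen | None = None,  # completed by assumption; the hunk header is truncated
) -> bool:
    """Poll until the server answers, its process dies, or the timeout expires."""
    start_time = time.time()
    while time.time() - start_time < timeout:
        # If the server process already exited, report why and stop waiting.
        if process and process.poll() is not None:
            print(f"Server process terminated with return code: {process.returncode}")
            # Safe to read here: the process has exited, so read() returns whatever
            # is buffered and then EOF. Requires the process to have been started
            # with stderr=subprocess.PIPE, which is what the first hunk changes.
            print(f"Server stderr: {process.stderr.read()}")
            return False

        try:
            # Hypothetical readiness probe; the real endpoint and check may differ.
            if requests.get(f"{base_url}/health", timeout=1).status_code == 200:
                return True
        except requests.RequestException:
            pass  # server not accepting connections yet; keep polling

        time.sleep(0.5)

    return False
```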