Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-05 10:13:05 +00:00
test publish

commit bfb9848074, parent 0aab982d64

3 changed files with 5 additions and 8 deletions
.github/workflows/publish-to-docker.yml (vendored, 4 changed lines)
@@ -31,3 +31,7 @@ jobs:
         run: |
           llama stack build --template ollama --image-type docker
           docker images
+
+      - name: Push to dockerhub
+        run: |
+          docker push localhost/distribution-ollama llamastack/distribution-ollama:0.0.63-test
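The added step pushes the image built by "llama stack build --template ollama --image-type docker" to Docker Hub. For reference, the same sequence could be driven from a small local script; the sketch below is not part of the commit, only the two CLI invocations are taken from the diff, and the explicit "docker tag" before the push is an assumption for a plain Docker CLI (the workflow's own push line passes both the source and the destination name to a single command).

# Hypothetical local reproduction of the workflow steps above; only the two CLI
# invocations come from the diff, the wrapper script itself is an assumption.
import subprocess
import sys


def run(cmd):
    # Echo the command, run it with inherited stdout/stderr, and stop on the first failure.
    print("+", " ".join(cmd))
    if subprocess.run(cmd).returncode != 0:
        sys.exit(1)


if __name__ == "__main__":
    run(["llama", "stack", "build", "--template", "ollama", "--image-type", "docker"])
    run(["docker", "images"])
    # Assumed tag-then-push for a plain Docker CLI; names must match what the build produced.
    run(["docker", "tag", "localhost/distribution-ollama", "llamastack/distribution-ollama:0.0.63-test"])
    run(["docker", "push", "llamastack/distribution-ollama:0.0.63-test"])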
@@ -153,7 +153,6 @@ def build_image(
         return_code = run_with_pty(args)
     else:
         return_code = run_command(args)
-    print("return code", return_code)
 
     if return_code != 0:
         log.error(
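This hunk drops a debug print of the build command's return code; the surrounding logic, which runs the command either through a PTY helper or through run_command and then checks for a non-zero exit, is unchanged. As a reading aid, here is a minimal, self-contained sketch of that control flow; the helper bodies, the function signature, and the log message are assumptions, only the overall shape mirrors the diff.

# Minimal sketch of the control flow around the hunk above (not the project's
# actual implementation; helper bodies and the log message are assumed).
import logging
import os
import pty
import subprocess

log = logging.getLogger(__name__)


def run_with_pty(command):
    # Run attached to a pseudo-terminal so the child sees an interactive TTY (POSIX-only).
    return os.waitstatus_to_exitcode(pty.spawn(command))


def run_command(command):
    # Captured-output path, analogous to the helper shown in the next hunk.
    result = subprocess.run(command, capture_output=True, text=True)
    print(result.stdout)
    return result.returncode


def build_image_sketch(args, interactive=True):
    if interactive:
        return_code = run_with_pty(args)
    else:
        return_code = run_command(args)
    # The commit removes a debug print of the return code here; the error check stays.
    if return_code != 0:
        log.error("command %s failed with return code %s", args, return_code)
    return return_code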
@@ -101,15 +101,9 @@ def run_with_pty(command):
 def run_command(command):
     try:
         result = subprocess.run(command, capture_output=True, text=True, check=True)
-        print("Script Output:", result.stdout)
+        print("Script Output\n", result.stdout)
         return result.returncode
     except subprocess.CalledProcessError as e:
         print("Error running script:", e)
         print("Error output:", e.stderr)
         return e.returncode
-    # process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    # output, error = process.communicate()
-    # if process.returncode != 0:
-    #     log.error(f"Error: {error.decode('utf-8')}")
-    #     sys.exit(1)
-    # return output.decode("utf-8")
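Because subprocess.run is called with check=True, a non-zero exit status raises CalledProcessError, so failures are reported through the except branch while the child's exit code is still returned to the caller; the commit also deletes the commented-out Popen variant. Below is a standalone copy of the helper with a tiny self-check; the __main__ block and the use of the POSIX true and false commands are assumptions for illustration.

# Standalone copy of the helper above with a small self-check.
import subprocess


def run_command(command):
    try:
        result = subprocess.run(command, capture_output=True, text=True, check=True)
        print("Script Output\n", result.stdout)
        return result.returncode
    except subprocess.CalledProcessError as e:
        print("Error running script:", e)
        print("Error output:", e.stderr)
        return e.returncode


if __name__ == "__main__":
    assert run_command(["true"]) == 0   # success path: output printed, 0 returned
    assert run_command(["false"]) != 0  # failure path: CalledProcessError handled, exit code returned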