Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-28 02:53:30 +00:00
Add windows support for build execution (#889)
# What does this PR do?

This PR adds Windows platform support for running `build_container.sh` from the terminal. It also resolves the "no support for termios and pty on Windows" issues.

- [x] Addresses issue (#issue)

Related issues: https://github.com/meta-llama/llama-stack/issues/826, https://github.com/meta-llama/llama-stack/issues/726

## Test Plan

Changes were tested manually by running the standard commands from the Llama guide:

- `llama stack build --template ollama --image-type container`
- `llama stack build --list-templates`
- `llama stack build`

## Sources

Please link relevant resources if necessary.

## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case).
- [x] Ran pre-commit to handle lint / formatting issues.
- [x] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
This commit is contained in: parent d123e9d3d7, commit 09299e908e.
2 changed files with 41 additions and 4 deletions.
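Before the diffs, a minimal sketch (not from this PR) of the underlying problem: `pty` and `termios` are Unix-only standard-library modules, so importing them at module level makes the whole file fail to import on Windows. Guarding or deferring those imports, as the commit does, keeps the module importable everywhere. The names below are illustrative, not the PR's code.

```python
# Illustrative sketch only (not the PR's code): guard Unix-only imports behind
# a platform check so the module itself stays importable on Windows.
import sys


def open_pty_or_none():
    """Return a (master, slave) pty pair on POSIX, or None on Windows."""
    if sys.platform.startswith("win"):
        # pty/termios do not exist on Windows; callers should use subprocess instead.
        return None
    import pty  # imported lazily, only on POSIX

    return pty.openpty()


print(open_pty_or_none())
```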
First changed file (the module that defines `build_image`):

@@ -21,7 +21,7 @@ from llama_stack.distribution.distribution import get_provider_registry
 from llama_stack.distribution.utils.config_dirs import BUILDS_BASE_DIR
-from llama_stack.distribution.utils.exec import run_command, run_with_pty
+from llama_stack.distribution.utils.exec import run_command, run_with_pty, run_with_win
 from llama_stack.providers.datatypes import Api

 log = logging.getLogger(__name__)

@@ -157,7 +157,9 @@ def build_image(
     is_terminal = sys.stdin.isatty()
     if is_terminal:
-        return_code = run_with_pty(args)
+        return_code = (
+            run_with_win(args) if sys.platform.startswith("win") else run_with_pty(args)
+        )
     else:
         return_code = run_command(args)
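For reference, a small illustrative example (not part of the commit) of how the platform check above behaves: `sys.platform` is `"win32"` on Windows (including 64-bit Python), `"linux"` on Linux, and `"darwin"` on macOS, so `startswith("win")` selects the Windows runner only there. The string return values stand in for the real functions.

```python
# Illustrative only: shows which runner the dispatch above would pick per platform.
import sys


def pick_runner(platform: str = sys.platform) -> str:
    # Mirrors the conditional in build_image(); strings stand in for
    # run_with_win / run_with_pty.
    return "run_with_win" if platform.startswith("win") else "run_with_pty"


assert pick_runner("win32") == "run_with_win"
assert pick_runner("linux") == "run_with_pty"
assert pick_runner("darwin") == "run_with_pty"
print(pick_runner())
```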
Second changed file (the `llama_stack.distribution.utils.exec` module):

@@ -7,12 +7,10 @@
 import errno
 import logging
 import os
-import pty
 import select
 import signal
 import subprocess
 import sys
-import termios

 log = logging.getLogger(__name__)

@@ -20,6 +18,9 @@ log = logging.getLogger(__name__)
 # run a command in a pseudo-terminal, with interrupt handling,
 # useful when you want to run interactive things
 def run_with_pty(command):
+    import pty
+    import termios
+
     master, slave = pty.openpty()

     old_settings = termios.tcgetattr(sys.stdin)

@@ -97,6 +98,40 @@ def run_with_pty(command):
     return process.returncode


+# run a command in a pseudo-terminal in windows, with interrupt handling,
+def run_with_win(command):
+    """
+    Alternative to run_with_pty for Windows platforms.
+    Runs a command with interactive support using subprocess directly.
+    """
+    try:
+        # For shell scripts on Windows, use appropriate shell
+        if isinstance(command, (list, tuple)):
+            if command[0].endswith(".sh"):
+                if os.path.exists("/usr/bin/bash"):  # WSL
+                    command = ["bash"] + command
+                else:
+                    # Use cmd.exe with bash while preserving all arguments
+                    command = ["cmd.exe", "/c", "bash"] + command
+
+        process = subprocess.Popen(
+            command,
+            shell=True,
+            universal_newlines=True,
+        )
+
+        process.wait()
+
+    except Exception as e:
+        print(f"Error: {str(e)}")
+        return 1
+    finally:
+        if process and process.poll() is None:
+            process.terminate()
+            process.wait()
+
+    return process.returncode
+
+
 def run_command(command):
     try:
         result = subprocess.run(command, capture_output=True, text=True, check=True)
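A note on the new `run_with_win`: if `subprocess.Popen` (or the argument handling before it) raises, `process` is never bound, so the `finally` cleanup and the trailing `return` would themselves fail with a `NameError`. Below is a more defensive sketch of the same idea, clearly not the PR's code: `process` is pre-bound, the exit code is returned from inside the `try`, and the argument list is passed without `shell=True` so the shell does not re-parse it. The function name is hypothetical.

```python
# Illustrative variation only (not the PR's code): plain-subprocess runner with
# `process` bound before the try block, so cleanup can never hit an unbound name.
import subprocess


def run_plain_subprocess(command) -> int:
    process = None
    try:
        process = subprocess.Popen(command, universal_newlines=True)
        process.wait()
        return process.returncode
    except Exception as e:
        print(f"Error: {e}")
        return 1
    finally:
        # Terminate the child if it is still running when we leave this function.
        if process is not None and process.poll() is None:
            process.terminate()
            process.wait()
```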