From 09299e908e31d4b95cf75c2fb2261d4c78ac5a2e Mon Sep 17 00:00:00 2001
From: Vladislav Bronzov <58587565+VladOS95-cyber@users.noreply.github.com>
Date: Tue, 28 Jan 2025 16:41:41 +0100
Subject: [PATCH] Add windows support for build execution (#889)

# What does this PR do?

This PR implements windows platform support for build_container.sh execution
from the terminal. Additionally, it resolves "no support for Terminos and PTY
for Window PC" issues.

- [x] Addresses issue (#issue)

Related issues:
https://github.com/meta-llama/llama-stack/issues/826,
https://github.com/meta-llama/llama-stack/issues/726

## Test Plan

Changes were tested manually by executing standard scripts from the Llama
guide:
- llama stack build --template ollama --image-type container
- llama stack build --list-templates
- llama stack build

## Sources

Please link relevant resources if necessary.

## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the other
      checks if that's the case).
- [x] Ran pre-commit to handle lint / formatting issues.
- [x] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md),
      Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
---
 llama_stack/distribution/build.py      |  6 ++--
 llama_stack/distribution/utils/exec.py | 39 ++++++++++++++++++++++++--
 2 files changed, 41 insertions(+), 4 deletions(-)

diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index a29c8d5d1..1600f54cb 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -21,7 +21,7 @@
 from llama_stack.distribution.distribution import get_provider_registry
 from llama_stack.distribution.utils.config_dirs import BUILDS_BASE_DIR
-from llama_stack.distribution.utils.exec import run_command, run_with_pty
+from llama_stack.distribution.utils.exec import run_command, run_with_pty, run_with_win
 from llama_stack.providers.datatypes import Api
 
 log = logging.getLogger(__name__)
@@ -157,7 +157,9 @@ def build_image(
 
     is_terminal = sys.stdin.isatty()
     if is_terminal:
-        return_code = run_with_pty(args)
+        return_code = (
+            run_with_win(args) if sys.platform.startswith("win") else run_with_pty(args)
+        )
     else:
         return_code = run_command(args)
 
diff --git a/llama_stack/distribution/utils/exec.py b/llama_stack/distribution/utils/exec.py
index 9b4d0acee..65cdfeea4 100644
--- a/llama_stack/distribution/utils/exec.py
+++ b/llama_stack/distribution/utils/exec.py
@@ -7,12 +7,10 @@
 import errno
 import logging
 import os
-import pty
 import select
 import signal
 import subprocess
 import sys
-import termios
 
 
 log = logging.getLogger(__name__)
@@ -20,6 +18,9 @@ log = logging.getLogger(__name__)
 
 # run a command in a pseudo-terminal, with interrupt handling,
 # useful when you want to run interactive things
 def run_with_pty(command):
+    import pty
+    import termios
+
     master, slave = pty.openpty()
     old_settings = termios.tcgetattr(sys.stdin)
@@ -97,6 +98,40 @@ def run_with_pty(command):
     return process.returncode
 
 
+# run a command in a pseudo-terminal in windows, with interrupt handling,
+def run_with_win(command):
+    """Alternative to run_with_pty for Windows platforms.
+    Runs a command with interactive support using subprocess directly.
+    """
+    process = None  # ensure the finally block is safe if Popen itself raises
+    try:
+        # For shell scripts on Windows, use appropriate shell
+        if isinstance(command, (list, tuple)):
+            if command[0].endswith(".sh"):
+                if os.path.exists("/usr/bin/bash"):  # WSL
+                    command = ["bash"] + command
+                else:
+                    # Use cmd.exe with bash while preserving all arguments
+                    command = ["cmd.exe", "/c", "bash"] + command
+
+        process = subprocess.Popen(
+            command,
+            shell=True,
+            text=True,
+        )
+
+        process.wait()
+
+    except Exception as e:
+        print(f"Error: {str(e)}")
+        return 1
+    finally:
+        if process and process.poll() is None:
+            process.terminate()
+            process.wait()
+    return process.returncode
+
+
 def run_command(command):
     try:
         result = subprocess.run(command, capture_output=True, text=True, check=True)