Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-28 02:53:30 +00:00)
Small refactor for run_with_pty
parent 8332ea23ad
commit aee6237685
3 changed files with 13 additions and 10 deletions
@@ -15,9 +15,16 @@ import sys
 
 log = logging.getLogger(__name__)
 
 
+def run_with_pty(command):
+    if sys.platform.startswith("win"):
+        return _run_with_pty_win(command)
+    else:
+        return _run_with_pty_unix(command)
+
+
 # run a command in a pseudo-terminal, with interrupt handling,
 # useful when you want to run interactive things
-def run_with_pty(command):
+def _run_with_pty_unix(command):
     import pty
     import termios

@@ -99,9 +106,8 @@ def run_with_pty(command):
 
 
 # run a command in a pseudo-terminal in windows, with interrupt handling,
-def run_with_win(command):
+def _run_with_pty_win(command):
     """
     Alternative to run_with_pty for Windows platforms.
     Runs a command with interactive support using subprocess directly.
     """
     try:
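For readers who want the shape of the result rather than the diff, here is a minimal sketch of the module layout after this refactor. Only the dispatcher and the two function names/docstrings come from the hunks above; the helper bodies (the `pty.spawn` call, the `subprocess.Popen` handling, the interrupt behavior) are illustrative assumptions, not the repository's actual implementation.

```python
import logging
import subprocess
import sys

log = logging.getLogger(__name__)


def run_with_pty(command):
    # Platform dispatch introduced by this commit: pty-based path on POSIX,
    # subprocess-based path on Windows.
    if sys.platform.startswith("win"):
        return _run_with_pty_win(command)
    else:
        return _run_with_pty_unix(command)


# run a command in a pseudo-terminal, with interrupt handling,
# useful when you want to run interactive things
def _run_with_pty_unix(command):
    # Sketch only: the diff shows just these imports; the real helper also
    # manages terminal modes (termios) and interrupt handling.
    import pty
    import termios  # noqa: F401  -- used by the full implementation

    return pty.spawn(command)


def _run_with_pty_win(command):
    """
    Alternative to run_with_pty for Windows platforms.
    Runs a command with interactive support using subprocess directly.
    """
    # Sketch only: attach the child to the current console and wait for it.
    process = subprocess.Popen(command)
    try:
        return process.wait()
    except KeyboardInterrupt:
        process.terminate()
        raise
```

With this layout, callers keep invoking run_with_pty(command) and never need to branch on the platform themselves.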