mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
update proxy cli

commit 09b8c08cad (parent c5bed0c9fd)
7 changed files with 92 additions and 35 deletions
@@ -6,9 +6,12 @@ load_dotenv()
 @click.option('--port', default=8000, help='Port to bind the server to.')
 @click.option('--api_base', default=None, help='API base URL.')
 @click.option('--model', required=True, help='The model name to pass to litellm expects')
-def run_server(port, api_base, model):
+@click.option('--debug', is_flag=True, help='To debug the input')
+@click.option('--temperature', default=None, type=float, help='Set temperature for the model')
+@click.option('--max_tokens', default=None, help='Set max tokens for the model')
+def run_server(port, api_base, model, debug, temperature, max_tokens):
     from .proxy_server import app, initialize
-    initialize(model, api_base)
+    initialize(model, api_base, debug, temperature, max_tokens)
     try:
         import uvicorn
     except:
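
The hunk threads three new options (--debug, --temperature, --max_tokens) from the CLI through run_server into proxy_server.initialize. A minimal sketch of what the matching change on the proxy_server side could look like is shown below, assuming initialize simply stores the options as module-level settings; the user_* names and the body are illustrative assumptions, not the actual litellm code.

# Hypothetical counterpart in proxy_server.py; the real litellm implementation
# may store or use these options differently.
user_model = None
user_api_base = None
user_debug = False
user_temperature = None
user_max_tokens = None

def initialize(model, api_base, debug, temperature, max_tokens):
    # Persist the CLI options as module-level settings that the request
    # handlers can read later when building completion calls.
    global user_model, user_api_base, user_debug, user_temperature, user_max_tokens
    user_model = model
    user_api_base = api_base
    user_debug = debug
    user_temperature = temperature
    user_max_tokens = max_tokens

With the flags wired through, an invocation along the lines of python proxy_cli.py --model gpt-3.5-turbo --debug --temperature 0.7 --max_tokens 256 would pass the values straight to initialize; the exact entry-point name depends on how the package exposes the CLI.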