Mirror of https://github.com/BerriAI/litellm.git
adding support for completions endpoint in proxy
parent 8e2b139f8d
commit d334031108
5 changed files with 42 additions and 4 deletions
```diff
@@ -7,7 +7,8 @@ load_dotenv()
 @click.option('--api_base', default=None, help='API base URL.')
 @click.option('--model', required=True, help='The model name to pass to litellm expects')
 def run_server(port, api_base, model):
-    from .proxy_server import app, initialize
+    # from .proxy_server import app, initialize
+    from proxy_server import app, initialize
     initialize(model, api_base)
     try:
         import uvicorn
```
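For context: the change swaps the package-relative import for a direct module import (so `proxy_server` can be loaded when the CLI runs as a script) and calls `initialize(model, api_base)` before uvicorn starts the app. A minimal sketch of exercising the completions endpoint this commit adds, assuming the proxy listens on uvicorn's default `0.0.0.0:8000` and exposes a POST `/completions` route; neither the port nor the exact route is confirmed by this diff:

```python
# Hypothetical client for the proxy's completions endpoint.
# Assumptions (not confirmed by the diff): the proxy was started via the
# CLI (e.g. `litellm --model gpt-3.5-turbo`), listens on 0.0.0.0:8000,
# and accepts an OpenAI-style completions payload at POST /completions.
import requests

response = requests.post(
    "http://0.0.0.0:8000/completions",
    json={
        "model": "gpt-3.5-turbo",        # model name forwarded to litellm
        "prompt": "Say this is a test",  # classic completions-style prompt
    },
    timeout=30,
)
response.raise_for_status()
print(response.json())
```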