forked from phoenix/litellm-mirror
fix proxy
This commit is contained in:
parent 0acde1c72d
commit c1fce0859c
9 changed files with 10 additions and 3 deletions
Binary files added (vendored; contents not shown):
dist/litellm-0.1.7712-py3-none-any.whl
dist/litellm-0.1.7712.tar.gz
dist/litellm-0.1.7713-py3-none-any.whl
dist/litellm-0.1.7713.tar.gz
@@ -322,4 +322,4 @@ from .exceptions import (
 )
 from .budget_manager import BudgetManager
-from .proxy_server.proxy_cli import run_server
+from .proxy.proxy_cli import run_server
Binary file not shown.
@@ -7,7 +7,7 @@ load_dotenv()
 @click.option('--api_base', default=None, help='API base URL.')
 @click.option('--model', required=True, help='The model name to pass to litellm expects')
 def run_server(port, api_base, model):
-    from proxy_server import app, initialize
+    from .proxy_server import app, initialize
     initialize(model, api_base)
     try:
         import uvicorn
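For orientation, a minimal sketch of what the command body amounts to once the relative import resolves; the litellm.proxy.proxy_server module path is inferred from the import fix above, and the model string, host, and port are assumptions for illustration rather than values from this commit.

# Rough programmatic equivalent of run_server (module path, model, host, and port assumed):
import uvicorn
from litellm.proxy.proxy_server import app, initialize

initialize(model="gpt-3.5-turbo", api_base=None)  # hypothetical arguments
uvicorn.run(app, host="0.0.0.0", port=8000)       # the CLI imports uvicorn inside a try block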
@@ -1,3 +1,4 @@
+import litellm
 from fastapi import FastAPI, Request
 from fastapi.responses import StreamingResponse
 import json
@@ -11,6 +12,12 @@ def initialize(model, api_base):
     user_model = model
     user_api_base = api_base
 
+
+# for streaming
+def data_generator(response):
+    for chunk in response:
+        yield f"data: {json.dumps(chunk)}\n\n"
+
 @app.get("/models") # if project requires model list
 def model_list():
     return dict(
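The data_generator helper added above frames each streamed chunk as a server-sent-events record ("data: {...}\n\n"). A minimal sketch of how it would typically be wired into the already-imported StreamingResponse; the route path, request handling, and the litellm.completion call are assumptions for illustration, not part of this hunk.

# Hypothetical streaming route built on the helper above (route path and kwargs assumed):
@app.post("/chat/completions")
async def chat_completion(request: Request):
    data = await request.json()
    response = litellm.completion(**data)  # assumes the posted JSON maps to completion() kwargs
    if data.get("stream"):
        # data_generator yields "data: ...\n\n" lines, i.e. text/event-stream framing
        return StreamingResponse(data_generator(response), media_type="text/event-stream")
    return response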
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.771"
+version = "0.1.7713"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"