forked from phoenix/litellm-mirror
(fix) proxy cli --test
This commit is contained in:
parent 4dd7e2519f
commit 547f41071e
1 changed file with 0 additions and 4 deletions
@@ -158,10 +158,6 @@ def run_server(host, port, api_base, api_version, model, alias, add_key, headers
         for chunk in response:
             click.echo(f'LiteLLM: streaming response from proxy {chunk}')
 
-        # response = openai.Completion.create(model="gpt-3.5-turbo", prompt='this is a test request, write a short poem', stream=True)
-
-        for chunk in response:
-            click.echo(f'LiteLLM: streaming response from proxy {chunk}')
         return
     else:
         if headers:
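For context, the code path this commit cleans up streams a test completion from the locally running proxy and echoes each chunk. Below is a minimal sketch of that flow, assuming the pre-1.0 openai Python client; the proxy URL, API key placeholder, model name, and prompt are illustrative and not taken from this commit.

# Minimal sketch of the proxy --test streaming flow (assumptions: openai<1.0
# client, proxy listening locally; URL, key, model, and prompt are illustrative).
import click
import openai

openai.api_base = "http://0.0.0.0:8000"   # point the client at the local proxy
openai.api_key = "temp-key"               # placeholder; not validated in this sketch

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
    stream=True,
)

# Echo each streamed chunk, mirroring the single loop this commit keeps.
for chunk in response:
    click.echo(f"LiteLLM: streaming response from proxy {chunk}")

The commit itself only drops the commented-out Completion call and the duplicated streaming loop, leaving one loop followed by the early return.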