cli deploy

This commit is contained in:
ishaan-jaff 2023-09-30 15:44:58 -07:00
parent 16942458a2
commit 0c640ab5ef

View file

@ -54,33 +54,47 @@ def initialize(model, api_base, debug, temperature, max_tokens, telemetry):
usage_telemetry()
def deploy_proxy(model, api_base, debug, temperature, max_tokens, telemetry, deploy):
    """Deploy the current proxy configuration to the LiteLLM deploy endpoint.

    Serializes the supplied settings plus the current process environment into
    a local ``.env`` file, uploads that file together with the settings to
    ``https://api.litellm.ai/deploy``, and returns the deployment URL reported
    by the service.

    Args:
        model: Model identifier to deploy.
        api_base: Base URL of the model's API.
        debug: Debug flag forwarded to the deployment.
        temperature: Sampling temperature forwarded to the deployment.
        max_tokens: Max-token limit forwarded to the deployment.
        telemetry: Accepted for interface compatibility; not used here.
        deploy: Accepted for interface compatibility; not used here.

    Returns:
        The deployment URL on success, or an error-message string when the
        HTTP request does not return status 200.
    """
    import requests

    # Settings that are both POSTed as form data and written into .env.
    data = {
        "model": model,
        "api_base": api_base,
        "debug": debug,
        "temperature": temperature,
        "max_tokens": max_tokens,
    }

    url = "https://api.litellm.ai/deploy"
    # url = "http://0.0.0.0:4000/deploy"  # local testing endpoint

    # NOTE(review): this writes EVERY environment variable — including any
    # secrets present in the environment — into the uploaded file; confirm
    # that sending the full environment to the deploy service is intended.
    with open(".env", "w") as env_file:
        for key, value in data.items():
            env_file.write(f"{key.upper()}='{value}'\n")
        env_file.write("\n\n")
        for key, value in os.environ.items():
            env_file.write(f"{key}='{value}'\n")

    # Open the upload inside a context manager so the handle is always
    # closed (the original leaked the open(".env", "rb") file object),
    # and bound the request so a hung endpoint cannot block forever.
    with open(".env", "rb") as env_upload:
        response = requests.post(
            url, data=data, files={"file": env_upload}, timeout=60
        )

    # Preserve the original contract: return an error string on failure
    # rather than raising.
    if response.status_code != 200:
        return f"Request to url: {url} failed with status: {response.status_code}"

    response_data = response.json()
    return response_data["url"]