Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
build: adding debug logs to gitignore

parent c52861906b
commit c4aea7432f

3 changed files with 7 additions and 6 deletions
.gitignore (vendored): 1 addition

@@ -16,3 +16,4 @@ router_config.yaml
 litellm_server/config.yaml
 litellm/proxy/_secret_config.yaml
 .aws-sam/
+litellm/tests/aiologs.log
@@ -176,13 +176,13 @@ class Router:
             ########## remove -ModelID-XXXX from model ##############
             original_model_string = data["model"]
             # Find the index of "ModelID" in the string
+            self.print_verbose(f"completion model: {original_model_string}")
             index_of_model_id = original_model_string.find("-ModelID")
             # Remove everything after "-ModelID" if it exists
             if index_of_model_id != -1:
                 data["model"] = original_model_string[:index_of_model_id]
             else:
                 data["model"] = original_model_string
-            self.print_verbose(f"completion model: {data['model']}")
             return litellm.completion(**{**data, "messages": messages, "caching": self.cache_responses, **kwargs})
         except Exception as e:
             raise e
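For readers skimming the hunk above: it moves the router's debug print earlier so the original deployment string (with its "-ModelID-XXXX" suffix still attached) is logged before the suffix is stripped from data["model"]. The standalone sketch below shows the same suffix-stripping logic in isolation; the helper name and sample strings are invented for illustration and are not litellm code.

# Standalone sketch (not litellm code): the same "-ModelID" suffix stripping
# that the hunk applies to data["model"] before calling litellm.completion().
def strip_model_id_suffix(model: str) -> str:  # hypothetical helper name
    index_of_model_id = model.find("-ModelID")
    # Remove everything after "-ModelID" if it exists
    if index_of_model_id != -1:
        return model[:index_of_model_id]
    return model

# Example inputs are invented for illustration.
print(strip_model_id_suffix("azure/gpt-35-turbo-ModelID-1234"))  # -> azure/gpt-35-turbo
print(strip_model_id_suffix("gpt-3.5-turbo"))                    # -> gpt-3.5-turbo (unchanged)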
@@ -30,21 +30,21 @@
 # "model_name": "azure-model",
 # "litellm_params": {
 # "model": "azure/gpt-turbo",
-# "api_key": "6a5ae4c5b2bd4e8088248067799c6899",
+# "api_key": "os.environ/AZURE_FRANCE_API_KEY",
 # "api_base": "https://openai-france-1234.openai.azure.com"
 # }
 # }, {
 # "model_name": "azure-model",
 # "litellm_params": {
 # "model": "azure/gpt-35-turbo",
-# "api_key": "fe5b390e8990407e8d913f40833b19f7",
+# "api_key": "os.environ/AZURE_EUROPE_API_KEY",
 # "api_base": "https://my-endpoint-europe-berri-992.openai.azure.com"
 # }
 # }, {
 # "model_name": "azure-model",
 # "litellm_params": {
 # "model": "azure/gpt-35-turbo",
-# "api_key": "6a0f46e99d554e8caad9c2b7c0ba7319",
+# "api_key": "os.environ/AZURE_CANADA_API_KEY",
 # "api_base": "https://my-endpoint-canada-berri992.openai.azure.com"
 # }
 # }]
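The hunk above replaces hardcoded Azure API keys in a commented-out model list with "os.environ/<VAR_NAME>" references, so secrets come from the environment rather than the repo. As a rough sketch of that convention, such references can be expanded at runtime as shown below; the resolver is a hypothetical helper written for this note, not litellm's loader.

import os

# Hypothetical helper (not litellm's implementation): expand string values of
# the form "os.environ/<VAR_NAME>" into the value of that environment variable.
def resolve_env_refs(litellm_params: dict) -> dict:
    resolved = {}
    for key, value in litellm_params.items():
        if isinstance(value, str) and value.startswith("os.environ/"):
            env_var = value.split("/", 1)[1]           # e.g. "AZURE_FRANCE_API_KEY"
            resolved[key] = os.environ.get(env_var)    # None if the variable is unset
        else:
            resolved[key] = value
    return resolved

# Usage with one entry from the (commented-out) model list above.
params = resolve_env_refs({
    "model": "azure/gpt-turbo",
    "api_key": "os.environ/AZURE_FRANCE_API_KEY",
    "api_base": "https://openai-france-1234.openai.azure.com",
})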
@@ -64,7 +64,7 @@
 
 # async def loadtest_fn():
 # start = time.time()
-# n = 10
+# n = 100
 # tasks = [router_completion() for _ in range(n)]
 # chat_completions = await asyncio.gather(*tasks)
 # successful_completions = [c for c in chat_completions if c is not None]
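The last hunk only bumps the commented-out load test from 10 to 100 concurrent requests. For reference, a minimal runnable version of that pattern is sketched below; router_completion() is a stub here (the original presumably awaits a litellm Router call), and only the gather-and-count-successes shape is taken from the snippet above.

import asyncio
import time

# Stub standing in for the real request; the original presumably awaits a
# litellm Router completion. Return None to simulate a failed request.
async def router_completion():
    await asyncio.sleep(0.01)  # placeholder for network latency
    return {"ok": True}

# Minimal sketch of the commented-out load test above, with n = 100 as in the new version.
async def loadtest_fn(n: int = 100):
    start = time.time()
    tasks = [router_completion() for _ in range(n)]
    chat_completions = await asyncio.gather(*tasks)
    successful_completions = [c for c in chat_completions if c is not None]
    print(f"{len(successful_completions)}/{n} succeeded in {time.time() - start:.2f}s")

if __name__ == "__main__":
    asyncio.run(loadtest_fn())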