diff --git a/docs/my-website/docs/tutorials/model_config_proxy.md b/docs/my-website/docs/tutorials/model_config_proxy.md index 075b9a69f..32f0528a9 100644 --- a/docs/my-website/docs/tutorials/model_config_proxy.md +++ b/docs/my-website/docs/tutorials/model_config_proxy.md @@ -1,3 +1,5 @@ +import Image from '@theme/IdealImage'; + # Customize Prompt Templates on OpenAI-Compatible server **You will learn:** How to set a custom prompt template on our OpenAI compatible server. @@ -11,15 +13,23 @@ $ litellm --model huggingface/codellama/CodeLlama-34b-Instruct-hf --api_base htt # OpenAI compatible server running on http://0.0.0.0/8000 ``` -By default this will follow [our formatting](https://github.com/BerriAI/litellm/blob/9932371f883c55fd0f3142f91d9c40279e8fe241/litellm/llms/prompt_templates/factory.py#L10) for CodeLlama (based on the [Huggingface's documentation](https://huggingface.co/blog/codellama#conversational-instructions)). +In a new shell, run: +```shell +$ litellm --test +``` +This will send a test request to our endpoint. -```python -[INST] <> -{{ system_prompt }} -<> - -{{ user_msg_1 }} [/INST] {{ model_answer_1 }} [INST] {{ user_msg_2 }} [/INST] +Now, let's see what got sent to huggingface. Run: +```shell +$ litellm --logs ``` +This will return the most recent log (by default logs are stored in a local file called 'api_logs.json'). + +As we can see, this is the formatting sent to huggingface: + + +- +This follows [our formatting](https://github.com/BerriAI/litellm/blob/9932371f883c55fd0f3142f91d9c40279e8fe241/litellm/llms/prompt_templates/factory.py#L10) for CodeLlama (based on the [Huggingface's documentation](https://huggingface.co/blog/codellama#conversational-instructions)). But this lacks BOS(``) and EOS(``) tokens. @@ -31,19 +41,22 @@ Our litellm server accepts prompt templates as part of a config file. You can sa For now, let's just create a simple config file with our prompt template, and tell our server about it. 
-Create a file called `litellm_config.toml`, and here's how we'll format it: +Create a file called `litellm_config.toml`: -```toml -[model.] -// our prompt template +```shell +$ touch litellm_config.toml ``` - We want to add: * BOS (``) tokens at the start of every System and Human message * EOS (``) tokens at the end of every assistant message. -This is what it looks like: -```toml +Let's open our file in our terminal: +```shell +$ vi litellm_config.toml +``` + +paste our prompt template: +```toml [model."huggingface/codellama/CodeLlama-34b-Instruct-hf".prompt_template] MODEL_SYSTEM_MESSAGE_START_TOKEN = "[INST] <>\n]" MODEL_SYSTEM_MESSAGE_END_TOKEN = "\n<>\n [/INST]\n" @@ -55,11 +68,30 @@ MODEL_ASSISTANT_MESSAGE_START_TOKEN = "" MODEL_ASSISTANT_MESSAGE_END_TOKEN = "" ``` -## Step 3: Save template +save our file (in vim): +```shell +:wq +``` + +## Step 3: Run new template Let's save our custom template to our litellm server by running: ```shell -litellm --config -f ./litellm_config.toml +$ litellm --config -f ./litellm_config.toml +``` +LiteLLM will save a copy of this file in its package, so it can persist these settings across restarts. + +Re-start our server: +```shell +$ litellm --model huggingface/codellama/CodeLlama-34b-Instruct-hf --api_base https://my-endpoint.com ``` -LiteLLM will save a copy of this file in it's package, so it can persist these settings across restarts. \ No newline at end of file +In a new shell, run: +```shell +$ litellm --test +``` + +See our new input prompt to Huggingface! 
+ + +Congratulations 🎉 \ No newline at end of file diff --git a/docs/my-website/img/codellama_formatted_input.png b/docs/my-website/img/codellama_formatted_input.png new file mode 100644 index 000000000..c9204ee76 Binary files /dev/null and b/docs/my-website/img/codellama_formatted_input.png differ diff --git a/docs/my-website/img/codellama_input.png b/docs/my-website/img/codellama_input.png new file mode 100644 index 000000000..414539c99 Binary files /dev/null and b/docs/my-website/img/codellama_input.png differ diff --git a/litellm/proxy/api_log.json b/litellm/proxy/api_log.json index c2c72092a..fb29cac24 100644 --- a/litellm/proxy/api_log.json +++ b/litellm/proxy/api_log.json @@ -852,5 +852,602 @@ "stop_reason": "stop_sequence" } } + }, + "20231013200437287062": { + "pre_api_call": { + "model": "codellama/CodeLlama-34b-Instruct-hf", + "messages": [ + { + "role": "user", + "content": "this is a test request, acknowledge that you got it" + } + ], + "optional_params": { + "details": true, + "return_full_text": false + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "huggingface", + "api_base": "https://ji16r2iys9a8rjk2.us-east-1.aws.endpoints.huggingface.cloud", + "litellm_call_id": "b40db754-5536-4ecd-837f-cda44f83424c", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "api_key": "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", + "additional_args": { + "complete_input_dict": { + "inputs": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "parameters": { + "details": true, + "return_full_text": false + }, + "stream": false + }, + "task": "text-generation-inference" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "codellama/CodeLlama-34b-Instruct-hf", + "messages": [ + { + 
"role": "user", + "content": "this is a test request, acknowledge that you got it" + } + ], + "optional_params": { + "details": true, + "return_full_text": false + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "huggingface", + "api_base": "https://ji16r2iys9a8rjk2.us-east-1.aws.endpoints.huggingface.cloud", + "litellm_call_id": "b40db754-5536-4ecd-837f-cda44f83424c", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "api_key": "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", + "additional_args": { + "complete_input_dict": { + "inputs": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "parameters": { + "details": true, + "return_full_text": false + }, + "stream": false + }, + "task": "text-generation-inference" + }, + "log_event_type": "post_api_call", + "original_response": "[{\"generated_text\":\"\\n[INST]Please provide me with your name and age[/INST]\\n\\n[INST\",\"details\":{\"finish_reason\":\"length\",\"generated_tokens\":20,\"seed\":null,\"prefill\":[],\"tokens\":[{\"id\":13,\"text\":\"\\n\",\"logprob\":-1.9580078,\"special\":false},{\"id\":29961,\"text\":\"[\",\"logprob\":-1.8720703,\"special\":false},{\"id\":25580,\"text\":\"INST\",\"logprob\":-2.0449219,\"special\":false},{\"id\":29962,\"text\":\"]\",\"logprob\":-0.037017822,\"special\":false},{\"id\":12148,\"text\":\"Please\",\"logprob\":-3.15625,\"special\":false},{\"id\":3867,\"text\":\" provide\",\"logprob\":-1.7675781,\"special\":false},{\"id\":592,\"text\":\" me\",\"logprob\":-0.9614258,\"special\":false},{\"id\":411,\"text\":\" with\",\"logprob\":-0.42285156,\"special\":false},{\"id\":596,\"text\":\" your\",\"logprob\":-0.86328125,\"special\":false},{\"id\":1024,\"text\":\" 
name\",\"logprob\":-1.0976562,\"special\":false},{\"id\":322,\"text\":\" and\",\"logprob\":-0.77246094,\"special\":false},{\"id\":5046,\"text\":\" age\",\"logprob\":-1.1455078,\"special\":false},{\"id\":29961,\"text\":\"[\",\"logprob\":-0.59033203,\"special\":false},{\"id\":29914,\"text\":\"/\",\"logprob\":-0.0069770813,\"special\":false},{\"id\":25580,\"text\":\"INST\",\"logprob\":-0.00042438507,\"special\":false},{\"id\":29962,\"text\":\"]\",\"logprob\":-0.03704834,\"special\":false},{\"id\":13,\"text\":\"\\n\",\"logprob\":-0.34960938,\"special\":false},{\"id\":13,\"text\":\"\\n\",\"logprob\":-0.49316406,\"special\":false},{\"id\":29961,\"text\":\"[\",\"logprob\":-0.5678711,\"special\":false},{\"id\":25580,\"text\":\"INST\",\"logprob\":-1.5263672,\"special\":false}]}}]" + } + }, + "20231013200440365413": { + "pre_api_call": { + "model": "codellama/CodeLlama-34b-Instruct-hf", + "messages": [ + { + "role": "user", + "content": "this is a test request, acknowledge that you got it" + } + ], + "optional_params": { + "stream": true, + "details": true, + "return_full_text": false + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "huggingface", + "api_base": "https://ji16r2iys9a8rjk2.us-east-1.aws.endpoints.huggingface.cloud", + "litellm_call_id": "24732ff3-8936-4ad8-97c5-013aa89e7aba", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "api_key": "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", + "additional_args": { + "complete_input_dict": { + "inputs": "[INST] this is a test request, acknowledge that you got it [/INST]\n", + "parameters": { + "stream": true, + "details": true, + "return_full_text": false + }, + "stream": true + }, + "task": "text-generation-inference" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": 
{ + "model": "codellama/CodeLlama-34b-Instruct-hf", + "messages": [ + { + "role": "user", + "content": "this is a test request, acknowledge that you got it" + } + ], + "optional_params": { + "stream": true, + "details": true, + "return_full_text": false + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "huggingface", + "api_base": "https://ji16r2iys9a8rjk2.us-east-1.aws.endpoints.huggingface.cloud", + "litellm_call_id": "24732ff3-8936-4ad8-97c5-013aa89e7aba", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": null, + "api_key": null, + "additional_args": {}, + "log_event_type": "post_api_call", + "original_response": "", + "complete_streaming_response": { + "id": "chatcmpl-531336f4-ac0a-4a8c-b9c1-dc4cc57974e2", + "object": "chat.completion.chunk", + "created": 1697252680, + "model": "codellama/CodeLlama-34b-Instruct-hf", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "\n[INST]Please provide me with your name and age[/INST]\n\n[INST" + }, + "finish_reason": "length" + } + ] + } + } + }, + "20231013201452138120": { + "pre_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "user", + "content": "hey" + }, + { + "role": "user", + "content": "can you print 'Done successfully' in green in python " + } + ], + "optional_params": { + "max_tokens_to_sample": 1024, + "temperature": 0.5, + "stream": true + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "1c490fe8-4a85-4944-8ee7-d640ac8bc0fd", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: hey\n\nHuman: can you print 'Done successfully' in green 
in python \n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: hey\\n\\nHuman: can you print 'Done successfully' in green in python \\n\\nAssistant: \", \"max_tokens_to_sample\": 1024, \"temperature\": 0.5}" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "user", + "content": "hey" + }, + { + "role": "user", + "content": "can you print 'Done successfully' in green in python " + } + ], + "optional_params": { + "max_tokens_to_sample": 1024, + "temperature": 0.5, + "stream": true + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "1c490fe8-4a85-4944-8ee7-d640ac8bc0fd", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": null, + "api_key": null, + "additional_args": {}, + "log_event_type": "post_api_call", + "original_response": "", + "complete_streaming_response": { + "id": "chatcmpl-b6e23895-bac8-4203-b992-7324b9aae855", + "object": "chat.completion.chunk", + "created": 1697253292, + "model": "anthropic.claude-v2", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": " Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python." + }, + "finish_reason": "stop_sequence" + } + ] + } + } + }, + "20231013201500572768": { + "pre_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "\" Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\"\n\nPlease write a short title summarizing the message quoted above. 
Use no more than 10 words:" + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "a5660679-aa71-4fc8-94b5-da60be471d59", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: \" Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: \\\" Here is how you can print 'Done successfully' in green text in Python:\\n\\n```python\\nfrom termcolor import colored\\n\\nprint(colored('Done successfully', 'green'))\\n```\\n\\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\\n\\nThis will print the text 'Done successfully' in green color.\\n\\nThe available color names that can be passed are:\\n\\n- 'grey'\\n- 'red' \\n- 'green'\\n- 'yellow' \\n- 'blue'\\n- 'magenta' \\n- 'cyan' \\n- 'white'\\n\\nSo this provides a simple way to add some color formatting to text printed in Python.\\\"\\n\\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "\" Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\"\n\nPlease write a short title summarizing the message quoted above. 
Use no more than 10 words:" + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "a5660679-aa71-4fc8-94b5-da60be471d59", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: \" Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: \\\" Here is how you can print 'Done successfully' in green text in Python:\\n\\n```python\\nfrom termcolor import colored\\n\\nprint(colored('Done successfully', 'green'))\\n```\\n\\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\\n\\nThis will print the text 'Done successfully' in green color.\\n\\nThe available color names that can be passed are:\\n\\n- 'grey'\\n- 'red' \\n- 'green'\\n- 'yellow' \\n- 'blue'\\n- 'magenta' \\n- 'cyan' \\n- 'white'\\n\\nSo this provides a simple way to add some color formatting to text printed in Python.\\\"\\n\\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "post_api_call", + "original_response": { + "completion": " Print colored text in Python using termcolor module", + "stop_reason": "stop_sequence" + } + } + }, + "20231013201503544188": { + "pre_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "### Instruction:\nhey\n### Instruction:\ncan you print 'Done successfully' in green in python \n### Response:\n Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\n### Response:\n\n\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. 
The title is: " + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "4ffe32f9-397a-4740-bc08-0c85db94a5bc", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: ### Instruction:\nhey\n### Instruction:\ncan you print 'Done successfully' in green in python \n### Response:\n Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\n### Response:\n\n\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. The title is: \n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: ### Instruction:\\nhey\\n### Instruction:\\ncan you print 'Done successfully' in green in python \\n### Response:\\n Here is how you can print 'Done successfully' in green text in Python:\\n\\n```python\\nfrom termcolor import colored\\n\\nprint(colored('Done successfully', 'green'))\\n```\\n\\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\\n\\nThis will print the text 'Done successfully' in green color.\\n\\nThe available color names that can be passed are:\\n\\n- 'grey'\\n- 'red' \\n- 'green'\\n- 'yellow' \\n- 'blue'\\n- 'magenta' \\n- 'cyan' \\n- 'white'\\n\\nSo this provides a simple way to add some color formatting to text printed in Python.\\n### Response:\\n\\n\\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. The title is: \\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "### Instruction:\nhey\n### Instruction:\ncan you print 'Done successfully' in green in python \n### Response:\n Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\n### Response:\n\n\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. 
The title is: " + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "4ffe32f9-397a-4740-bc08-0c85db94a5bc", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: ### Instruction:\nhey\n### Instruction:\ncan you print 'Done successfully' in green in python \n### Response:\n Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\n### Response:\n\n\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. The title is: \n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: ### Instruction:\\nhey\\n### Instruction:\\ncan you print 'Done successfully' in green in python \\n### Response:\\n Here is how you can print 'Done successfully' in green text in Python:\\n\\n```python\\nfrom termcolor import colored\\n\\nprint(colored('Done successfully', 'green'))\\n```\\n\\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\\n\\nThis will print the text 'Done successfully' in green color.\\n\\nThe available color names that can be passed are:\\n\\n- 'grey'\\n- 'red' \\n- 'green'\\n- 'yellow' \\n- 'blue'\\n- 'magenta' \\n- 'cyan' \\n- 'white'\\n\\nSo this provides a simple way to add some color formatting to text printed in Python.\\n### Response:\\n\\n\\nGive a short title to describe the above chat session. Do not put quotes around the title. Do not use more than 6 words. The title is: \\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "post_api_call", + "original_response": { + "completion": " Printing text in green in Python", + "stop_reason": "stop_sequence" + } + } + }, + "20231013201524163532": { + "pre_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "user", + "content": "hey" + }, + { + "role": "user", + "content": "can you print 'Done successfully' in green in python " + }, + { + "role": "assistant", + "content": " Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python." 
+ }, + { + "role": "user", + "content": "no do it like this: \\033[1;37m" + } + ], + "optional_params": { + "max_tokens_to_sample": 1024, + "temperature": 0.5, + "stream": true + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "4c3ef670-bf78-4824-990d-4e69dde89bc1", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: hey\n\nHuman: can you print 'Done successfully' in green in python \n\nAssistant: Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python.\n\nHuman: no do it like this: \\033[1;37m\n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: hey\\n\\nHuman: can you print 'Done successfully' in green in python \\n\\nAssistant: Here is how you can print 'Done successfully' in green text in Python:\\n\\n```python\\nfrom termcolor import colored\\n\\nprint(colored('Done successfully', 'green'))\\n```\\n\\nTo print text in different colors in Python, we need to import the termcolor module. 
Then we can pass the text and the color name to the colored() function.\\n\\nThis will print the text 'Done successfully' in green color.\\n\\nThe available color names that can be passed are:\\n\\n- 'grey'\\n- 'red' \\n- 'green'\\n- 'yellow' \\n- 'blue'\\n- 'magenta' \\n- 'cyan' \\n- 'white'\\n\\nSo this provides a simple way to add some color formatting to text printed in Python.\\n\\nHuman: no do it like this: \\\\033[1;37m\\n\\nAssistant: \", \"max_tokens_to_sample\": 1024, \"temperature\": 0.5}" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "user", + "content": "hey" + }, + { + "role": "user", + "content": "can you print 'Done successfully' in green in python " + }, + { + "role": "assistant", + "content": " Here is how you can print 'Done successfully' in green text in Python:\n\n```python\nfrom termcolor import colored\n\nprint(colored('Done successfully', 'green'))\n```\n\nTo print text in different colors in Python, we need to import the termcolor module. Then we can pass the text and the color name to the colored() function.\n\nThis will print the text 'Done successfully' in green color.\n\nThe available color names that can be passed are:\n\n- 'grey'\n- 'red' \n- 'green'\n- 'yellow' \n- 'blue'\n- 'magenta' \n- 'cyan' \n- 'white'\n\nSo this provides a simple way to add some color formatting to text printed in Python." 
+ }, + { + "role": "user", + "content": "no do it like this: \\033[1;37m" + } + ], + "optional_params": { + "max_tokens_to_sample": 1024, + "temperature": 0.5, + "stream": true + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "4c3ef670-bf78-4824-990d-4e69dde89bc1", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": null, + "api_key": null, + "additional_args": {}, + "log_event_type": "post_api_call", + "original_response": "", + "complete_streaming_response": { + "id": "chatcmpl-1f9a6a04-9b9c-4a87-ae6e-2dedcc7adfeb", + "object": "chat.completion.chunk", + "created": 1697253324, + "model": "anthropic.claude-v2", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": " Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\n\n```python\nprint(\"\\033[1;32mDone successfully\\033[0m\")\n```\n\nThe ANSI escape code `\\033[1;32m` sets the text color to green. \n\n`\\033[0m` resets the text formatting back to default.\n\nSo the full code is:\n\n`\\033[1;32m` - Set color to green \n\n`Done successfully` - Print text\n\n`\\033[0m` - Reset color back to default\n\nThis will print 'Done successfully' in green color without needing to import any module.\n\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes." 
+ }, + "finish_reason": "stop" + } + ] + } + } + }, + "20231013201538894266": { + "pre_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "\" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\n\n```python\nprint(\"\\033[1;32mDone successfully\\033[0m\")\n```\n\nThe ANSI escape code `\\033[1;32m` sets the text color to green. \n\n`\\033[0m` resets the text formatting back to default.\n\nSo the full code is:\n\n`\\033[1;32m` - Set color to green \n\n`Done successfully` - Print text\n\n`\\033[0m` - Reset color back to default\n\nThis will print 'Done successfully' in green color without needing to import any module.\n\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:" + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "3529c05a-c770-4d2e-b19c-5bada33b983f", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: \" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\n\n```python\nprint(\"\\033[1;32mDone successfully\\033[0m\")\n```\n\nThe ANSI escape code `\\033[1;32m` sets the text color to green. 
\n\n`\\033[0m` resets the text formatting back to default.\n\nSo the full code is:\n\n`\\033[1;32m` - Set color to green \n\n`Done successfully` - Print text\n\n`\\033[0m` - Reset color back to default\n\nThis will print 'Done successfully' in green color without needing to import any module.\n\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: \\\" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\\n\\n```python\\nprint(\\\"\\\\033[1;32mDone successfully\\\\033[0m\\\")\\n```\\n\\nThe ANSI escape code `\\\\033[1;32m` sets the text color to green. \\n\\n`\\\\033[0m` resets the text formatting back to default.\\n\\nSo the full code is:\\n\\n`\\\\033[1;32m` - Set color to green \\n\\n`Done successfully` - Print text\\n\\n`\\\\033[0m` - Reset color back to default\\n\\nThis will print 'Done successfully' in green color without needing to import any module.\\n\\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\\\"\\n\\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "pre_api_call" + }, + "post_api_call": { + "model": "anthropic.claude-v2", + "messages": [ + { + "role": "system", + "content": "\" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\n\n```python\nprint(\"\\033[1;32mDone successfully\\033[0m\")\n```\n\nThe ANSI escape code `\\033[1;32m` sets the text color to green. 
\n\n`\\033[0m` resets the text formatting back to default.\n\nSo the full code is:\n\n`\\033[1;32m` - Set color to green \n\n`Done successfully` - Print text\n\n`\\033[0m` - Reset color back to default\n\nThis will print 'Done successfully' in green color without needing to import any module.\n\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:" + } + ], + "optional_params": { + "max_tokens_to_sample": 20, + "temperature": 0.5 + }, + "litellm_params": { + "return_async": false, + "api_key": null, + "force_timeout": 600, + "logger_fn": null, + "verbose": false, + "custom_llm_provider": "bedrock", + "api_base": null, + "litellm_call_id": "3529c05a-c770-4d2e-b19c-5bada33b983f", + "model_alias_map": {}, + "completion_call_id": null, + "metadata": null, + "stream_response": {} + }, + "input": "\n\nHuman: \n\nHuman: \" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\n\n```python\nprint(\"\\033[1;32mDone successfully\\033[0m\")\n```\n\nThe ANSI escape code `\\033[1;32m` sets the text color to green. \n\n`\\033[0m` resets the text formatting back to default.\n\nSo the full code is:\n\n`\\033[1;32m` - Set color to green \n\n`Done successfully` - Print text\n\n`\\033[0m` - Reset color back to default\n\nThis will print 'Done successfully' in green color without needing to import any module.\n\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\"\n\nPlease write a short title summarizing the message quoted above. 
Use no more than 10 words:\n\nAssistant: ", + "api_key": "", + "additional_args": { + "complete_input_dict": "{\"prompt\": \"\\n\\nHuman: \\n\\nHuman: \\\" Here is how you can print 'Done successfully' in green color using ANSI escape codes in Python:\\n\\n```python\\nprint(\\\"\\\\033[1;32mDone successfully\\\\033[0m\\\")\\n```\\n\\nThe ANSI escape code `\\\\033[1;32m` sets the text color to green. \\n\\n`\\\\033[0m` resets the text formatting back to default.\\n\\nSo the full code is:\\n\\n`\\\\033[1;32m` - Set color to green \\n\\n`Done successfully` - Print text\\n\\n`\\\\033[0m` - Reset color back to default\\n\\nThis will print 'Done successfully' in green color without needing to import any module.\\n\\nThe ANSI escape codes allow changing text color, style (bold, underline etc) in the terminal. We just need to encapsulate the text we want to format between the ANSI codes.\\\"\\n\\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:\\n\\nAssistant: \", \"max_tokens_to_sample\": 20, \"temperature\": 0.5}" + }, + "log_event_type": "post_api_call", + "original_response": { + "completion": " Green text in Python with ANSI escape codes", + "stop_reason": "stop_sequence" + } + } } } \ No newline at end of file diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py index f1bb7abee..b1af8649e 100644 --- a/litellm/proxy/proxy_cli.py +++ b/litellm/proxy/proxy_cli.py @@ -38,6 +38,7 @@ def open_config(file_path=None): with open(user_config_path) as f: print(f"Dest file: {user_config_path}") print(f.read()) + print("\033[1;32mDone successfully\033[0m") except Exception as e: print(f"Failed to copy {file_path}: {e}") else: @@ -54,6 +55,7 @@ def open_config(file_path=None): print(f"To create a config (save keys, modify model prompt), copy the template located here: https://docs.litellm.ai/docs/proxy_server") print(f"LiteLLM: config location - {user_config_path}") + def clone_subfolder(repo_url, subfolder, destination): 
# Clone the full repo repo_name = repo_url.split('/')[-1] @@ -144,6 +146,7 @@ def run_server(host, port, api_base, model, deploy, debug, temperature, max_toke recent_logs = {k.strftime("%Y%m%d%H%M%S%f"): v for k, v in sorted_times[:logs]} print(json.dumps(recent_logs, indent=4)) + return if deploy == True: print(f"\033[32mLiteLLM: Deploying your proxy to api.litellm.ai\033[0m\n") print(f"\033[32mLiteLLM: Deploying proxy for model: {model}\033[0m\n") diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py index d1152e370..10143839e 100644 --- a/litellm/proxy/proxy_server.py +++ b/litellm/proxy/proxy_server.py @@ -134,26 +134,26 @@ def load_config(): os.environ["AWS_SECRET_ACCESS_KEY"] = user_config["keys"][key] ## settings - litellm.add_function_to_prompt = user_config["general"].get("add_function_to_prompt", True) # by default add function to prompt if unsupported by provider - litellm.drop_params = user_config["general"].get("drop_params", True) # by default drop params if unsupported by provider - litellm.model_fallbacks = user_config["general"].get("fallbacks", None) # fallback models in case initial completion call fails - default_model = user_config["general"].get("default_model", None) # route all requests to this model. + if "general" in user_config: + litellm.add_function_to_prompt = user_config["general"].get("add_function_to_prompt", True) # by default add function to prompt if unsupported by provider + litellm.drop_params = user_config["general"].get("drop_params", True) # by default drop params if unsupported by provider + litellm.model_fallbacks = user_config["general"].get("fallbacks", None) # fallback models in case initial completion call fails + default_model = user_config["general"].get("default_model", None) # route all requests to this model. - if user_model is None: # `litellm --model `` > default_model. - user_model = default_model + if user_model is None: # `litellm --model `` > default_model. 
+ user_model = default_model ## load model config - to set this run `litellm --config` model_config = None - if user_model in user_config["model"]: - model_config = user_config["model"][user_model] + if "model" in user_config: + if user_model in user_config["model"]: + model_config = user_config["model"][user_model] print_verbose(f"user_config: {user_config}") print_verbose(f"model_config: {model_config}") + print_verbose(f"user_model: {user_model}") if model_config is None: return - user_model = model_config["model_name"] # raise an error if this isn't set when user runs either `litellm --model local_model` or `litellm --model hosted_model` - print_verbose(f"user_model: {user_model}") - user_max_tokens = model_config.get("max_tokens", None) user_temperature = model_config.get("temperature", None) @@ -183,7 +183,7 @@ def load_config(): final_prompt_value=model_prompt_template.get("MODEL_POST_PROMPT", ""), ) except Exception as e: - traceback.print_exc() + pass def initialize(model, api_base, debug, temperature, max_tokens, max_budget, telemetry, drop_params, add_function_to_prompt): global user_model, user_api_base, user_debug, user_max_tokens, user_temperature, user_telemetry