diff --git a/litellm/__init__.py b/litellm/__init__.py
index b0099202e4..71a8d2a7d1 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -164,9 +164,16 @@ ai21_models = ["j2-ultra", "j2-mid", "j2-light"]
 nlp_cloud_models = ["dolphin", "chatdolphin"]
 
 together_ai_models = [
+    # llama llms
     "togethercomputer/llama-2-70b-chat",
+    "togethercomputer/llama-2-70b",
+    "togethercomputer/LLaMA-2-7B-32K",
     "togethercomputer/Llama-2-7B-32K-Instruct",
     "togethercomputer/llama-2-7b",
+    "togethercomputer/CodeLlama-34b",
+    "togethercomputer/CodeLlama-34b-Instruct",
+    "togethercomputer/CodeLlama-34b-Python",
+
 ] # supports all together ai models, just pass in the model id e.g. completion(model="together_computer/replit_code_3b",...)
 
 aleph_alpha_models = [
diff --git a/litellm/utils.py b/litellm/utils.py
index b093a1b4a6..7499092ea3 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -880,7 +880,7 @@ def get_optional_params(  # use the openai defaults
         if max_tokens != float("inf"):
             optional_params["max_tokens"] = max_tokens
         if frequency_penalty != 0:
-            optional_params["frequency_penalty"] = frequency_penalty # should be repetition penalty
+            optional_params["frequency_penalty"] = frequency_penalty # TODO: Check if should be repetition penalty
         if stop != None:
             optional_params["stop"] = stop #TG AI expects a list, example ["\n\n\n\n","<|endoftext|>"]
     elif (