Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
feat(utils.py): Add github as a provider
Closes https://github.com/BerriAI/litellm/issues/4922#issuecomment-2266564469
This commit is contained in: parent c88e298315, commit acbc2917b8
3 changed files with 36 additions and 0 deletions
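Once this commit is applied, the new provider prefix can be exercised through the public litellm.completion() API. The following is a minimal usage sketch, not part of the commit itself; it assumes a GitHub Models token is available in GITHUB_API_KEY (the environment variable read in the utils.py hunk below), and the token value shown is only a placeholder.

import os
import litellm

# GITHUB_API_KEY is consumed by get_llm_provider() (see the utils.py hunk below);
# the value here is a placeholder, not a real token.
os.environ["GITHUB_API_KEY"] = "<your-github-models-token>"

response = litellm.completion(
    model="github/gpt-4o",  # the "github/" prefix routes to the new provider
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)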
@@ -458,6 +458,7 @@ openai_compatible_providers: List = [
     "empower",
     "friendliai",
     "azure_ai",
+    "github",
 ]


@@ -698,6 +699,7 @@ provider_list: List = [
     "predibase",
     "databricks",
     "empower",
+    "github",
     "custom", # custom apis
 ]

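With the two list entries above, "github" is registered both as a recognized provider and as an OpenAI-compatible one, so requests go through litellm's generic OpenAI-compatible request path. A quick hedged check, assuming a build that includes this commit and that these lists remain module-level attributes of the litellm package:

import litellm

# Both lists appear as module-level lists in the hunks above.
assert "github" in litellm.provider_list
assert "github" in litellm.openai_compatible_providers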
@@ -344,6 +344,36 @@ def test_completion_empower():
         pytest.fail(f"Error occurred: {e}")


+def test_completion_github_api():
+    litellm.set_verbose = True
+    messages = [
+        {
+            "role": "user",
+            "content": "\nWhat is the query for `console.log` => `console.error`\n",
+        },
+        {
+            "role": "assistant",
+            "content": "\nThis is the GritQL query for the given before/after examples:\n<gritql>\n`console.log` => `console.error`\n</gritql>\n",
+        },
+        {
+            "role": "user",
+            "content": "\nWhat is the query for `console.info` => `consdole.heaven`\n",
+        },
+    ]
+    try:
+        # test without max tokens
+        response = completion(
+            model="github/gpt-4o",
+            messages=messages,
+        )
+        # Add any assertions, here to check response args
+        print(response)
+    except litellm.AuthenticationError:
+        pass
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 def test_completion_claude_3_empty_response():
     litellm.set_verbose = True

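The test above tolerates litellm.AuthenticationError, so it still passes in environments without GitHub Models credentials. Because get_llm_provider() (next hunk) consults GITHUB_API_BASE before falling back to the hard-coded default endpoint, the target endpoint can also be overridden per environment. A sketch only; the URL value is purely illustrative and the env-var names come from the hunk below.

import os
import litellm

# GITHUB_API_BASE takes precedence over the default
# "https://models.inference.ai.azure.com" (see the utils.py hunk below).
os.environ["GITHUB_API_BASE"] = "https://models.example.invalid/v1"  # placeholder
os.environ["GITHUB_API_KEY"] = "<token>"  # placeholder

response = litellm.completion(
    model="github/gpt-4o",
    messages=[{"role": "user", "content": "ping"}],
)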
@@ -4513,6 +4513,10 @@ def get_llm_provider(
         elif custom_llm_provider == "azure_ai":
             api_base = api_base or get_secret("AZURE_AI_API_BASE") # type: ignore
             dynamic_api_key = api_key or get_secret("AZURE_AI_API_KEY")
+        elif custom_llm_provider == "github":
+            api_base = api_base or get_secret("GITHUB_API_BASE") or "https://models.inference.ai.azure.com" # type: ignore
+            dynamic_api_key = api_key or get_secret("GITHUB_API_KEY")
+
         elif custom_llm_provider == "mistral":
             # mistral is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.mistral.ai
             api_base = (
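A hedged sketch of what the new branch resolves for a "github/..." model string, assuming get_llm_provider() keeps its usual 4-tuple return of (model, custom_llm_provider, dynamic_api_key, api_base) in this version of litellm:

from litellm.utils import get_llm_provider

# With no GITHUB_API_BASE set, the hard-coded default endpoint is returned.
model, provider, key, base = get_llm_provider(model="github/gpt-4o")
print(model)     # expected: "gpt-4o" (provider prefix stripped)
print(provider)  # expected: "github"
print(base)      # expected: GITHUB_API_BASE if set, else "https://models.inference.ai.azure.com"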