Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
* refactor(fireworks_ai/): inherit from openai like base config (refactors fireworks ai to use a common config)
* test: fix import in test
* refactor(watsonx/): refactor watsonx to use llm base config (refactors chat + completion routes to base config path)
* fix: fix linting error
* refactor: inherit base llm config for oai compatible routes
* test: fix test
* test: fix test
24 lines | 558 B | Python
"""
|
|
Translate from OpenAI's `/v1/chat/completions` to Github's `/v1/chat/completions`
|
|
"""
|
|
|
|
import json
|
|
import types
|
|
from typing import List, Optional, Tuple, Union
|
|
|
|
from pydantic import BaseModel
|
|
|
|
import litellm
|
|
from litellm.secret_managers.main import get_secret_str
|
|
from litellm.types.llms.openai import (
|
|
AllMessageValues,
|
|
ChatCompletionAssistantMessage,
|
|
ChatCompletionToolParam,
|
|
ChatCompletionToolParamFunctionChunk,
|
|
)
|
|
|
|
from ...openai_like.chat.handler import OpenAILikeChatConfig
|
|
|
|
|
|
class GithubChatConfig(OpenAILikeChatConfig):
|
|
pass
|