ollama remote adapter works

Ashwin Bharambe 2024-08-28 06:51:07 -07:00
parent 2076d2b6db
commit 2a1552a5eb
14 changed files with 196 additions and 128 deletions

View file

@@ -4,4 +4,4 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-from .ollama import get_adapter_impl # noqa
+from .ollama import get_provider_impl # noqa

View file

@@ -4,7 +4,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-from typing import AsyncGenerator
+from typing import Any, AsyncGenerator
 import httpx
@@ -36,7 +36,7 @@ OLLAMA_SUPPORTED_SKUS = {
 }
-async def get_adapter_impl(config: RemoteProviderConfig) -> Inference:
+async def get_provider_impl(config: RemoteProviderConfig, _deps: Any) -> Inference:
     impl = OllamaInferenceAdapter(config.url)
     await impl.initialize()
     return impl
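
The ollama module's entry point now takes the provider-style signature: a config plus a deps argument it currently ignores. A minimal sketch of how a caller might drive it, using a hypothetical stand-in for RemoteProviderConfig since that class's import path is not shown in this diff:

from typing import Any, Dict

from llama_toolchain.inference.adapters.ollama import get_provider_impl


async def build_ollama_inference(url: str):
    # Hypothetical stand-in for RemoteProviderConfig; the adapter only reads
    # config.url in the hunk above, so any object with a url attribute works.
    class _Config:
        def __init__(self, url: str) -> None:
            self.url = url

    deps: Dict[str, Any] = {}  # the ollama entry point accepts but ignores _deps
    # get_provider_impl() builds OllamaInferenceAdapter(config.url) and awaits
    # its initialize() before returning it.
    return await get_provider_impl(_Config(url), deps)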

View file

@@ -26,7 +26,7 @@ from .api import (
 from .event_logger import EventLogger
-async def get_adapter_impl(config: RemoteProviderConfig) -> Inference:
+async def get_provider_impl(config: RemoteProviderConfig) -> Inference:
     return InferenceClient(config.url)
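
The client-side entry point keeps the single-argument form (no deps). A sketch of calling it against a locally running server; the module path, the config stand-in, and the port are assumptions, not taken from this diff:

import asyncio

# Assumed module path for the client file shown in the hunk above; adjust to
# wherever InferenceClient and get_provider_impl actually live in the tree.
from llama_toolchain.inference.client import get_provider_impl


class _Config:
    # Hypothetical stand-in for RemoteProviderConfig; only .url is needed here.
    def __init__(self, url: str) -> None:
        self.url = url


async def main() -> None:
    # Returns InferenceClient(config.url); no initialize() call on this path.
    client = await get_provider_impl(_Config("http://localhost:5000"))
    print(type(client).__name__)


asyncio.run(main())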

View file

@@ -27,11 +27,11 @@ def available_inference_providers() -> List[ProviderSpec]:
             module="llama_toolchain.inference.meta_reference",
             config_class="llama_toolchain.inference.meta_reference.MetaReferenceImplConfig",
         ),
-        remote_provider_spec(
+        adapter_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
                 adapter_id="ollama",
-                pip_packages=[],
+                pip_packages=["ollama"],
                 module="llama_toolchain.inference.adapters.ollama",
             ),
         ),
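
The registry entry now declares the ollama client library as a pip dependency and points at the adapter package. A rough sketch of how such a spec could be resolved at build time; the helper below is illustrative, not the toolchain's actual resolver:

import importlib
import subprocess
import sys
from typing import List


def load_adapter(module: str, pip_packages: List[str]):
    # Install whatever the AdapterSpec declares (here: ["ollama"]) before
    # importing the adapter package named by its module field.
    if pip_packages:
        subprocess.check_call([sys.executable, "-m", "pip", "install", *pip_packages])
    # The imported package exposes get_provider_impl (see the __init__.py hunk
    # at the top of this commit).
    return importlib.import_module(module)


# e.g. load_adapter("llama_toolchain.inference.adapters.ollama", ["ollama"])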