Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-27 18:50:41 +00:00.
The lint check on the main branch is failing. This fixes the lint check after we moved to ruff in https://github.com/meta-llama/llama-stack/pull/921. We need to move to a `ruff.toml` file, as well as fix and ignore some additional checks. Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import Any, Dict

from pydantic import BaseModel

DEFAULT_OLLAMA_URL = "http://localhost:11434"


class OllamaImplConfig(BaseModel):
    """Configuration for the Ollama provider; defaults to a local Ollama server URL."""

    url: str = DEFAULT_OLLAMA_URL

    @classmethod
    def sample_run_config(cls, url: str = "${env.OLLAMA_URL:http://localhost:11434}", **kwargs) -> Dict[str, Any]:
        # Returns a template dict; the ${env.OLLAMA_URL:...} placeholder is substituted at run time.
        return {"url": url}
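For illustration, here is a minimal usage sketch of the config class above. The instantiation and the `sample_run_config` call follow directly from the definitions in this file; the environment-variable substitution for the `${env.OLLAMA_URL:...}` placeholder is assumed to happen elsewhere in llama-stack's config loading and is not shown here.

# Usage sketch, assuming OllamaImplConfig from the file above is in scope.
config = OllamaImplConfig()                              # uses DEFAULT_OLLAMA_URL
custom = OllamaImplConfig(url="http://ollama.internal:11434")

print(config.url)   # -> "http://localhost:11434"
print(custom.url)   # -> "http://ollama.internal:11434"

# sample_run_config() returns a plain dict suitable for a run-config template;
# resolving the "${env.OLLAMA_URL:...}" placeholder is assumed to be handled
# by the stack's config loader, not by this class.
print(OllamaImplConfig.sample_run_config())
# {'url': '${env.OLLAMA_URL:http://localhost:11434}'}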