diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index dcb3ef945..c35da8d55 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -4195,7 +4195,26 @@
"additional_params": {
"type": "object",
"additionalProperties": {
- "type": "string"
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
}
}
},
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index c0a704230..591a296d0 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -2910,7 +2910,13 @@ components:
additional_params:
type: object
additionalProperties:
- type: string
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
additionalProperties: false
required:
- strategy
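The relaxed `additionalProperties` schema in both spec files now accepts any JSON type for the values of `additional_params`, not only strings. A minimal illustration of a request fragment that validates against the new `oneOf`; the parameter names are hypothetical and chosen only to exercise each branch:

```json
{
  "additional_params": {
    "seed": null,
    "use_beam_search": false,
    "length_penalty": 1.2,
    "template": "chatml",
    "stop_token_ids": [128001, 128009],
    "guided_decoding": {"backend": "outlines"}
  }
}
```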
diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py
index cb72c3b76..b91169fbb 100644
--- a/llama_stack/apis/inference/inference.py
+++ b/llama_stack/apis/inference/inference.py
@@ -82,7 +82,7 @@ class SamplingParams(BaseModel):
max_tokens: Optional[int] = 0
repetition_penalty: Optional[float] = 1.0
stop: Optional[List[str]] = None
- additional_params: Optional[Dict[str, str]] = {}
+ additional_params: Optional[Dict[str, Any]] = {}


class LogProbConfig(BaseModel):
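A minimal sketch of what the `Dict[str, Any]` annotation changes on the Pydantic side, assuming the imports already present in `inference.py`. The model copy is trimmed to the fields shown above and the parameter names are illustrative, not defined by the API:

```python
from typing import Any, Dict, List, Optional

from pydantic import BaseModel


class SamplingParams(BaseModel):
    # Trimmed copy of the model above, shown only to illustrate the field change.
    max_tokens: Optional[int] = 0
    repetition_penalty: Optional[float] = 1.0
    stop: Optional[List[str]] = None
    additional_params: Optional[Dict[str, Any]] = {}


# Previously every value had to be a string; with Dict[str, Any],
# mixed-type values pass validation and round-trip unchanged.
params = SamplingParams(
    stop=["</s>"],
    additional_params={
        "top_k": 40,                        # int
        "use_beam_search": False,           # bool
        "length_penalty": 1.2,              # float
        "stop_token_ids": [128001],         # list
        "guided_json": {"type": "object"},  # nested object
    },
)
assert params.additional_params["use_beam_search"] is False
```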