forked from phoenix-oss/llama-stack-mirror
update python sdk reference (#866)
# What does this PR do?

Syncs changes from https://github.com/stainless-sdks/llama-stack-python/blob/main/api.md
Parent: c570a708bf
Commit: ebffa15f40

1 changed file with 290 additions and 184 deletions
@@ -4,29 +4,77 @@

 ```python
 from llama_stack_client.types import (
-    Attachment,
+    AgentConfig,
     BatchCompletion,
     CompletionMessage,
+    ContentDelta,
+    Document,
+    InterleavedContent,
+    InterleavedContentItem,
+    Message,
+    ParamType,
+    QueryConfig,
+    QueryResult,
+    ReturnType,
+    SafetyViolation,
     SamplingParams,
+    ScoringResult,
     SystemMessage,
     ToolCall,
+    ToolParamDefinition,
     ToolResponseMessage,
+    URL,
     UserMessage,
 )
 ```

-## Telemetry
+## Toolgroups

 Types:

 ```python
-from llama_stack_client.types import TelemetryGetTraceResponse
+from llama_stack_client.types import ListToolGroupsResponse, ToolGroup, ToolgroupListResponse
 ```

 Methods:

-- <code title="get /telemetry/get_trace">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">get_trace</a>(\*\*<a href="src/llama_stack_client/types/telemetry_get_trace_params.py">params</a>) -> <a href="./src/llama_stack_client/types/telemetry_get_trace_response.py">TelemetryGetTraceResponse</a></code>
+- <code title="get /v1/toolgroups">client.toolgroups.<a href="./src/llama_stack_client/resources/toolgroups.py">list</a>() -> <a href="./src/llama_stack_client/types/toolgroup_list_response.py">ToolgroupListResponse</a></code>
-- <code title="post /telemetry/log_event">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">log</a>(\*\*<a href="src/llama_stack_client/types/telemetry_log_params.py">params</a>) -> None</code>
+- <code title="get /v1/toolgroups/{toolgroup_id}">client.toolgroups.<a href="./src/llama_stack_client/resources/toolgroups.py">get</a>(toolgroup_id) -> <a href="./src/llama_stack_client/types/tool_group.py">ToolGroup</a></code>
+- <code title="post /v1/toolgroups">client.toolgroups.<a href="./src/llama_stack_client/resources/toolgroups.py">register</a>(\*\*<a href="src/llama_stack_client/types/toolgroup_register_params.py">params</a>) -> None</code>
+- <code title="delete /v1/toolgroups/{toolgroup_id}">client.toolgroups.<a href="./src/llama_stack_client/resources/toolgroups.py">unregister</a>(toolgroup_id) -> None</code>

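For orientation, a minimal usage sketch of the new `/v1/toolgroups` routes is shown below; the server address, the toolgroup id, and the `register` keyword arguments are illustrative assumptions rather than values taken from this reference.

```python
from llama_stack_client import LlamaStackClient

# Assumes a Llama Stack server is reachable at this (illustrative) address.
client = LlamaStackClient(base_url="http://localhost:8321")

print(client.toolgroups.list())  # GET /v1/toolgroups

# The toolgroup id and register kwargs below are illustrative assumptions.
client.toolgroups.register(toolgroup_id="my::tools", provider_id="my-provider")
print(client.toolgroups.get("my::tools"))  # GET /v1/toolgroups/{toolgroup_id}
client.toolgroups.unregister("my::tools")  # DELETE /v1/toolgroups/{toolgroup_id}
```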
+## Tools
+
+Types:
+
+```python
+from llama_stack_client.types import ListToolsResponse, Tool, ToolListResponse
+```
+
+Methods:
+
+- <code title="get /v1/tools">client.tools.<a href="./src/llama_stack_client/resources/tools.py">list</a>(\*\*<a href="src/llama_stack_client/types/tool_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/tool_list_response.py">ToolListResponse</a></code>
+- <code title="get /v1/tools/{tool_name}">client.tools.<a href="./src/llama_stack_client/resources/tools.py">get</a>(tool_name) -> <a href="./src/llama_stack_client/types/tool.py">Tool</a></code>
+
+## ToolRuntime
+
+Types:
+
+```python
+from llama_stack_client.types import ToolDef, ToolInvocationResult
+```
+
+Methods:
+
+- <code title="post /v1/tool-runtime/invoke">client.tool_runtime.<a href="./src/llama_stack_client/resources/tool_runtime/tool_runtime.py">invoke_tool</a>(\*\*<a href="src/llama_stack_client/types/tool_runtime_invoke_tool_params.py">params</a>) -> <a href="./src/llama_stack_client/types/tool_invocation_result.py">ToolInvocationResult</a></code>
+- <code title="get /v1/tool-runtime/list-tools">client.tool_runtime.<a href="./src/llama_stack_client/resources/tool_runtime/tool_runtime.py">list_tools</a>(\*\*<a href="src/llama_stack_client/types/tool_runtime_list_tools_params.py">params</a>) -> <a href="./src/llama_stack_client/types/tool_def.py">JSONLDecoder[ToolDef]</a></code>
+
+### RagTool
+
+Methods:
+
+- <code title="post /v1/tool-runtime/rag-tool/insert">client.tool_runtime.rag_tool.<a href="./src/llama_stack_client/resources/tool_runtime/rag_tool.py">insert</a>(\*\*<a href="src/llama_stack_client/types/tool_runtime/rag_tool_insert_params.py">params</a>) -> None</code>
+- <code title="post /v1/tool-runtime/rag-tool/query">client.tool_runtime.rag_tool.<a href="./src/llama_stack_client/resources/tool_runtime/rag_tool.py">query</a>(\*\*<a href="src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py">params</a>) -> <a href="./src/llama_stack_client/types/shared/query_result.py">QueryResult</a></code>

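The new RAG tool runtime is easiest to see end to end: insert a document, then query it back. Every keyword argument in the sketch below (the document fields, `vector_db_id`, `chunk_size_in_tokens`, `content`, `vector_db_ids`) is an illustrative assumption about the params, not taken from this reference.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Index one document into an (assumed) vector database, then query it back.
client.tool_runtime.rag_tool.insert(
    documents=[{"document_id": "doc-1", "content": "Llama Stack exposes a RAG tool runtime.", "metadata": {}}],
    vector_db_id="my-db",
    chunk_size_in_tokens=256,
)
result = client.tool_runtime.rag_tool.query(
    content="What does the tool runtime expose?",
    vector_db_ids=["my-db"],
)
print(result)
```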
 ## Agents

@@ -36,20 +84,19 @@ Types:
 from llama_stack_client.types import (
     InferenceStep,
     MemoryRetrievalStep,
-    RestAPIExecutionConfig,
     ShieldCallStep,
     ToolExecutionStep,
-    ToolParamDefinition,
+    ToolResponse,
     AgentCreateResponse,
 )
 ```

 Methods:

-- <code title="post /agents/create">client.agents.<a href="./src/llama_stack_client/resources/agents/agents.py">create</a>(\*\*<a href="src/llama_stack_client/types/agent_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agent_create_response.py">AgentCreateResponse</a></code>
+- <code title="post /v1/agents">client.agents.<a href="./src/llama_stack_client/resources/agents/agents.py">create</a>(\*\*<a href="src/llama_stack_client/types/agent_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agent_create_response.py">AgentCreateResponse</a></code>
-- <code title="post /agents/delete">client.agents.<a href="./src/llama_stack_client/resources/agents/agents.py">delete</a>(\*\*<a href="src/llama_stack_client/types/agent_delete_params.py">params</a>) -> None</code>
+- <code title="delete /v1/agents/{agent_id}">client.agents.<a href="./src/llama_stack_client/resources/agents/agents.py">delete</a>(agent_id) -> None</code>

-### Sessions
+### Session

 Types:

@@ -59,104 +106,106 @@ from llama_stack_client.types.agents import Session, SessionCreateResponse

 Methods:

-- <code title="post /agents/session/create">client.agents.sessions.<a href="./src/llama_stack_client/resources/agents/sessions.py">create</a>(\*\*<a href="src/llama_stack_client/types/agents/session_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/session_create_response.py">SessionCreateResponse</a></code>
+- <code title="post /v1/agents/{agent_id}/session">client.agents.session.<a href="./src/llama_stack_client/resources/agents/session.py">create</a>(agent_id, \*\*<a href="src/llama_stack_client/types/agents/session_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/session_create_response.py">SessionCreateResponse</a></code>
-- <code title="post /agents/session/get">client.agents.sessions.<a href="./src/llama_stack_client/resources/agents/sessions.py">retrieve</a>(\*\*<a href="src/llama_stack_client/types/agents/session_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/session.py">Session</a></code>
+- <code title="get /v1/agents/{agent_id}/session/{session_id}">client.agents.session.<a href="./src/llama_stack_client/resources/agents/session.py">retrieve</a>(session_id, \*, agent_id, \*\*<a href="src/llama_stack_client/types/agents/session_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/session.py">Session</a></code>
-- <code title="post /agents/session/delete">client.agents.sessions.<a href="./src/llama_stack_client/resources/agents/sessions.py">delete</a>(\*\*<a href="src/llama_stack_client/types/agents/session_delete_params.py">params</a>) -> None</code>
+- <code title="delete /v1/agents/{agent_id}/session/{session_id}">client.agents.session.<a href="./src/llama_stack_client/resources/agents/session.py">delete</a>(session_id, \*, agent_id) -> None</code>

 ### Steps

 Types:

 ```python
-from llama_stack_client.types.agents import AgentsStep
+from llama_stack_client.types.agents import StepRetrieveResponse
 ```

 Methods:

-- <code title="get /agents/step/get">client.agents.steps.<a href="./src/llama_stack_client/resources/agents/steps.py">retrieve</a>(\*\*<a href="src/llama_stack_client/types/agents/step_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/agents_step.py">AgentsStep</a></code>
+- <code title="get /v1/agents/{agent_id}/session/{session_id}/turn/{turn_id}/step/{step_id}">client.agents.steps.<a href="./src/llama_stack_client/resources/agents/steps.py">retrieve</a>(step_id, \*, agent_id, session_id, turn_id) -> <a href="./src/llama_stack_client/types/agents/step_retrieve_response.py">StepRetrieveResponse</a></code>

-### Turns
+### Turn

 Types:

 ```python
-from llama_stack_client.types.agents import AgentsTurnStreamChunk, Turn, TurnStreamEvent
+from llama_stack_client.types.agents import Turn, TurnCreateResponse
 ```

 Methods:

-- <code title="post /agents/turn/create">client.agents.turns.<a href="./src/llama_stack_client/resources/agents/turns.py">create</a>(\*\*<a href="src/llama_stack_client/types/agents/turn_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/agents_turn_stream_chunk.py">AgentsTurnStreamChunk</a></code>
+- <code title="post /v1/agents/{agent_id}/session/{session_id}/turn">client.agents.turn.<a href="./src/llama_stack_client/resources/agents/turn.py">create</a>(session_id, \*, agent_id, \*\*<a href="src/llama_stack_client/types/agents/turn_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/turn_create_response.py">TurnCreateResponse</a></code>
-- <code title="get /agents/turn/get">client.agents.turns.<a href="./src/llama_stack_client/resources/agents/turns.py">retrieve</a>(\*\*<a href="src/llama_stack_client/types/agents/turn_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/agents/turn.py">Turn</a></code>
+- <code title="get /v1/agents/{agent_id}/session/{session_id}/turn/{turn_id}">client.agents.turn.<a href="./src/llama_stack_client/resources/agents/turn.py">retrieve</a>(turn_id, \*, agent_id, session_id) -> <a href="./src/llama_stack_client/types/agents/turn.py">Turn</a></code>
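Putting the renamed `agents.session` and `agents.turn` resources together, one agent round trip might look like the sketch below. The `agent_config` fields, the message shape, and the `agent_id`/`session_id` response attributes are assumptions; only the method names and path parameters come from the reference.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Create an agent, open a session on it, then run a single turn.
agent = client.agents.create(
    agent_config={"model": "meta-llama/Llama-3.1-8B-Instruct", "instructions": "Be brief."}
)
session = client.agents.session.create(agent.agent_id, session_name="demo")
turn = client.agents.turn.create(
    session.session_id,
    agent_id=agent.agent_id,
    messages=[{"role": "user", "content": "Hello!"}],  # message shape is assumed
    stream=False,
)
print(turn)
```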

+## BatchInference
+
+Types:
+
+```python
+from llama_stack_client.types import BatchInferenceChatCompletionResponse
+```
+
+Methods:
+
+- <code title="post /v1/batch-inference/chat-completion">client.batch_inference.<a href="./src/llama_stack_client/resources/batch_inference.py">chat_completion</a>(\*\*<a href="src/llama_stack_client/types/batch_inference_chat_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/batch_inference_chat_completion_response.py">BatchInferenceChatCompletionResponse</a></code>
+- <code title="post /v1/batch-inference/completion">client.batch_inference.<a href="./src/llama_stack_client/resources/batch_inference.py">completion</a>(\*\*<a href="src/llama_stack_client/types/batch_inference_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/shared/batch_completion.py">BatchCompletion</a></code>

 ## Datasets

 Types:

 ```python
-from llama_stack_client.types import TrainEvalDataset
+from llama_stack_client.types import (
+    ListDatasetsResponse,
+    DatasetRetrieveResponse,
+    DatasetListResponse,
+)
 ```

 Methods:

-- <code title="post /datasets/create">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">create</a>(\*\*<a href="src/llama_stack_client/types/dataset_create_params.py">params</a>) -> None</code>
+- <code title="get /v1/datasets/{dataset_id}">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">retrieve</a>(dataset_id) -> <a href="./src/llama_stack_client/types/dataset_retrieve_response.py">Optional[DatasetRetrieveResponse]</a></code>
-- <code title="post /datasets/delete">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">delete</a>(\*\*<a href="src/llama_stack_client/types/dataset_delete_params.py">params</a>) -> None</code>
+- <code title="get /v1/datasets">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">list</a>() -> <a href="./src/llama_stack_client/types/dataset_list_response.py">DatasetListResponse</a></code>
-- <code title="get /datasets/get">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">get</a>(\*\*<a href="src/llama_stack_client/types/dataset_get_params.py">params</a>) -> <a href="./src/llama_stack_client/types/train_eval_dataset.py">TrainEvalDataset</a></code>
+- <code title="post /v1/datasets">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">register</a>(\*\*<a href="src/llama_stack_client/types/dataset_register_params.py">params</a>) -> None</code>
+- <code title="delete /v1/datasets/{dataset_id}">client.datasets.<a href="./src/llama_stack_client/resources/datasets.py">unregister</a>(dataset_id) -> None</code>

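A sketch of the new dataset lifecycle (`register`, `list`, `retrieve`, `unregister`); the `register` keyword arguments and the dataset id are illustrative assumptions.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Register a dataset, inspect it, then remove it; the kwargs are assumed.
client.datasets.register(
    dataset_id="my-eval-set",
    url={"uri": "https://example.com/rows.jsonl"},
    dataset_schema={"input": {"type": "string"}, "output": {"type": "string"}},
)
print(client.datasets.list())                   # GET /v1/datasets
print(client.datasets.retrieve("my-eval-set"))  # GET /v1/datasets/{dataset_id}
client.datasets.unregister("my-eval-set")       # DELETE /v1/datasets/{dataset_id}
```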
-## Evaluate
+## Eval

 Types:

 ```python
-from llama_stack_client.types import EvaluationJob
+from llama_stack_client.types import EvaluateResponse, Job
 ```

+Methods:
+
+- <code title="post /v1/eval/tasks/{task_id}/evaluations">client.eval.<a href="./src/llama_stack_client/resources/eval/eval.py">evaluate_rows</a>(task_id, \*\*<a href="src/llama_stack_client/types/eval_evaluate_rows_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluate_response.py">EvaluateResponse</a></code>
+- <code title="post /v1/eval/tasks/{task_id}/jobs">client.eval.<a href="./src/llama_stack_client/resources/eval/eval.py">run_eval</a>(task_id, \*\*<a href="src/llama_stack_client/types/eval_run_eval_params.py">params</a>) -> <a href="./src/llama_stack_client/types/job.py">Job</a></code>

 ### Jobs

 Types:

 ```python
-from llama_stack_client.types.evaluate import (
-    EvaluationJobArtifacts,
-    EvaluationJobLogStream,
-    EvaluationJobStatus,
-)
+from llama_stack_client.types.eval import JobStatusResponse
 ```

 Methods:

-- <code title="get /evaluate/jobs">client.evaluate.jobs.<a href="./src/llama_stack_client/resources/evaluate/jobs/jobs.py">list</a>() -> <a href="./src/llama_stack_client/types/evaluation_job.py">EvaluationJob</a></code>
+- <code title="get /v1/eval/tasks/{task_id}/jobs/{job_id}/result">client.eval.jobs.<a href="./src/llama_stack_client/resources/eval/jobs.py">retrieve</a>(job_id, \*, task_id) -> <a href="./src/llama_stack_client/types/evaluate_response.py">EvaluateResponse</a></code>
-- <code title="post /evaluate/job/cancel">client.evaluate.jobs.<a href="./src/llama_stack_client/resources/evaluate/jobs/jobs.py">cancel</a>(\*\*<a href="src/llama_stack_client/types/evaluate/job_cancel_params.py">params</a>) -> None</code>
+- <code title="delete /v1/eval/tasks/{task_id}/jobs/{job_id}">client.eval.jobs.<a href="./src/llama_stack_client/resources/eval/jobs.py">cancel</a>(job_id, \*, task_id) -> None</code>
+- <code title="get /v1/eval/tasks/{task_id}/jobs/{job_id}">client.eval.jobs.<a href="./src/llama_stack_client/resources/eval/jobs.py">status</a>(job_id, \*, task_id) -> Optional[JobStatusResponse]</code>

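A hedged sketch of driving the new eval endpoints: start a run for a registered task, then follow it through `eval.jobs`. The `task_config` payload and the `job_id` attribute on the returned `Job` are assumptions, not taken from this reference.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

task_id = "my-eval-task"  # must match an already-registered eval task (assumed id)

# The task_config shape below is an illustrative assumption about run_eval params.
job = client.eval.run_eval(
    task_id,
    task_config={
        "type": "benchmark",
        "eval_candidate": {"type": "model", "model": "my-model", "sampling_params": {}},
    },
)
print(client.eval.jobs.status(job.job_id, task_id=task_id))    # poll job state
print(client.eval.jobs.retrieve(job.job_id, task_id=task_id))  # fetch results
```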
-#### Artifacts
+## Inspect

+Types:
+
+```python
+from llama_stack_client.types import HealthInfo, ProviderInfo, RouteInfo, VersionInfo
+```
+
 Methods:

-- <code title="get /evaluate/job/artifacts">client.evaluate.jobs.artifacts.<a href="./src/llama_stack_client/resources/evaluate/jobs/artifacts.py">list</a>(\*\*<a href="src/llama_stack_client/types/evaluate/jobs/artifact_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluate/evaluation_job_artifacts.py">EvaluationJobArtifacts</a></code>
+- <code title="get /v1/health">client.inspect.<a href="./src/llama_stack_client/resources/inspect.py">health</a>() -> <a href="./src/llama_stack_client/types/health_info.py">HealthInfo</a></code>
+- <code title="get /v1/version">client.inspect.<a href="./src/llama_stack_client/resources/inspect.py">version</a>() -> <a href="./src/llama_stack_client/types/version_info.py">VersionInfo</a></code>
-#### Logs
-
-Methods:
-
-- <code title="get /evaluate/job/logs">client.evaluate.jobs.logs.<a href="./src/llama_stack_client/resources/evaluate/jobs/logs.py">list</a>(\*\*<a href="src/llama_stack_client/types/evaluate/jobs/log_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluate/evaluation_job_log_stream.py">EvaluationJobLogStream</a></code>
-
-#### Status
-
-Methods:
-
-- <code title="get /evaluate/job/status">client.evaluate.jobs.status.<a href="./src/llama_stack_client/resources/evaluate/jobs/status.py">list</a>(\*\*<a href="src/llama_stack_client/types/evaluate/jobs/status_list_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluate/evaluation_job_status.py">EvaluationJobStatus</a></code>
-
-### QuestionAnswering
-
-Methods:
-
-- <code title="post /evaluate/question_answering/">client.evaluate.question_answering.<a href="./src/llama_stack_client/resources/evaluate/question_answering.py">create</a>(\*\*<a href="src/llama_stack_client/types/evaluate/question_answering_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluation_job.py">EvaluationJob</a></code>
-
-## Evaluations
-
-Methods:
-
-- <code title="post /evaluate/summarization/">client.evaluations.<a href="./src/llama_stack_client/resources/evaluations.py">summarization</a>(\*\*<a href="src/llama_stack_client/types/evaluation_summarization_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluation_job.py">EvaluationJob</a></code>
-- <code title="post /evaluate/text_generation/">client.evaluations.<a href="./src/llama_stack_client/resources/evaluations.py">text_generation</a>(\*\*<a href="src/llama_stack_client/types/evaluation_text_generation_params.py">params</a>) -> <a href="./src/llama_stack_client/types/evaluation_job.py">EvaluationJob</a></code>

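The new `Inspect` resource takes no parameters, so a sketch is short (only the server address is an assumption):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

print(client.inspect.health())   # GET /v1/health
print(client.inspect.version())  # GET /v1/version
```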
 ## Inference

@@ -164,8 +213,8 @@ Types:

 ```python
 from llama_stack_client.types import (
-    ChatCompletionStreamChunk,
+    CompletionResponse,
-    CompletionStreamChunk,
+    EmbeddingsResponse,
     TokenLogProbs,
     InferenceChatCompletionResponse,
     InferenceCompletionResponse,
@@ -174,175 +223,232 @@ from llama_stack_client.types import (

 Methods:

-- <code title="post /inference/chat_completion">client.inference.<a href="./src/llama_stack_client/resources/inference/inference.py">chat_completion</a>(\*\*<a href="src/llama_stack_client/types/inference_chat_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/inference_chat_completion_response.py">InferenceChatCompletionResponse</a></code>
+- <code title="post /v1/inference/chat-completion">client.inference.<a href="./src/llama_stack_client/resources/inference.py">chat_completion</a>(\*\*<a href="src/llama_stack_client/types/inference_chat_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/inference_chat_completion_response.py">InferenceChatCompletionResponse</a></code>
-- <code title="post /inference/completion">client.inference.<a href="./src/llama_stack_client/resources/inference/inference.py">completion</a>(\*\*<a href="src/llama_stack_client/types/inference_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/inference_completion_response.py">InferenceCompletionResponse</a></code>
+- <code title="post /v1/inference/completion">client.inference.<a href="./src/llama_stack_client/resources/inference.py">completion</a>(\*\*<a href="src/llama_stack_client/types/inference_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/inference_completion_response.py">InferenceCompletionResponse</a></code>
+- <code title="post /v1/inference/embeddings">client.inference.<a href="./src/llama_stack_client/resources/inference.py">embeddings</a>(\*\*<a href="src/llama_stack_client/types/inference_embeddings_params.py">params</a>) -> <a href="./src/llama_stack_client/types/embeddings_response.py">EmbeddingsResponse</a></code>

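A minimal sketch of the renamed `/v1/inference/*` endpoints; the `model_id`, `messages`, and `contents` keyword arguments and the model names are illustrative assumptions, not taken from this reference.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Chat completion against an (assumed) served model.
response = client.inference.chat_completion(
    model_id="meta-llama/Llama-3.1-8B-Instruct",
    messages=[{"role": "user", "content": "Say hello in one word."}],
)
print(response)

# Embeddings for a batch of strings; kwargs are assumed parameter names.
embeddings = client.inference.embeddings(
    model_id="all-MiniLM-L6-v2",
    contents=["Llama Stack inference endpoints"],
)
print(embeddings)
```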
-### Embeddings
+## VectorIo

 Types:

 ```python
-from llama_stack_client.types.inference import Embeddings
+from llama_stack_client.types import QueryChunksResponse
 ```

 Methods:

-- <code title="post /inference/embeddings">client.inference.embeddings.<a href="./src/llama_stack_client/resources/inference/embeddings.py">create</a>(\*\*<a href="src/llama_stack_client/types/inference/embedding_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/inference/embeddings.py">Embeddings</a></code>
+- <code title="post /v1/vector-io/insert">client.vector_io.<a href="./src/llama_stack_client/resources/vector_io.py">insert</a>(\*\*<a href="src/llama_stack_client/types/vector_io_insert_params.py">params</a>) -> None</code>
+- <code title="post /v1/vector-io/query">client.vector_io.<a href="./src/llama_stack_client/resources/vector_io.py">query</a>(\*\*<a href="src/llama_stack_client/types/vector_io_query_params.py">params</a>) -> <a href="./src/llama_stack_client/types/query_chunks_response.py">QueryChunksResponse</a></code>

-## Safety
+## VectorDBs

-Types:
-
-```python
-from llama_stack_client.types import RunSheidResponse
-```
-
-Methods:
-
-- <code title="post /safety/run_shield">client.safety.<a href="./src/llama_stack_client/resources/safety.py">run_shield</a>(\*\*<a href="src/llama_stack_client/types/safety_run_shield_params.py">params</a>) -> <a href="./src/llama_stack_client/types/run_sheid_response.py">RunSheidResponse</a></code>
-
-## Memory
-
 Types:

 ```python
 from llama_stack_client.types import (
-    QueryDocuments,
+    ListVectorDBsResponse,
-    MemoryCreateResponse,
+    VectorDBRetrieveResponse,
-    MemoryRetrieveResponse,
+    VectorDBListResponse,
-    MemoryListResponse,
+    VectorDBRegisterResponse,
-    MemoryDropResponse,
 )
 ```

 Methods:

-- <code title="post /memory/create">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">create</a>(\*\*<a href="src/llama_stack_client/types/memory_create_params.py">params</a>) -> <a href="./src/llama_stack_client/types/memory_create_response.py">object</a></code>
+- <code title="get /v1/vector-dbs/{vector_db_id}">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">retrieve</a>(vector_db_id) -> <a href="./src/llama_stack_client/types/vector_db_retrieve_response.py">Optional[VectorDBRetrieveResponse]</a></code>
-- <code title="get /memory/get">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">retrieve</a>(\*\*<a href="src/llama_stack_client/types/memory_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/memory_retrieve_response.py">object</a></code>
+- <code title="get /v1/vector-dbs">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">list</a>() -> <a href="./src/llama_stack_client/types/vector_db_list_response.py">VectorDBListResponse</a></code>
-- <code title="post /memory/update">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">update</a>(\*\*<a href="src/llama_stack_client/types/memory_update_params.py">params</a>) -> None</code>
+- <code title="post /v1/vector-dbs">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">register</a>(\*\*<a href="src/llama_stack_client/types/vector_db_register_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_db_register_response.py">VectorDBRegisterResponse</a></code>
-- <code title="get /memory/list">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">list</a>() -> <a href="./src/llama_stack_client/types/memory_list_response.py">object</a></code>
+- <code title="delete /v1/vector-dbs/{vector_db_id}">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">unregister</a>(vector_db_id) -> None</code>
-- <code title="post /memory/drop">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">drop</a>(\*\*<a href="src/llama_stack_client/types/memory_drop_params.py">params</a>) -> str</code>
-- <code title="post /memory/insert">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">insert</a>(\*\*<a href="src/llama_stack_client/types/memory_insert_params.py">params</a>) -> None</code>
-- <code title="post /memory/query">client.memory.<a href="./src/llama_stack_client/resources/memory/memory.py">query</a>(\*\*<a href="src/llama_stack_client/types/memory_query_params.py">params</a>) -> <a href="./src/llama_stack_client/types/query_documents.py">QueryDocuments</a></code>

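The `VectorDBs` and `VectorIo` resources replace the old `Memory` API; a sketch of registering a vector database and writing/reading chunks follows. All keyword arguments below are illustrative assumptions about the params.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Register a vector database backed by an (assumed) embedding model.
client.vector_dbs.register(
    vector_db_id="docs",
    embedding_model="all-MiniLM-L6-v2",
    embedding_dimension=384,
)

# Insert a chunk and query it back; chunk fields are assumed.
client.vector_io.insert(
    vector_db_id="docs",
    chunks=[{"content": "Llama Stack stores chunks in vector databases.",
             "metadata": {"document_id": "doc-1"}}],
)
print(client.vector_io.query(vector_db_id="docs", query="Where are chunks stored?"))

print(client.vector_dbs.list())       # GET /v1/vector-dbs
client.vector_dbs.unregister("docs")  # DELETE /v1/vector-dbs/{vector_db_id}
```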
-### Documents
-
-Types:
-
-```python
-from llama_stack_client.types.memory import DocumentRetrieveResponse
-```
-
-Methods:
-
-- <code title="post /memory/documents/get">client.memory.documents.<a href="./src/llama_stack_client/resources/memory/documents.py">retrieve</a>(\*\*<a href="src/llama_stack_client/types/memory/document_retrieve_params.py">params</a>) -> <a href="./src/llama_stack_client/types/memory/document_retrieve_response.py">DocumentRetrieveResponse</a></code>
-- <code title="post /memory/documents/delete">client.memory.documents.<a href="./src/llama_stack_client/resources/memory/documents.py">delete</a>(\*\*<a href="src/llama_stack_client/types/memory/document_delete_params.py">params</a>) -> None</code>
-
-## PostTraining
-
-Types:
-
-```python
-from llama_stack_client.types import PostTrainingJob
-```
-
-Methods:
-
-- <code title="post /post_training/preference_optimize">client.post_training.<a href="./src/llama_stack_client/resources/post_training/post_training.py">preference_optimize</a>(\*\*<a href="src/llama_stack_client/types/post_training_preference_optimize_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training_job.py">PostTrainingJob</a></code>
-- <code title="post /post_training/supervised_fine_tune">client.post_training.<a href="./src/llama_stack_client/resources/post_training/post_training.py">supervised_fine_tune</a>(\*\*<a href="src/llama_stack_client/types/post_training_supervised_fine_tune_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training_job.py">PostTrainingJob</a></code>
-
-### Jobs
-
-Types:
-
-```python
-from llama_stack_client.types.post_training import (
-    PostTrainingJobArtifacts,
-    PostTrainingJobLogStream,
-    PostTrainingJobStatus,
-)
-```
-
-Methods:
-
-- <code title="get /post_training/jobs">client.post_training.jobs.<a href="./src/llama_stack_client/resources/post_training/jobs.py">list</a>() -> <a href="./src/llama_stack_client/types/post_training_job.py">PostTrainingJob</a></code>
-- <code title="get /post_training/job/artifacts">client.post_training.jobs.<a href="./src/llama_stack_client/resources/post_training/jobs.py">artifacts</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_artifacts_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training/post_training_job_artifacts.py">PostTrainingJobArtifacts</a></code>
-- <code title="post /post_training/job/cancel">client.post_training.jobs.<a href="./src/llama_stack_client/resources/post_training/jobs.py">cancel</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_cancel_params.py">params</a>) -> None</code>
-- <code title="get /post_training/job/logs">client.post_training.jobs.<a href="./src/llama_stack_client/resources/post_training/jobs.py">logs</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_logs_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training/post_training_job_log_stream.py">PostTrainingJobLogStream</a></code>
-- <code title="get /post_training/job/status">client.post_training.jobs.<a href="./src/llama_stack_client/resources/post_training/jobs.py">status</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_status_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training/post_training_job_status.py">PostTrainingJobStatus</a></code>
-
-## RewardScoring
-
-Types:
-
-```python
-from llama_stack_client.types import RewardScoring, ScoredDialogGenerations
-```
-
-Methods:
-
-- <code title="post /reward_scoring/score">client.reward_scoring.<a href="./src/llama_stack_client/resources/reward_scoring.py">score</a>(\*\*<a href="src/llama_stack_client/types/reward_scoring_score_params.py">params</a>) -> <a href="./src/llama_stack_client/types/reward_scoring.py">RewardScoring</a></code>
-
-## SyntheticDataGeneration
-
-Types:
-
-```python
-from llama_stack_client.types import SyntheticDataGeneration
-```
-
-Methods:
-
-- <code title="post /synthetic_data_generation/generate">client.synthetic_data_generation.<a href="./src/llama_stack_client/resources/synthetic_data_generation.py">generate</a>(\*\*<a href="src/llama_stack_client/types/synthetic_data_generation_generate_params.py">params</a>) -> <a href="./src/llama_stack_client/types/synthetic_data_generation.py">SyntheticDataGeneration</a></code>
-
-## BatchInference
-
-Types:
-
-```python
-from llama_stack_client.types import BatchChatCompletion
-```
-
-Methods:
-
-- <code title="post /batch_inference/chat_completion">client.batch_inference.<a href="./src/llama_stack_client/resources/batch_inference.py">chat_completion</a>(\*\*<a href="src/llama_stack_client/types/batch_inference_chat_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/batch_chat_completion.py">BatchChatCompletion</a></code>
-- <code title="post /batch_inference/completion">client.batch_inference.<a href="./src/llama_stack_client/resources/batch_inference.py">completion</a>(\*\*<a href="src/llama_stack_client/types/batch_inference_completion_params.py">params</a>) -> <a href="./src/llama_stack_client/types/shared/batch_completion.py">BatchCompletion</a></code>

 ## Models

 Types:

 ```python
-from llama_stack_client.types import ModelServingSpec
+from llama_stack_client.types import ListModelsResponse, Model, ModelListResponse
 ```

 Methods:

-- <code title="get /models/list">client.models.<a href="./src/llama_stack_client/resources/models.py">list</a>() -> <a href="./src/llama_stack_client/types/model_serving_spec.py">ModelServingSpec</a></code>
+- <code title="get /v1/models/{model_id}">client.models.<a href="./src/llama_stack_client/resources/models.py">retrieve</a>(model_id) -> <a href="./src/llama_stack_client/types/model.py">Optional[Model]</a></code>
-- <code title="get /models/get">client.models.<a href="./src/llama_stack_client/resources/models.py">get</a>(\*\*<a href="src/llama_stack_client/types/model_get_params.py">params</a>) -> <a href="./src/llama_stack_client/types/model_serving_spec.py">Optional</a></code>
+- <code title="get /v1/models">client.models.<a href="./src/llama_stack_client/resources/models.py">list</a>() -> <a href="./src/llama_stack_client/types/model_list_response.py">ModelListResponse</a></code>
+- <code title="post /v1/models">client.models.<a href="./src/llama_stack_client/resources/models.py">register</a>(\*\*<a href="src/llama_stack_client/types/model_register_params.py">params</a>) -> <a href="./src/llama_stack_client/types/model.py">Model</a></code>
+- <code title="delete /v1/models/{model_id}">client.models.<a href="./src/llama_stack_client/resources/models.py">unregister</a>(model_id) -> None</code>

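A short sketch of the model registry methods; the `model_id` and `provider_id` values passed to `register` are illustrative assumptions.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

print(client.models.list())  # GET /v1/models

# The register kwargs below are assumed parameter names and values.
client.models.register(model_id="meta-llama/Llama-3.1-8B-Instruct", provider_id="ollama")
print(client.models.retrieve("meta-llama/Llama-3.1-8B-Instruct"))
client.models.unregister("meta-llama/Llama-3.1-8B-Instruct")
```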
-## MemoryBanks
+## PostTraining

 Types:

 ```python
-from llama_stack_client.types import MemoryBankSpec
+from llama_stack_client.types import ListPostTrainingJobsResponse, PostTrainingJob
 ```

 Methods:

-- <code title="get /memory_banks/list">client.memory_banks.<a href="./src/llama_stack_client/resources/memory_banks.py">list</a>() -> <a href="./src/llama_stack_client/types/memory_bank_spec.py">MemoryBankSpec</a></code>
+- <code title="post /v1/post-training/preference-optimize">client.post_training.<a href="./src/llama_stack_client/resources/post_training/post_training.py">preference_optimize</a>(\*\*<a href="src/llama_stack_client/types/post_training_preference_optimize_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training_job.py">PostTrainingJob</a></code>
-- <code title="get /memory_banks/get">client.memory_banks.<a href="./src/llama_stack_client/resources/memory_banks.py">get</a>(\*\*<a href="src/llama_stack_client/types/memory_bank_get_params.py">params</a>) -> <a href="./src/llama_stack_client/types/memory_bank_spec.py">Optional</a></code>
+- <code title="post /v1/post-training/supervised-fine-tune">client.post_training.<a href="./src/llama_stack_client/resources/post_training/post_training.py">supervised_fine_tune</a>(\*\*<a href="src/llama_stack_client/types/post_training_supervised_fine_tune_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training_job.py">PostTrainingJob</a></code>

+### Job
+
+Types:
+
+```python
+from llama_stack_client.types.post_training import (
+    JobListResponse,
+    JobArtifactsResponse,
+    JobStatusResponse,
+)
+```
+
+Methods:
+
+- <code title="get /v1/post-training/jobs">client.post_training.job.<a href="./src/llama_stack_client/resources/post_training/job.py">list</a>() -> <a href="./src/llama_stack_client/types/post_training/job_list_response.py">JobListResponse</a></code>
+- <code title="get /v1/post-training/job/artifacts">client.post_training.job.<a href="./src/llama_stack_client/resources/post_training/job.py">artifacts</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_artifacts_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training/job_artifacts_response.py">Optional[JobArtifactsResponse]</a></code>
+- <code title="post /v1/post-training/job/cancel">client.post_training.job.<a href="./src/llama_stack_client/resources/post_training/job.py">cancel</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_cancel_params.py">params</a>) -> None</code>
+- <code title="get /v1/post-training/job/status">client.post_training.job.<a href="./src/llama_stack_client/resources/post_training/job.py">status</a>(\*\*<a href="src/llama_stack_client/types/post_training/job_status_params.py">params</a>) -> <a href="./src/llama_stack_client/types/post_training/job_status_response.py">Optional[JobStatusResponse]</a></code>

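A sketch of following post-training jobs through the new `post_training.job` resource; the `job_uuid` keyword argument and the job id value are assumed, not taken from this reference.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

print(client.post_training.job.list())  # GET /v1/post-training/jobs

# job_uuid is an assumed parameter name for the job-scoped endpoints.
print(client.post_training.job.status(job_uuid="job-123"))
print(client.post_training.job.artifacts(job_uuid="job-123"))
client.post_training.job.cancel(job_uuid="job-123")
```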
+## Providers
+
+Types:
+
+```python
+from llama_stack_client.types import ListProvidersResponse, ProviderListResponse
+```
+
+Methods:
+
+- <code title="get /v1/inspect/providers">client.providers.<a href="./src/llama_stack_client/resources/providers.py">list</a>() -> <a href="./src/llama_stack_client/types/provider_list_response.py">ProviderListResponse</a></code>
+
+## Routes
+
+Types:
+
+```python
+from llama_stack_client.types import ListRoutesResponse, RouteListResponse
+```
+
+Methods:
+
+- <code title="get /v1/inspect/routes">client.routes.<a href="./src/llama_stack_client/resources/routes.py">list</a>() -> <a href="./src/llama_stack_client/types/route_list_response.py">RouteListResponse</a></code>

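Both of these new resources are read-only listings, so a sketch is one call each (only the server address is assumed):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

print(client.providers.list())  # GET /v1/inspect/providers
print(client.routes.list())     # GET /v1/inspect/routes
```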
+## Safety
+
+Types:
+
+```python
+from llama_stack_client.types import RunShieldResponse
+```
+
+Methods:
+
+- <code title="post /v1/safety/run-shield">client.safety.<a href="./src/llama_stack_client/resources/safety.py">run_shield</a>(\*\*<a href="src/llama_stack_client/types/safety_run_shield_params.py">params</a>) -> <a href="./src/llama_stack_client/types/run_shield_response.py">RunShieldResponse</a></code>

 ## Shields

 Types:

 ```python
-from llama_stack_client.types import ShieldSpec
+from llama_stack_client.types import ListShieldsResponse, Shield, ShieldListResponse
 ```

 Methods:

-- <code title="get /shields/list">client.shields.<a href="./src/llama_stack_client/resources/shields.py">list</a>() -> <a href="./src/llama_stack_client/types/shield_spec.py">ShieldSpec</a></code>
+- <code title="get /v1/shields/{identifier}">client.shields.<a href="./src/llama_stack_client/resources/shields.py">retrieve</a>(identifier) -> <a href="./src/llama_stack_client/types/shield.py">Optional[Shield]</a></code>
-- <code title="get /shields/get">client.shields.<a href="./src/llama_stack_client/resources/shields.py">get</a>(\*\*<a href="src/llama_stack_client/types/shield_get_params.py">params</a>) -> <a href="./src/llama_stack_client/types/shield_spec.py">Optional</a></code>
+- <code title="get /v1/shields">client.shields.<a href="./src/llama_stack_client/resources/shields.py">list</a>() -> <a href="./src/llama_stack_client/types/shield_list_response.py">ShieldListResponse</a></code>
+- <code title="post /v1/shields">client.shields.<a href="./src/llama_stack_client/resources/shields.py">register</a>(\*\*<a href="src/llama_stack_client/types/shield_register_params.py">params</a>) -> <a href="./src/llama_stack_client/types/shield.py">Shield</a></code>

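A sketch combining the `Shields` registry with the relocated `/v1/safety/run-shield` endpoint; the `shield_id`, `provider_shield_id`, `messages`, and `params` arguments are illustrative assumptions about the register and run-shield params.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

print(client.shields.list())  # GET /v1/shields

# Register an (assumed) shield, then run it over a conversation.
client.shields.register(shield_id="content-safety", provider_shield_id="llama-guard")
result = client.safety.run_shield(
    shield_id="content-safety",
    messages=[{"role": "user", "content": "How do I bake bread?"}],
    params={},
)
print(result)
```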
+## SyntheticDataGeneration
+
+Types:
+
+```python
+from llama_stack_client.types import SyntheticDataGenerationResponse
+```
+
+Methods:
+
+- <code title="post /v1/synthetic-data-generation/generate">client.synthetic_data_generation.<a href="./src/llama_stack_client/resources/synthetic_data_generation.py">generate</a>(\*\*<a href="src/llama_stack_client/types/synthetic_data_generation_generate_params.py">params</a>) -> <a href="./src/llama_stack_client/types/synthetic_data_generation_response.py">SyntheticDataGenerationResponse</a></code>
+
+## Telemetry
+
+Types:
+
+```python
+from llama_stack_client.types import (
+    QuerySpansResponse,
+    SpanWithStatus,
+    Trace,
+    TelemetryGetSpanResponse,
+    TelemetryGetSpanTreeResponse,
+    TelemetryQuerySpansResponse,
+    TelemetryQueryTracesResponse,
+)
+```
+
+Methods:
+
+- <code title="get /v1/telemetry/traces/{trace_id}/spans/{span_id}">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">get_span</a>(span_id, \*, trace_id) -> <a href="./src/llama_stack_client/types/telemetry_get_span_response.py">TelemetryGetSpanResponse</a></code>
+- <code title="get /v1/telemetry/spans/{span_id}/tree">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">get_span_tree</a>(span_id, \*\*<a href="src/llama_stack_client/types/telemetry_get_span_tree_params.py">params</a>) -> <a href="./src/llama_stack_client/types/telemetry_get_span_tree_response.py">TelemetryGetSpanTreeResponse</a></code>
+- <code title="get /v1/telemetry/traces/{trace_id}">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">get_trace</a>(trace_id) -> <a href="./src/llama_stack_client/types/trace.py">Trace</a></code>
+- <code title="post /v1/telemetry/events">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">log_event</a>(\*\*<a href="src/llama_stack_client/types/telemetry_log_event_params.py">params</a>) -> None</code>
+- <code title="get /v1/telemetry/spans">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">query_spans</a>(\*\*<a href="src/llama_stack_client/types/telemetry_query_spans_params.py">params</a>) -> <a href="./src/llama_stack_client/types/telemetry_query_spans_response.py">TelemetryQuerySpansResponse</a></code>
+- <code title="get /v1/telemetry/traces">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">query_traces</a>(\*\*<a href="src/llama_stack_client/types/telemetry_query_traces_params.py">params</a>) -> <a href="./src/llama_stack_client/types/telemetry_query_traces_response.py">TelemetryQueryTracesResponse</a></code>
+- <code title="post /v1/telemetry/spans/export">client.telemetry.<a href="./src/llama_stack_client/resources/telemetry.py">save_spans_to_dataset</a>(\*\*<a href="src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py">params</a>) -> None</code>

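A sketch of reading traces back out of the expanded telemetry API; the `limit` keyword argument and the trace/span id values are illustrative assumptions.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # address is illustrative

# Query recent traces; the limit kwarg is an assumed query-traces parameter.
traces = client.telemetry.query_traces(limit=5)
print(traces)

# Ids below are placeholders; in practice they come from the traces above.
print(client.telemetry.get_trace("trace-id-from-query"))
print(client.telemetry.get_span_tree("span-id-from-trace"))
```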
+## Datasetio
+
+Types:
+
+```python
+from llama_stack_client.types import PaginatedRowsResult
+```
+
+Methods:
+
+- <code title="post /v1/datasetio/rows">client.datasetio.<a href="./src/llama_stack_client/resources/datasetio.py">append_rows</a>(\*\*<a href="src/llama_stack_client/types/datasetio_append_rows_params.py">params</a>) -> None</code>
+- <code title="get /v1/datasetio/rows">client.datasetio.<a href="./src/llama_stack_client/resources/datasetio.py">get_rows_paginated</a>(\*\*<a href="src/llama_stack_client/types/datasetio_get_rows_paginated_params.py">params</a>) -> <a href="./src/llama_stack_client/types/paginated_rows_result.py">PaginatedRowsResult</a></code>

+## Scoring
+
+Types:
+
+```python
+from llama_stack_client.types import ScoringScoreResponse, ScoringScoreBatchResponse
+```
+
+Methods:
+
+- <code title="post /v1/scoring/score">client.scoring.<a href="./src/llama_stack_client/resources/scoring.py">score</a>(\*\*<a href="src/llama_stack_client/types/scoring_score_params.py">params</a>) -> <a href="./src/llama_stack_client/types/scoring_score_response.py">ScoringScoreResponse</a></code>
+- <code title="post /v1/scoring/score-batch">client.scoring.<a href="./src/llama_stack_client/resources/scoring.py">score_batch</a>(\*\*<a href="src/llama_stack_client/types/scoring_score_batch_params.py">params</a>) -> <a href="./src/llama_stack_client/types/scoring_score_batch_response.py">ScoringScoreBatchResponse</a></code>
+
+## ScoringFunctions
+
+Types:
+
+```python
+from llama_stack_client.types import (
+    ListScoringFunctionsResponse,
+    ScoringFn,
+    ScoringFunctionListResponse,
+)
+```
+
+Methods:
+
+- <code title="get /v1/scoring-functions/{scoring_fn_id}">client.scoring_functions.<a href="./src/llama_stack_client/resources/scoring_functions.py">retrieve</a>(scoring_fn_id) -> <a href="./src/llama_stack_client/types/scoring_fn.py">Optional[ScoringFn]</a></code>
+- <code title="get /v1/scoring-functions">client.scoring_functions.<a href="./src/llama_stack_client/resources/scoring_functions.py">list</a>() -> <a href="./src/llama_stack_client/types/scoring_function_list_response.py">ScoringFunctionListResponse</a></code>
+- <code title="post /v1/scoring-functions">client.scoring_functions.<a href="./src/llama_stack_client/resources/scoring_functions.py">register</a>(\*\*<a href="src/llama_stack_client/types/scoring_function_register_params.py">params</a>) -> None</code>
+
+## EvalTasks
+
+Types:
+
+```python
+from llama_stack_client.types import EvalTask, ListEvalTasksResponse, EvalTaskListResponse
+```
+
+Methods:
+
+- <code title="get /v1/eval-tasks/{eval_task_id}">client.eval_tasks.<a href="./src/llama_stack_client/resources/eval_tasks.py">retrieve</a>(eval_task_id) -> <a href="./src/llama_stack_client/types/eval_task.py">Optional[EvalTask]</a></code>
+- <code title="get /v1/eval-tasks">client.eval_tasks.<a href="./src/llama_stack_client/resources/eval_tasks.py">list</a>() -> <a href="./src/llama_stack_client/types/eval_task_list_response.py">EvalTaskListResponse</a></code>
+- <code title="post /v1/eval-tasks">client.eval_tasks.<a href="./src/llama_stack_client/resources/eval_tasks.py">register</a>(\*\*<a href="src/llama_stack_client/types/eval_task_register_params.py">params</a>) -> None</code>