diff --git a/.github/actions/run-and-record-tests/action.yml b/.github/actions/run-and-record-tests/action.yml index ac600d570..3929df09c 100644 --- a/.github/actions/run-and-record-tests/action.yml +++ b/.github/actions/run-and-record-tests/action.yml @@ -86,9 +86,10 @@ runs: if: ${{ always() }} shell: bash run: | - # Ollama logs (if ollama container exists) - sudo docker logs ollama > ollama-${{ inputs.inference-mode }}.log 2>&1 || true - # Note: distro container logs are now dumped in integration-tests.sh before container is removed + sudo docker logs ollama > ollama-${{ inputs.inference-mode }}.log || true + distro_name=$(echo "${{ inputs.stack-config }}" | sed 's/^docker://' | sed 's/^server://') + stack_container_name="llama-stack-test-$distro_name" + sudo docker logs $stack_container_name > docker-${distro_name}-${{ inputs.inference-mode }}.log || true - name: Upload logs if: ${{ always() }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index d38e8337b..71abb8461 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -47,7 +47,7 @@ jobs: strategy: fail-fast: false matrix: - client-type: [library, docker] + client-type: [library, server, docker] # Use Python 3.13 only on nightly schedule (daily latest client test), otherwise use 3.12 python-version: ${{ github.event.schedule == '0 0 * * *' && fromJSON('["3.12", "3.13"]') || fromJSON('["3.12"]') }} client-version: ${{ (github.event.schedule == '0 0 * * *' || github.event.inputs.test-all-client-versions == 'true') && fromJSON('["published", "latest"]') || fromJSON('["latest"]') }} @@ -61,7 +61,7 @@ jobs: && fromJSON('[{"setup": "vllm", "suite": "base"}]') || github.event.inputs.test-setup == 'ollama-vision' && fromJSON('[{"setup": "ollama-vision", "suite": "vision"}]') - || fromJSON('[{"setup": "ollama", "suite": "base"}, {"setup": "ollama-vision", "suite": "vision"}, {"setup": "gpt", "suite": "responses"}]') + || fromJSON('[{"setup": "ollama", "suite": "base"}, {"setup": "ollama-vision", "suite": "vision"}]') }} steps: diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 0fdd50acc..b5845be53 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -37,7 +37,7 @@ jobs: .pre-commit-config.yaml - name: Set up Node.js - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: node-version: '20' cache: 'npm' diff --git a/.github/workflows/precommit-trigger.yml b/.github/workflows/precommit-trigger.yml index b05898d29..0c23b57de 100644 --- a/.github/workflows/precommit-trigger.yml +++ b/.github/workflows/precommit-trigger.yml @@ -99,7 +99,7 @@ jobs: owner: context.repo.owner, repo: context.repo.repo, issue_number: ${{ steps.check_author.outputs.pr_number }}, - body: `⏳ Running [pre-commit hooks](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) on PR #${{ steps.check_author.outputs.pr_number }}...` + body: `⏳ Running pre-commit hooks on PR #${{ steps.check_author.outputs.pr_number }}...` }); - name: Checkout PR branch (same-repo) @@ -141,7 +141,7 @@ jobs: - name: Set up Node.js if: steps.check_author.outputs.authorized == 'true' - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: node-version: '20' cache: 'npm' diff --git 
a/.github/workflows/providers-list-deps.yml b/.github/workflows/providers-list-deps.yml index e30e1e5fb..df491b680 100644 --- a/.github/workflows/providers-list-deps.yml +++ b/.github/workflows/providers-list-deps.yml @@ -36,7 +36,7 @@ jobs: distros: ${{ steps.set-matrix.outputs.distros }} steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Generate Distribution List id: set-matrix @@ -55,7 +55,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install dependencies uses: ./.github/actions/setup-runner @@ -79,7 +79,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install dependencies uses: ./.github/actions/setup-runner @@ -92,7 +92,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install dependencies uses: ./.github/actions/setup-runner diff --git a/.github/workflows/python-build-test.yml b/.github/workflows/python-build-test.yml index 96243285f..dfa844175 100644 --- a/.github/workflows/python-build-test.yml +++ b/.github/workflows/python-build-test.yml @@ -24,7 +24,7 @@ jobs: uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: Install uv - uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # v7.1.0 + uses: astral-sh/setup-uv@eb1897b8dc4b5d5bfe39a428a8f2304605e0983c # v7.0.0 with: python-version: ${{ matrix.python-version }} activate-environment: true diff --git a/.github/workflows/ui-unit-tests.yml b/.github/workflows/ui-unit-tests.yml index e8f318b8e..c16f512d1 100644 --- a/.github/workflows/ui-unit-tests.yml +++ b/.github/workflows/ui-unit-tests.yml @@ -29,7 +29,7 @@ jobs: uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: Setup Node.js - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: node-version: ${{ matrix.node-version }} cache: 'npm' diff --git a/benchmarking/k8s-benchmark/stack_run_config.yaml b/benchmarking/k8s-benchmark/stack_run_config.yaml index 06a481f43..2ccaa21aa 100644 --- a/benchmarking/k8s-benchmark/stack_run_config.yaml +++ b/benchmarking/k8s-benchmark/stack_run_config.yaml @@ -27,24 +27,28 @@ providers: config: storage_dir: ${env.FILES_STORAGE_DIR:=~/.llama/distributions/starter/files} metadata_store: - table_name: files_metadata - backend: sql_default + type: sqlite + db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/starter}/files_metadata.db vector_io: - provider_id: ${env.ENABLE_CHROMADB:+chromadb} provider_type: remote::chromadb config: url: ${env.CHROMADB_URL:=} - persistence: - namespace: vector_io::chroma_remote - backend: kv_default + kvstore: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} files: - provider_id: meta-reference-files provider_type: inline::localfs 
config: storage_dir: ${env.FILES_STORAGE_DIR:=~/.llama/distributions/starter/files} metadata_store: - table_name: files_metadata - backend: sql_default + type: sqlite + db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/starter}/files_metadata.db safety: - provider_id: llama-guard provider_type: inline::llama-guard @@ -54,15 +58,20 @@ providers: - provider_id: meta-reference provider_type: inline::meta-reference config: - persistence: - agent_state: - namespace: agents - backend: kv_default - responses: - table_name: responses - backend: sql_default - max_write_queue_size: 10000 - num_writers: 4 + persistence_store: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} + responses_store: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} telemetry: - provider_id: meta-reference provider_type: inline::meta-reference @@ -103,45 +112,32 @@ storage: db: ${env.POSTGRES_DB:=llamastack} user: ${env.POSTGRES_USER:=llamastack} password: ${env.POSTGRES_PASSWORD:=llamastack} - stores: + references: metadata: - namespace: registry backend: kv_default + namespace: registry inference: + backend: sql_default table_name: inference_store - backend: sql_default - max_write_queue_size: 10000 - num_writers: 4 - conversations: - table_name: openai_conversations - backend: sql_default -registered_resources: - models: - - metadata: - embedding_dimension: 768 - model_id: nomic-embed-text-v1.5 - provider_id: sentence-transformers - model_type: embedding - - model_id: ${env.INFERENCE_MODEL} - provider_id: vllm-inference - model_type: llm - shields: - - shield_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} - vector_dbs: [] - datasets: [] - scoring_fns: [] - benchmarks: [] - tool_groups: - - toolgroup_id: builtin::websearch - provider_id: tavily-search - - toolgroup_id: builtin::rag - provider_id: rag-runtime +models: +- metadata: + embedding_dimension: 768 + model_id: nomic-embed-text-v1.5 + provider_id: sentence-transformers + model_type: embedding +- model_id: ${env.INFERENCE_MODEL} + provider_id: vllm-inference + model_type: llm +shields: +- shield_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} +vector_dbs: [] +datasets: [] +scoring_fns: [] +benchmarks: [] +tool_groups: +- toolgroup_id: builtin::websearch + provider_id: tavily-search +- toolgroup_id: builtin::rag + provider_id: rag-runtime server: port: 8323 -telemetry: - enabled: true -vector_stores: - default_provider_id: chromadb - default_embedding_model: - provider_id: sentence-transformers - model_id: nomic-ai/nomic-embed-text-v1.5 diff --git a/client-sdks/stainless/openapi.stainless.yml b/client-sdks/stainless/openapi.stainless.yml index 9461be996..0a5dfc044 100644 --- a/client-sdks/stainless/openapi.stainless.yml +++ b/client-sdks/stainless/openapi.stainless.yml @@ -208,6 +208,19 @@ resources: type: http endpoint: post /v1/conversations/{conversation_id}/items + datasets: + models: + list_datasets_response: ListDatasetsResponse + methods: + register: post /v1beta/datasets + retrieve: get /v1beta/datasets/{dataset_id} + list: + endpoint: get /v1beta/datasets + paginated: false + unregister: delete /v1beta/datasets/{dataset_id} + iterrows: get /v1beta/datasetio/iterrows/{dataset_id} + appendrows: post 
/v1beta/datasetio/append-rows/{dataset_id} + inspect: models: healthInfo: HealthInfo @@ -508,21 +521,6 @@ resources: stream_event_model: alpha.agents.turn.agent_turn_response_stream_chunk param_discriminator: stream - beta: - subresources: - datasets: - models: - list_datasets_response: ListDatasetsResponse - methods: - register: post /v1beta/datasets - retrieve: get /v1beta/datasets/{dataset_id} - list: - endpoint: get /v1beta/datasets - paginated: false - unregister: delete /v1beta/datasets/{dataset_id} - iterrows: get /v1beta/datasetio/iterrows/{dataset_id} - appendrows: post /v1beta/datasetio/append-rows/{dataset_id} - settings: license: MIT diff --git a/client-sdks/stainless/openapi.yml b/client-sdks/stainless/openapi.yml index bd2d4b7a4..eff01931f 100644 --- a/client-sdks/stainless/openapi.yml +++ b/client-sdks/stainless/openapi.yml @@ -350,46 +350,146 @@ paths: in: query description: >- An item ID to list items after, used in pagination. - required: false + required: true schema: - type: string + oneOf: + - type: string + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: include in: query description: >- Specify additional output data to include in the response. - required: false + required: true schema: - type: array - items: - type: string - enum: - - web_search_call.action.sources - - code_interpreter_call.outputs - - computer_call_output.output.image_url - - file_search_call.results - - message.input_image.image_url - - message.output_text.logprobs - - reasoning.encrypted_content - title: ConversationItemInclude - description: >- - Specify additional output data to include in the model response. + oneOf: + - type: array + items: + type: string + enum: + - code_interpreter_call.outputs + - computer_call_output.output.image_url + - file_search_call.results + - message.input_image.image_url + - message.output_text.logprobs + - reasoning.encrypted_content + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: limit in: query description: >- A limit on the number of objects to be returned (1-100, default 20). - required: false + required: true schema: - type: integer + oneOf: + - type: integer + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. 
+ + ``` - name: order in: query description: >- The order to return items in (asc or desc, default desc). - required: false + required: true schema: - type: string - enum: - - asc - - desc + oneOf: + - type: string + enum: + - asc + - desc + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` deprecated: false post: responses: @@ -6340,7 +6440,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -6382,7 +6482,6 @@ components: enum: - llm - embedding - - rerank title: ModelType description: >- Enumeration of supported model types in Llama Stack. @@ -6443,10 +6542,11 @@ components: model: type: string description: >- - (Optional) The content moderation model you would like to use. + The content moderation model you would like to use. additionalProperties: false required: - input + - model title: RunModerationRequest ModerationObject: type: object @@ -9032,7 +9132,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -9340,7 +9440,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -10103,7 +10203,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -11225,7 +11325,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -12552,7 +12652,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -13485,16 +13585,13 @@ tags: embeddings. - This API provides the raw interface to the underlying models. Three kinds of - models are supported: + This API provides the raw interface to the underlying models. Two kinds of models + are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - - Rerank models: these models reorder the documents based on their relevance - to a query. 
x-displayName: Inference - name: Inspect description: >- diff --git a/containers/Containerfile b/containers/Containerfile index 1c878ea9b..1ddf102af 100644 --- a/containers/Containerfile +++ b/containers/Containerfile @@ -45,7 +45,7 @@ RUN set -eux; \ exit 1; \ fi -RUN pip install --no-cache uv +RUN pip install --no-cache-dir uv ENV UV_SYSTEM_PYTHON=1 ENV INSTALL_MODE=${INSTALL_MODE} @@ -68,7 +68,7 @@ RUN set -eux; \ echo "LLAMA_STACK_CLIENT_DIR is set but $LLAMA_STACK_CLIENT_DIR does not exist" >&2; \ exit 1; \ fi; \ - uv pip install --no-cache -e "$LLAMA_STACK_CLIENT_DIR"; \ + uv pip install --no-cache-dir -e "$LLAMA_STACK_CLIENT_DIR"; \ fi; # Install llama-stack @@ -78,19 +78,19 @@ RUN set -eux; \ echo "INSTALL_MODE=editable requires LLAMA_STACK_DIR to point to a directory inside the build context" >&2; \ exit 1; \ fi; \ - uv pip install --no-cache -e "$LLAMA_STACK_DIR"; \ + uv pip install --no-cache-dir -e "$LLAMA_STACK_DIR"; \ elif [ "$INSTALL_MODE" = "test-pypi" ]; then \ - uv pip install --no-cache fastapi libcst; \ + uv pip install --no-cache-dir fastapi libcst; \ if [ -n "$TEST_PYPI_VERSION" ]; then \ - uv pip install --no-cache --extra-index-url https://test.pypi.org/simple/ --index-strategy unsafe-best-match "llama-stack==$TEST_PYPI_VERSION"; \ + uv pip install --no-cache-dir --extra-index-url https://test.pypi.org/simple/ --index-strategy unsafe-best-match "llama-stack==$TEST_PYPI_VERSION"; \ else \ - uv pip install --no-cache --extra-index-url https://test.pypi.org/simple/ --index-strategy unsafe-best-match llama-stack; \ + uv pip install --no-cache-dir --extra-index-url https://test.pypi.org/simple/ --index-strategy unsafe-best-match llama-stack; \ fi; \ else \ if [ -n "$PYPI_VERSION" ]; then \ - uv pip install --no-cache "llama-stack==$PYPI_VERSION"; \ + uv pip install --no-cache-dir "llama-stack==$PYPI_VERSION"; \ else \ - uv pip install --no-cache llama-stack; \ + uv pip install --no-cache-dir llama-stack; \ fi; \ fi; @@ -102,7 +102,7 @@ RUN set -eux; \ fi; \ deps="$(llama stack list-deps "$DISTRO_NAME")"; \ if [ -n "$deps" ]; then \ - printf '%s\n' "$deps" | xargs -L1 uv pip install --no-cache; \ + printf '%s\n' "$deps" | xargs -L1 uv pip install --no-cache-dir; \ fi # Cleanup diff --git a/docs/docs/distributions/building_distro.mdx b/docs/docs/distributions/building_distro.mdx index c4a01bf7d..0c40613d6 100644 --- a/docs/docs/distributions/building_distro.mdx +++ b/docs/docs/distributions/building_distro.mdx @@ -19,7 +19,6 @@ Browse that folder to understand available providers and copy a distribution to import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - diff --git a/docs/docs/distributions/k8s/stack_run_config.yaml b/docs/docs/distributions/k8s/stack_run_config.yaml index 3c74fd436..863565fdf 100644 --- a/docs/docs/distributions/k8s/stack_run_config.yaml +++ b/docs/docs/distributions/k8s/stack_run_config.yaml @@ -32,17 +32,21 @@ providers: provider_type: remote::chromadb config: url: ${env.CHROMADB_URL:=} - persistence: - namespace: vector_io::chroma_remote - backend: kv_default + kvstore: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} files: - provider_id: meta-reference-files provider_type: inline::localfs config: storage_dir: ${env.FILES_STORAGE_DIR:=~/.llama/distributions/starter/files} metadata_store: - table_name: files_metadata - backend: sql_default + type: sqlite + 
db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/starter}/files_metadata.db safety: - provider_id: llama-guard provider_type: inline::llama-guard @@ -52,15 +56,20 @@ providers: - provider_id: meta-reference provider_type: inline::meta-reference config: - persistence: - agent_state: - namespace: agents - backend: kv_default - responses: - table_name: responses - backend: sql_default - max_write_queue_size: 10000 - num_writers: 4 + persistence_store: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} + responses_store: + type: postgres + host: ${env.POSTGRES_HOST:=localhost} + port: ${env.POSTGRES_PORT:=5432} + db: ${env.POSTGRES_DB:=llamastack} + user: ${env.POSTGRES_USER:=llamastack} + password: ${env.POSTGRES_PASSWORD:=llamastack} telemetry: - provider_id: meta-reference provider_type: inline::meta-reference @@ -101,53 +110,40 @@ storage: db: ${env.POSTGRES_DB:=llamastack} user: ${env.POSTGRES_USER:=llamastack} password: ${env.POSTGRES_PASSWORD:=llamastack} - stores: + references: metadata: - namespace: registry backend: kv_default + namespace: registry inference: + backend: sql_default table_name: inference_store - backend: sql_default - max_write_queue_size: 10000 - num_writers: 4 - conversations: - table_name: openai_conversations - backend: sql_default -registered_resources: - models: - - metadata: - embedding_dimension: 768 - model_id: nomic-embed-text-v1.5 - provider_id: sentence-transformers - model_type: embedding - - metadata: {} - model_id: ${env.INFERENCE_MODEL} - provider_id: vllm-inference - model_type: llm - - metadata: {} - model_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} - provider_id: vllm-safety - model_type: llm - shields: - - shield_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} - vector_dbs: [] - datasets: [] - scoring_fns: [] - benchmarks: [] - tool_groups: - - toolgroup_id: builtin::websearch - provider_id: tavily-search - - toolgroup_id: builtin::rag - provider_id: rag-runtime +models: +- metadata: + embedding_dimension: 768 + model_id: nomic-embed-text-v1.5 + provider_id: sentence-transformers + model_type: embedding +- metadata: {} + model_id: ${env.INFERENCE_MODEL} + provider_id: vllm-inference + model_type: llm +- metadata: {} + model_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} + provider_id: vllm-safety + model_type: llm +shields: +- shield_id: ${env.SAFETY_MODEL:=meta-llama/Llama-Guard-3-1B} +vector_dbs: [] +datasets: [] +scoring_fns: [] +benchmarks: [] +tool_groups: +- toolgroup_id: builtin::websearch + provider_id: tavily-search +- toolgroup_id: builtin::rag + provider_id: rag-runtime server: port: 8321 auth: provider_config: type: github_token -telemetry: - enabled: true -vector_stores: - default_provider_id: chromadb - default_embedding_model: - provider_id: sentence-transformers - model_id: nomic-ai/nomic-embed-text-v1.5 diff --git a/docs/docs/getting_started/demo_script.py b/docs/docs/getting_started/demo_script.py index a6d7bed63..2ea67739f 100644 --- a/docs/docs/getting_started/demo_script.py +++ b/docs/docs/getting_started/demo_script.py @@ -4,24 +4,65 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. 
+from llama_stack_client import Agent, AgentEventLogger, RAGDocument, LlamaStackClient -import io, requests -from openai import OpenAI +vector_db_id = "my_demo_vector_db" +client = LlamaStackClient(base_url="http://localhost:8321") -url="https://www.paulgraham.com/greatwork.html" -client = OpenAI(base_url="http://localhost:8321/v1/", api_key="none") +models = client.models.list() -vs = client.vector_stores.create() -response = requests.get(url) -pseudo_file = io.BytesIO(str(response.content).encode('utf-8')) -uploaded_file = client.files.create(file=(url, pseudo_file, "text/html"), purpose="assistants") -client.vector_stores.files.create(vector_store_id=vs.id, file_id=uploaded_file.id) +# Select the first LLM and first embedding models +model_id = next(m for m in models if m.model_type == "llm").identifier +embedding_model_id = ( + em := next(m for m in models if m.model_type == "embedding") +).identifier +embedding_dimension = em.metadata["embedding_dimension"] -resp = client.responses.create( - model="openai/gpt-4o", - input="How do you do great work? Use the existing knowledge_search tool.", - tools=[{"type": "file_search", "vector_store_ids": [vs.id]}], - include=["file_search_call.results"], +vector_db = client.vector_dbs.register( + vector_db_id=vector_db_id, + embedding_model=embedding_model_id, + embedding_dimension=embedding_dimension, + provider_id="faiss", +) +vector_db_id = vector_db.identifier +source = "https://www.paulgraham.com/greatwork.html" +print("rag_tool> Ingesting document:", source) +document = RAGDocument( + document_id="document_1", + content=source, + mime_type="text/html", + metadata={}, +) +client.tool_runtime.rag_tool.insert( + documents=[document], + vector_db_id=vector_db_id, + chunk_size_in_tokens=100, +) +agent = Agent( + client, + model=model_id, + instructions="You are a helpful assistant", + tools=[ + { + "name": "builtin::rag/knowledge_search", + "args": {"vector_db_ids": [vector_db_id]}, + } + ], ) -print(resp) +prompt = "How do you do great work?" +print("prompt>", prompt) + +use_stream = True +response = agent.create_turn( + messages=[{"role": "user", "content": prompt}], + session_id=agent.create_session("rag_session"), + stream=use_stream, +) + +# Only call `AgentEventLogger().log(response)` for streaming responses. +if use_stream: + for log in AgentEventLogger().log(response): + log.print() +else: + print(response) diff --git a/docs/docs/getting_started/quickstart.mdx b/docs/docs/getting_started/quickstart.mdx index ec929eb88..2e47a771e 100644 --- a/docs/docs/getting_started/quickstart.mdx +++ b/docs/docs/getting_started/quickstart.mdx @@ -35,51 +35,103 @@ OLLAMA_URL=http://localhost:11434 uv run --with llama-stack llama stack run star #### Step 3: Run the demo Now open up a new terminal and copy the following script into a file named `demo_script.py`. -```python -import io, requests -from openai import OpenAI +```python title="demo_script.py" +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
-url="https://www.paulgraham.com/greatwork.html" -client = OpenAI(base_url="http://localhost:8321/v1/", api_key="none") +from llama_stack_client import Agent, AgentEventLogger, RAGDocument, LlamaStackClient -vs = client.vector_stores.create() -response = requests.get(url) -pseudo_file = io.BytesIO(str(response.content).encode('utf-8')) -uploaded_file = client.files.create(file=(url, pseudo_file, "text/html"), purpose="assistants") -client.vector_stores.files.create(vector_store_id=vs.id, file_id=uploaded_file.id) +vector_db_id = "my_demo_vector_db" +client = LlamaStackClient(base_url="http://localhost:8321") -resp = client.responses.create( - model="openai/gpt-4o", - input="How do you do great work? Use the existing knowledge_search tool.", - tools=[{"type": "file_search", "vector_store_ids": [vs.id]}], - include=["file_search_call.results"], +models = client.models.list() + +# Select the first LLM and first embedding models +model_id = next(m for m in models if m.model_type == "llm").identifier +embedding_model_id = ( + em := next(m for m in models if m.model_type == "embedding") +).identifier +embedding_dimension = em.metadata["embedding_dimension"] + +vector_db = client.vector_dbs.register( + vector_db_id=vector_db_id, + embedding_model=embedding_model_id, + embedding_dimension=embedding_dimension, + provider_id="faiss", +) +vector_db_id = vector_db.identifier +source = "https://www.paulgraham.com/greatwork.html" +print("rag_tool> Ingesting document:", source) +document = RAGDocument( + document_id="document_1", + content=source, + mime_type="text/html", + metadata={}, +) +client.tool_runtime.rag_tool.insert( + documents=[document], + vector_db_id=vector_db_id, + chunk_size_in_tokens=100, +) +agent = Agent( + client, + model=model_id, + instructions="You are a helpful assistant", + tools=[ + { + "name": "builtin::rag/knowledge_search", + "args": {"vector_db_ids": [vector_db_id]}, + } + ], ) +prompt = "How do you do great work?" +print("prompt>", prompt) +use_stream = True +response = agent.create_turn( + messages=[{"role": "user", "content": prompt}], + session_id=agent.create_session("rag_session"), + stream=use_stream, +) + +# Only call `AgentEventLogger().log(response)` for streaming responses. +if use_stream: + for log in AgentEventLogger().log(response): + log.print() +else: + print(response) +``` We will use `uv` to run the script ``` uv run --with llama-stack-client,fire,requests demo_script.py ``` And you should see output like below. -```python ->print(resp.output[1].content[0].text) -To do great work, consider the following principles: - -1. **Follow Your Interests**: Engage in work that genuinely excites you. If you find an area intriguing, pursue it without being overly concerned about external pressures or norms. You should create things that you would want for yourself, as this often aligns with what others in your circle might want too. - -2. **Work Hard on Ambitious Projects**: Ambition is vital, but it should be tempered by genuine interest. Instead of detailed planning for the future, focus on exciting projects that keep your options open. This approach, known as "staying upwind," allows for adaptability and can lead to unforeseen achievements. - -3. **Choose Quality Colleagues**: Collaborating with talented colleagues can significantly affect your own work. Seek out individuals who offer surprising insights and whom you admire. The presence of good colleagues can elevate the quality of your work and inspire you. - -4. 
**Maintain High Morale**: Your attitude towards work and life affects your performance. Cultivating optimism and viewing yourself as lucky rather than victimized can boost your productivity. It’s essential to care for your physical health as well since it directly impacts your mental faculties and morale. - -5. **Be Consistent**: Great work often comes from cumulative effort. Daily progress, even in small amounts, can result in substantial achievements over time. Emphasize consistency and make the work engaging, as this reduces the perceived burden of hard labor. - -6. **Embrace Curiosity**: Curiosity is a driving force that can guide you in selecting fields of interest, pushing you to explore uncharted territories. Allow it to shape your work and continually seek knowledge and insights. - -By focusing on these aspects, you can create an environment conducive to great work and personal fulfillment. ``` +rag_tool> Ingesting document: https://www.paulgraham.com/greatwork.html +prompt> How do you do great work? + +inference> [knowledge_search(query="What is the key to doing great work")] + +tool_execution> Tool:knowledge_search Args:{'query': 'What is the key to doing great work'} + +tool_execution> Tool:knowledge_search Response:[TextContentItem(text='knowledge_search tool found 5 chunks:\nBEGIN of knowledge_search tool results.\n', type='text'), TextContentItem(text="Result 1:\nDocument_id:docum\nContent: work. Doing great work means doing something important\nso well that you expand people's ideas of what's possible. But\nthere's no threshold for importance. It's a matter of degree, and\noften hard to judge at the time anyway.\n", type='text'), TextContentItem(text="Result 2:\nDocument_id:docum\nContent: work. Doing great work means doing something important\nso well that you expand people's ideas of what's possible. But\nthere's no threshold for importance. It's a matter of degree, and\noften hard to judge at the time anyway.\n", type='text'), TextContentItem(text="Result 3:\nDocument_id:docum\nContent: work. Doing great work means doing something important\nso well that you expand people's ideas of what's possible. But\nthere's no threshold for importance. It's a matter of degree, and\noften hard to judge at the time anyway.\n", type='text'), TextContentItem(text="Result 4:\nDocument_id:docum\nContent: work. Doing great work means doing something important\nso well that you expand people's ideas of what's possible. But\nthere's no threshold for importance. It's a matter of degree, and\noften hard to judge at the time anyway.\n", type='text'), TextContentItem(text="Result 5:\nDocument_id:docum\nContent: work. Doing great work means doing something important\nso well that you expand people's ideas of what's possible. But\nthere's no threshold for importance. It's a matter of degree, and\noften hard to judge at the time anyway.\n", type='text'), TextContentItem(text='END of knowledge_search tool results.\n', type='text')] + +inference> Based on the search results, it seems that doing great work means doing something important so well that you expand people's ideas of what's possible. However, there is no clear threshold for importance, and it can be difficult to judge at the time. 
+ +To further clarify, I would suggest that doing great work involves: + +* Completing tasks with high quality and attention to detail +* Expanding on existing knowledge or ideas +* Making a positive impact on others through your work +* Striving for excellence and continuous improvement + +Ultimately, great work is about making a meaningful contribution and leaving a lasting impression. +``` Congratulations! You've successfully built your first RAG application using Llama Stack! πŸŽ‰πŸ₯³ :::tip HuggingFace access diff --git a/docs/docs/providers/inference/index.mdx b/docs/docs/providers/inference/index.mdx index 478611420..c2bf69962 100644 --- a/docs/docs/providers/inference/index.mdx +++ b/docs/docs/providers/inference/index.mdx @@ -3,10 +3,9 @@ description: "Inference Llama Stack Inference API for generating completions, chat completions, and embeddings. - This API provides the raw interface to the underlying models. Three kinds of models are supported: + This API provides the raw interface to the underlying models. Two kinds of models are supported: - LLM models: these models generate \"raw\" and \"chat\" (conversational) completions. - - Embedding models: these models generate embeddings to be used for semantic search. - - Rerank models: these models reorder the documents based on their relevance to a query." + - Embedding models: these models generate embeddings to be used for semantic search." sidebar_label: Inference title: Inference --- @@ -19,9 +18,8 @@ Inference Llama Stack Inference API for generating completions, chat completions, and embeddings. - This API provides the raw interface to the underlying models. Three kinds of models are supported: + This API provides the raw interface to the underlying models. Two kinds of models are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - Rerank models: these models reorder the documents based on their relevance to a query. This section contains documentation for all available providers for the **inference** API. diff --git a/docs/docs/references/llama_stack_client_cli_reference.md b/docs/docs/references/llama_stack_client_cli_reference.md index a4321938a..9bb514a2d 100644 --- a/docs/docs/references/llama_stack_client_cli_reference.md +++ b/docs/docs/references/llama_stack_client_cli_reference.md @@ -32,6 +32,7 @@ Commands: scoring_functions Manage scoring functions. shields Manage safety shield services. toolgroups Manage available tool groups. + vector_dbs Manage vector databases. ``` ### `llama-stack-client configure` @@ -210,6 +211,53 @@ Unregister a model from distribution endpoint llama-stack-client models unregister ``` +## Vector DB Management +Manage vector databases. 
+ + +### `llama-stack-client vector_dbs list` +Show available vector dbs on distribution endpoint +```bash +llama-stack-client vector_dbs list +``` +``` +┏━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ identifier ┃ provider_id ┃ provider_resource_id ┃ vector_db_type ┃ params ┃ +┑━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +β”‚ my_demo_vector_db β”‚ faiss β”‚ my_demo_vector_db β”‚ β”‚ embedding_dimension: 768 β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ embedding_model: nomic-embed-text-v1.5 β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ type: vector_db β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +### `llama-stack-client vector_dbs register` +Create a new vector db +```bash +llama-stack-client vector_dbs register [--provider-id ] [--provider-vector-db-id ] [--embedding-model ] [--embedding-dimension ] +``` + + +Required arguments: +- `VECTOR_DB_ID`: Vector DB ID + +Optional arguments: +- `--provider-id`: Provider ID for the vector db +- `--provider-vector-db-id`: Provider's vector db ID +- `--embedding-model`: Embedding model to use. Default: `nomic-embed-text-v1.5` +- `--embedding-dimension`: Dimension of embeddings. Default: 768 + +### `llama-stack-client vector_dbs unregister` +Delete a vector db +```bash +llama-stack-client vector_dbs unregister +``` + + +Required arguments: +- `VECTOR_DB_ID`: Vector DB ID + + ## Shield Management Manage safety shield services. 
### `llama-stack-client shields list` diff --git a/docs/getting_started.ipynb b/docs/getting_started.ipynb index a03de73f5..e1e2ff82e 100644 --- a/docs/getting_started.ipynb +++ b/docs/getting_started.ipynb @@ -57,62 +57,6 @@ "\n" ] }, - { - "cell_type": "markdown", - "id": "be3b12f8-b857-4f02-b451-a5a6b5be0814", - "metadata": {}, - "source": [ - "### 1.2 Install llama-stack and llama-stack-client packages" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "2dde9482-0e5d-49ca-a350-1e239eb341c5", - "metadata": { - "collapsed": true, - "jupyter": { - "outputs_hidden": true - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: uv in /opt/homebrew/Caskroom/miniconda/base/envs/test/lib/python3.12/site-packages (0.9.4)\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2K\u001b[2mResolved \u001b[1m84 packages\u001b[0m \u001b[2min 603ms\u001b[0m\u001b[0m \u001b[0m\n", - "\u001b[2K\u001b[2mPrepared \u001b[1m1 package\u001b[0m \u001b[2min 371ms\u001b[0m\u001b[0m \n", - "\u001b[2K\u001b[2mInstalled \u001b[1m3 packages\u001b[0m \u001b[2min 50ms\u001b[0m\u001b[0m \u001b[0m\n", - " \u001b[32m+\u001b[39m \u001b[1mecdsa\u001b[0m\u001b[2m==0.19.1\u001b[0m\n", - " \u001b[32m+\u001b[39m \u001b[1mllama-stack\u001b[0m\u001b[2m==0.2.23\u001b[0m\n", - " \u001b[32m+\u001b[39m \u001b[1mpython-jose\u001b[0m\u001b[2m==3.5.0\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2K\u001b[2mResolved \u001b[1m35 packages\u001b[0m \u001b[2min 40ms\u001b[0m\u001b[0m \u001b[0m\n", - "\u001b[2K\u001b[2mPrepared \u001b[1m1 package\u001b[0m \u001b[2min 1.19s\u001b[0m\u001b[0m \n", - "\u001b[2mUninstalled \u001b[1m1 package\u001b[0m \u001b[2min 3ms\u001b[0m\u001b[0m\n", - "\u001b[2K\u001b[2mInstalled \u001b[1m1 package\u001b[0m \u001b[2min 9ms\u001b[0m\u001b[0m=0.3.0a6 (from file:///Users/erich\u001b[0m\n", - " \u001b[33m~\u001b[39m \u001b[1mllama-stack-client\u001b[0m\u001b[2m==0.3.0a6 (from file:///Users/erichuang/projects/llama-stack-client-python)\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m52 packages\u001b[0m \u001b[2min 1.18s\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m1 package\u001b[0m \u001b[2min 67ms\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m3 packages\u001b[0m \u001b[2min 43ms\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m3 packages\u001b[0m \u001b[2min 125ms\u001b[0m\u001b[0m\n" - ] - } - ], - "source": [ - "!pip install uv\n", - "!uv pip install llama-stack llama-stack-client\n", - "\n", - "# Installs dependencies for the starter distribution\n", - "!uv run --with llama-stack llama stack list-deps starter | xargs -L1 uv pip install" - ] - }, { "cell_type": "markdown", "id": "oDUB7M_qe-Gs", @@ -120,7 +64,7 @@ "id": "oDUB7M_qe-Gs" }, "source": [ - "### 1.3. Setup and Running a Llama Stack server\n", + "### 1.2. 
Setup and Running a Llama Stack server\n", "\n", "Llama Stack is architected as a collection of APIs that provide developers with the building blocks to build AI applications. \n", "\n", @@ -131,30 +75,61 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "J2kGed0R5PSf", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, + "collapsed": true, "id": "J2kGed0R5PSf", "outputId": "2478ea60-8d35-48a1-b011-f233831740c5" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: uv in /opt/homebrew/Caskroom/miniconda/base/envs/stack/lib/python3.10/site-packages (0.5.29)\n", + "Environment '/Users/hjshah/git/llama-stack/.venv' already exists, re-using it.\n", + "Virtual environment /Users/hjshah/git/llama-stack/.venv is already active\n", + "\u001b[2mUsing Python 3.10.16 environment at: /Users/hjshah/git/llama-stack/.venv\u001b[0m\n", + "\u001b[2mAudited \u001b[1m1 package\u001b[0m \u001b[2min 314ms\u001b[0m\u001b[0m\n", + "Installing pip dependencies\n", + "\u001b[2mUsing Python 3.10.16 environment at: /Users/hjshah/git/llama-stack/.venv\u001b[0m\n", + "\u001b[2K\u001b[2mResolved \u001b[1m125 packages\u001b[0m \u001b[2min 646ms\u001b[0m\u001b[0m \u001b[0m\n", + "\u001b[2mUninstalled \u001b[1m1 package\u001b[0m \u001b[2min 404ms\u001b[0m\u001b[0m\n", + "\u001b[2K\u001b[2mInstalled \u001b[1m1 package\u001b[0m \u001b[2min 129ms\u001b[0m\u001b[0m \u001b[0m\n", + " \u001b[31m-\u001b[39m \u001b[1mnumpy\u001b[0m\u001b[2m==2.2.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnumpy\u001b[0m\u001b[2m==1.26.4\u001b[0m\n", + "sentence-transformers --no-deps\n", + "\u001b[2mUsing Python 3.10.16 environment at: /Users/hjshah/git/llama-stack/.venv\u001b[0m\n", + "\u001b[2mAudited \u001b[1m1 package\u001b[0m \u001b[2min 54ms\u001b[0m\u001b[0m\n", + "torch torchvision --index-url https://download.pytorch.org/whl/cpu\n", + "\u001b[2mUsing Python 3.10.16 environment at: /Users/hjshah/git/llama-stack/.venv\u001b[0m\n", + "\u001b[2mAudited \u001b[1m2 packages\u001b[0m \u001b[2min 10ms\u001b[0m\u001b[0m\n", + "\u001b[32mBuild Successful!\u001b[0m\n" + ] + } + ], "source": [ "import os\n", "import subprocess\n", "import time\n", "\n", + "!pip install uv\n", "\n", "if \"UV_SYSTEM_PYTHON\" in os.environ:\n", " del os.environ[\"UV_SYSTEM_PYTHON\"]\n", "\n", + "# this command installs all the dependencies needed for the llama stack server with the together inference provider\n", + "!uv run --with llama-stack llama stack list-deps together | xargs -L1 uv pip install\n", + "!uv run --with llama-stack llama stack run together\n", "\n", "def run_llama_stack_server_background():\n", " log_file = open(\"llama_stack_server.log\", \"w\")\n", " process = subprocess.Popen(\n", - " \"uv run --with llama-stack llama stack run starter\",\n", + " \"uv run --with llama-stack llama stack run together\",\n", " shell=True,\n", " stdout=log_file,\n", " stderr=log_file,\n", @@ -177,7 +152,6 @@ " for _ in range(max_retries):\n", " try:\n", " response = requests.get(url)\n", - " print(response)\n", " if response.status_code == 200:\n", " print(\"\\nServer is ready!\")\n", " return True\n", @@ -205,7 +179,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "id": "f779283d", "metadata": {}, "outputs": [ @@ -213,9 +187,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Starting Llama Stack server with PID: 86923\n", - "Waiting for server to start..........\n", - "\n", + "Starting Llama 
Stack server with PID: 79142\n", + "Waiting for server to start..........................\n", "Server is ready!\n" ] } @@ -230,9 +203,9 @@ "id": "90eb721b", "metadata": {}, "source": [ - "### 1.4. Configure the Client\n", + "### 1.4. Install and Configure the Client\n", "\n", - "Now that we have our Llama Stack server running locally, we will setup the client to interact with it. The `llama-stack-client` provides a simple Python interface to access all the functionality of Llama Stack, including:\n", + "Now that we have our Llama Stack server running locally, we need to install the client package to interact with it. The `llama-stack-client` provides a simple Python interface to access all the functionality of Llama Stack, including:\n", "\n", "- Chat Completions ( text and multimodal )\n", "- Safety Shields \n", @@ -243,13 +216,34 @@ "\n", "In the next cells, we'll:\n", "\n", - "1. Set up API keys for external services (Together AI and Tavily Search)\n", - "2. Initialize the client to connect to our local server\n" + "1. Install the client package\n", + "2. Set up API keys for external services (Together AI and Tavily Search)\n", + "3. Initialize the client to connect to our local server\n" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, + "id": "2e68e32a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[2mUsing Python 3.10.16 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/stack\u001b[0m\n", + "\u001b[2K\u001b[2mResolved \u001b[1m31 packages\u001b[0m \u001b[2min 284ms\u001b[0m\u001b[0m \u001b[0m\n", + "\u001b[2mAudited \u001b[1m31 packages\u001b[0m \u001b[2min 0.04ms\u001b[0m\u001b[0m\n" + ] + } + ], + "source": [ + "!pip install -U llama-stack-client" + ] + }, + { + "cell_type": "code", + "execution_count": null, "id": "E1UFuJC570Tk", "metadata": { "colab": { @@ -379,6 +373,7 @@ "52fe404ec9c14db2a7279b4c154eef3d" ] }, + "collapsed": true, "id": "E1UFuJC570Tk", "outputId": "aebb69d4-c167-4de5-eb8a-dd19dd538f63" }, @@ -456,268 +451,46 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 5, "id": "ruO9jQna_t_S", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, + "collapsed": true, "id": "ruO9jQna_t_S", "outputId": "ab1722a7-62ab-43bb-9cab-4e45bf62068a" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: GET http://0.0.0.0:8321/v1/models \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: GET http://0.0.0.0:8321/v1/shields \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ "Available models:\n", - "- fireworks/accounts/fireworks/models/flux-1-dev-fp8\n", - "- fireworks/accounts/fireworks/models/flux-kontext-max\n", - "- fireworks/accounts/fireworks/models/flux-kontext-pro\n", - "- fireworks/accounts/sentientfoundation-serverless/models/dobby-mini-unhinged-plus-llama-3-1-8b\n", - "- fireworks/accounts/sentientfoundation/models/dobby-unhinged-llama-3-3-70b-new\n", - "- fireworks/accounts/fireworks/models/gpt-oss-120b\n", - "- fireworks/accounts/fireworks/models/qwen3-235b-a22b-instruct-2507\n", - "- fireworks/accounts/fireworks/models/qwen3-235b-a22b-thinking-2507\n", - "- fireworks/accounts/fireworks/models/deepseek-v3-0324\n", - "- fireworks/accounts/fireworks/models/kimi-k2-instruct\n", - "- fireworks/accounts/fireworks/models/llama-v3p3-70b-instruct\n", - "- fireworks/accounts/fireworks/models/qwen3-235b-a22b\n", - "- 
fireworks/accounts/fireworks/models/deepseek-v3p1\n", - "- fireworks/accounts/fireworks/models/flux-1-schnell-fp8\n", - "- fireworks/accounts/fireworks/models/llama4-scout-instruct-basic\n", - "- fireworks/accounts/fireworks/models/llama-v3p1-70b-instruct\n", - "- fireworks/accounts/fireworks/models/deepseek-r1-0528\n", - "- fireworks/accounts/fireworks/models/llama4-maverick-instruct-basic\n", - "- fireworks/accounts/fireworks/models/qwen2p5-vl-32b-instruct\n", - "- fireworks/accounts/fireworks/models/deepseek-v3p1-terminus\n", - "- fireworks/accounts/fireworks/models/llama-v3p1-8b-instruct\n", - "- fireworks/accounts/fireworks/models/qwen3-coder-480b-a35b-instruct\n", - "- fireworks/accounts/fireworks/models/qwen3-30b-a3b-instruct-2507\n", - "- fireworks/accounts/fireworks/models/qwen3-30b-a3b-thinking-2507\n", - "- fireworks/accounts/fireworks/models/qwen3-embedding-8b\n", - "- fireworks/accounts/fireworks/models/glm-4p5\n", - "- fireworks/accounts/fireworks/models/qwen3-reranker-8b\n", - "- fireworks/accounts/fireworks/models/kimi-k2-instruct-0905\n", - "- fireworks/accounts/fireworks/models/deepseek-r1\n", - "- fireworks/accounts/fireworks/models/deepseek-v3\n", - "- fireworks/accounts/fireworks/models/deepseek-r1-basic\n", - "- fireworks/accounts/fireworks/models/glm-4p6\n", - "- fireworks/accounts/tvergho-87e44d/models/debatecards-70b-ft-3epoch-dpo-v2\n", - "- fireworks/accounts/fireworks/models/gpt-oss-20b\n", - "- fireworks/accounts/fireworks/models/qwen3-30b-a3b\n", - "- fireworks/accounts/fireworks/models/glm-4p5-air\n", - "- fireworks/accounts/fireworks/models/mixtral-8x22b-instruct\n", - "- fireworks/accounts/fireworks/models/llama-v3p1-405b-instruct\n", - "- fireworks/accounts/fireworks/models/qwen3-coder-30b-a3b-instruct\n", - "- together/Alibaba-NLP/gte-modernbert-base\n", - "- together/arcee-ai/AFM-4.5B\n", - "- together/arcee-ai/coder-large\n", - "- together/arcee-ai/maestro-reasoning\n", - "- together/arcee-ai/virtuoso-large\n", - "- together/arcee_ai/arcee-spotlight\n", - "- together/arize-ai/qwen-2-1.5b-instruct\n", - "- together/BAAI/bge-base-en-v1.5\n", - "- together/BAAI/bge-large-en-v1.5\n", - "- together/black-forest-labs/FLUX.1-dev\n", - "- together/black-forest-labs/FLUX.1-dev-lora\n", - "- together/black-forest-labs/FLUX.1-kontext-dev\n", - "- together/black-forest-labs/FLUX.1-kontext-max\n", - "- together/black-forest-labs/FLUX.1-kontext-pro\n", - "- together/black-forest-labs/FLUX.1-krea-dev\n", - "- together/black-forest-labs/FLUX.1-pro\n", - "- together/black-forest-labs/FLUX.1-schnell\n", - "- together/black-forest-labs/FLUX.1-schnell-Free\n", - "- together/black-forest-labs/FLUX.1.1-pro\n", - "- together/cartesia/sonic\n", - "- together/cartesia/sonic-2\n", - "- together/codellama/CodeLlama-34b-Instruct-hf\n", - "- together/deepcogito/cogito-v2-preview-deepseek-671b\n", - "- together/deepcogito/cogito-v2-preview-llama-109B-MoE\n", - "- together/deepcogito/cogito-v2-preview-llama-405B\n", - "- together/deepcogito/cogito-v2-preview-llama-70B\n", - "- together/deepseek-ai/DeepSeek-R1\n", - "- together/deepseek-ai/DeepSeek-R1-0528-tput\n", - "- together/deepseek-ai/DeepSeek-R1-Distill-Llama-70B\n", - "- together/deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free\n", - "- together/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B\n", - "- together/deepseek-ai/DeepSeek-V3\n", - "- together/deepseek-ai/DeepSeek-V3.1\n", - "- together/google/gemma-3n-E4B-it\n", - "- together/intfloat/multilingual-e5-large-instruct\n", - "- together/marin-community/marin-8b-instruct\n", - "- 
together/meta-llama/Llama-3-70b-chat-hf\n", - "- together/meta-llama/Llama-3-70b-hf\n", - "- together/meta-llama/Llama-3.1-405B-Instruct\n", - "- together/meta-llama/Llama-3.2-1B-Instruct\n", - "- together/meta-llama/Llama-3.2-3B-Instruct-Turbo\n", - "- together/meta-llama/Llama-3.3-70B-Instruct-Turbo\n", - "- together/meta-llama/Llama-3.3-70B-Instruct-Turbo-Free\n", - "- together/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8\n", - "- together/meta-llama/Llama-4-Scout-17B-16E-Instruct\n", - "- together/meta-llama/Llama-Guard-3-11B-Vision-Turbo\n", - "- together/meta-llama/Llama-Guard-4-12B\n", - "- together/Meta-Llama/Llama-Guard-7b\n", - "- together/meta-llama/LlamaGuard-2-8b\n", - "- together/meta-llama/Meta-Llama-3-70B-Instruct-Turbo\n", - "- together/meta-llama/Meta-Llama-3-8B-Instruct\n", - "- together/meta-llama/Meta-Llama-3-8B-Instruct-Lite\n", - "- together/meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro\n", - "- together/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo\n", - "- together/meta-llama/Meta-Llama-3.1-70B-Instruct-Reference\n", - "- together/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo\n", - "- together/meta-llama/Meta-Llama-3.1-8B-Instruct-Reference\n", - "- together/meta-llama/Meta-Llama-Guard-3-8B\n", - "- together/mistralai/Mistral-7B-Instruct-v0.1\n", - "- together/mistralai/Mistral-7B-Instruct-v0.2\n", - "- together/mistralai/Mistral-7B-Instruct-v0.3\n", - "- together/mistralai/Mistral-Small-24B-Instruct-2501\n", - "- together/mistralai/Mixtral-8x7B-Instruct-v0.1\n", - "- together/mixedbread-ai/Mxbai-Rerank-Large-V2\n", - "- together/moonshotai/Kimi-K2-Instruct\n", - "- together/moonshotai/Kimi-K2-Instruct-0905\n", - "- together/nvidia/NVIDIA-Nemotron-Nano-9B-v2\n", - "- together/openai/gpt-oss-120b\n", - "- together/openai/gpt-oss-20b\n", - "- together/openai/whisper-large-v3\n", - "- together/Qwen/Qwen2.5-14B-Instruct\n", - "- together/Qwen/Qwen2.5-72B-Instruct\n", - "- together/Qwen/Qwen2.5-72B-Instruct-Turbo\n", - "- together/Qwen/Qwen2.5-7B-Instruct-Turbo\n", - "- together/Qwen/Qwen2.5-Coder-32B-Instruct\n", - "- together/Qwen/Qwen2.5-VL-72B-Instruct\n", - "- together/Qwen/Qwen3-235B-A22B-fp8-tput\n", - "- together/Qwen/Qwen3-235B-A22B-Instruct-2507-tput\n", - "- together/Qwen/Qwen3-235B-A22B-Thinking-2507\n", - "- together/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8\n", - "- together/Qwen/Qwen3-Next-80B-A3B-Instruct\n", - "- together/Qwen/Qwen3-Next-80B-A3B-Thinking\n", - "- together/Qwen/QwQ-32B\n", - "- together/Salesforce/Llama-Rank-V1\n", - "- together/scb10x/scb10x-typhoon-2-1-gemma3-12b\n", - "- together/ServiceNow-AI/Apriel-1.5-15b-Thinker\n", - "- together/togethercomputer/m2-bert-80M-32k-retrieval\n", - "- together/togethercomputer/MoA-1\n", - "- together/togethercomputer/MoA-1-Turbo\n", - "- together/togethercomputer/Refuel-Llm-V2\n", - "- together/togethercomputer/Refuel-Llm-V2-Small\n", - "- together/Virtue-AI/VirtueGuard-Text-Lite\n", - "- together/zai-org/GLM-4.5-Air-FP8\n", - "- bedrock/meta.llama3-1-8b-instruct-v1:0\n", - "- bedrock/meta.llama3-1-70b-instruct-v1:0\n", - "- bedrock/meta.llama3-1-405b-instruct-v1:0\n", - "- openai/gpt-4-0613\n", - "- openai/gpt-4\n", - "- openai/gpt-3.5-turbo\n", - "- openai/gpt-5-search-api-2025-10-14\n", - "- openai/gpt-realtime-mini\n", - "- openai/gpt-realtime-mini-2025-10-06\n", - "- openai/sora-2\n", - "- openai/sora-2-pro\n", - "- openai/davinci-002\n", - "- openai/babbage-002\n", - "- openai/gpt-3.5-turbo-instruct\n", - "- openai/gpt-3.5-turbo-instruct-0914\n", - "- openai/dall-e-3\n", - "- 
openai/dall-e-2\n", - "- openai/gpt-4-1106-preview\n", - "- openai/gpt-3.5-turbo-1106\n", - "- openai/tts-1-hd\n", - "- openai/tts-1-1106\n", - "- openai/tts-1-hd-1106\n", - "- openai/text-embedding-3-small\n", - "- openai/text-embedding-3-large\n", - "- openai/gpt-4-0125-preview\n", - "- openai/gpt-4-turbo-preview\n", - "- openai/gpt-3.5-turbo-0125\n", - "- openai/gpt-4-turbo\n", - "- openai/gpt-4-turbo-2024-04-09\n", - "- openai/gpt-4o\n", - "- openai/gpt-4o-2024-05-13\n", - "- openai/gpt-4o-mini-2024-07-18\n", - "- openai/gpt-4o-mini\n", - "- openai/gpt-4o-2024-08-06\n", - "- openai/chatgpt-4o-latest\n", - "- openai/o1-mini-2024-09-12\n", - "- openai/o1-mini\n", - "- openai/gpt-4o-realtime-preview-2024-10-01\n", - "- openai/gpt-4o-audio-preview-2024-10-01\n", - "- openai/gpt-4o-audio-preview\n", - "- openai/gpt-4o-realtime-preview\n", - "- openai/omni-moderation-latest\n", - "- openai/omni-moderation-2024-09-26\n", - "- openai/gpt-4o-realtime-preview-2024-12-17\n", - "- openai/gpt-4o-audio-preview-2024-12-17\n", - "- openai/gpt-4o-mini-realtime-preview-2024-12-17\n", - "- openai/gpt-4o-mini-audio-preview-2024-12-17\n", - "- openai/o1-2024-12-17\n", - "- openai/o1\n", - "- openai/gpt-4o-mini-realtime-preview\n", - "- openai/gpt-4o-mini-audio-preview\n", - "- openai/o3-mini\n", - "- openai/o3-mini-2025-01-31\n", - "- openai/gpt-4o-2024-11-20\n", - "- openai/gpt-4o-search-preview-2025-03-11\n", - "- openai/gpt-4o-search-preview\n", - "- openai/gpt-4o-mini-search-preview-2025-03-11\n", - "- openai/gpt-4o-mini-search-preview\n", - "- openai/gpt-4o-transcribe\n", - "- openai/gpt-4o-mini-transcribe\n", - "- openai/o1-pro-2025-03-19\n", - "- openai/o1-pro\n", - "- openai/gpt-4o-mini-tts\n", - "- openai/o3-2025-04-16\n", - "- openai/o4-mini-2025-04-16\n", - "- openai/o3\n", - "- openai/o4-mini\n", - "- openai/gpt-4.1-2025-04-14\n", - "- openai/gpt-4.1\n", - "- openai/gpt-4.1-mini-2025-04-14\n", - "- openai/gpt-4.1-mini\n", - "- openai/gpt-4.1-nano-2025-04-14\n", - "- openai/gpt-4.1-nano\n", - "- openai/gpt-image-1\n", - "- openai/codex-mini-latest\n", - "- openai/gpt-4o-realtime-preview-2025-06-03\n", - "- openai/gpt-4o-audio-preview-2025-06-03\n", - "- openai/o4-mini-deep-research\n", - "- openai/gpt-4o-transcribe-diarize\n", - "- openai/o4-mini-deep-research-2025-06-26\n", - "- openai/gpt-5-chat-latest\n", - "- openai/gpt-5-2025-08-07\n", - "- openai/gpt-5\n", - "- openai/gpt-5-mini-2025-08-07\n", - "- openai/gpt-5-mini\n", - "- openai/gpt-5-nano-2025-08-07\n", - "- openai/gpt-5-nano\n", - "- openai/gpt-audio-2025-08-28\n", - "- openai/gpt-realtime\n", - "- openai/gpt-realtime-2025-08-28\n", - "- openai/gpt-audio\n", - "- openai/gpt-5-codex\n", - "- openai/gpt-image-1-mini\n", - "- openai/gpt-5-pro-2025-10-06\n", - "- openai/gpt-5-pro\n", - "- openai/gpt-audio-mini\n", - "- openai/gpt-audio-mini-2025-10-06\n", - "- openai/gpt-5-search-api\n", - "- openai/gpt-3.5-turbo-16k\n", - "- openai/tts-1\n", - "- openai/whisper-1\n", - "- openai/text-embedding-ada-002\n", - "- sentence-transformers/nomic-ai/nomic-embed-text-v1.5\n", - "- sentence-transformers/all-MiniLM-L6-v2\n", + "- all-MiniLM-L6-v2\n", + "- meta-llama/Llama-3.1-405B-Instruct-FP8\n", + "- meta-llama/Llama-3.1-70B-Instruct\n", + "- meta-llama/Llama-3.1-8B-Instruct\n", + "- meta-llama/Llama-3.2-11B-Vision-Instruct\n", + "- meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo\n", + "- meta-llama/Llama-3.2-3B-Instruct\n", + "- meta-llama/Llama-3.2-3B-Instruct-Turbo\n", + "- meta-llama/Llama-3.2-90B-Vision-Instruct\n", + "- 
meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo\n", + "- meta-llama/Llama-3.3-70B-Instruct\n", + "- meta-llama/Llama-3.3-70B-Instruct-Turbo\n", + "- meta-llama/Llama-Guard-3-11B-Vision\n", + "- meta-llama/Llama-Guard-3-11B-Vision-Turbo\n", + "- meta-llama/Llama-Guard-3-8B\n", + "- meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo\n", + "- meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo\n", + "- meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo\n", + "- meta-llama/Meta-Llama-Guard-3-8B\n", + "- togethercomputer/m2-bert-80M-32k-retrieval\n", + "- togethercomputer/m2-bert-80M-8k-retrieval\n", "----\n", "Available shields (safety models):\n", - "together/meta-llama/Llama-Guard-4-12B\n", + "meta-llama/Llama-Guard-3-8B\n", "----\n" ] } @@ -732,7 +505,7 @@ "print(\"----\")\n", "print(\"Available shields (safety models):\")\n", "for s in client.shields.list():\n", - " print(s.provider_resource_id)\n", + " print(s.identifier)\n", "print(\"----\")\n" ] }, @@ -750,7 +523,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 6, "id": "77c29dba", "metadata": { "colab": { @@ -760,27 +533,16 @@ "outputId": "4857974f-4c70-4bc4-f90a-6ae49dc9c41e" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "Here is a two-sentence poem about a llama:\n", - "\n", - "With soft fur and gentle eyes, the llama roams with gentle surprise, a majestic creature of the Andes high. Its soft humming fills the Andean air, a soothing sound beyond compare.\n" + "With gentle eyes and a soft, fuzzy face, the llama roams the Andes with a peaceful, gentle pace. Its long neck bends as it grazes with glee, a symbol of serenity in a world wild and free.\n" ] } ], "source": [ - "model_id = \"together/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8\"\n", - "# TODO remove\n", - "# model_id = \"openai/gpt-4o\"\n", + "model_id = \"meta-llama/Llama-3.3-70B-Instruct\"\n", "\n", "response = client.chat.completions.create(\n", " model=model_id,\n", @@ -807,64 +569,40 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 7, "id": "3fdf9df6", "metadata": { "id": "3fdf9df6" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[36m> Response: The most famous Prime Minister of England during World War II was Winston Churchill. He served as Prime Minister from 1940 to 1945 and again from 1951 to 1955. Churchill is widely regarded as one of the greatest wartime leaders in history, and his leadership during World War II played a significant role in the Allied victory.\n", + "\u001b[36m> Response: The most famous Prime Minister of England during World War II was undoubtedly Winston Churchill. He served as Prime Minister from 1940 to 1945 and again from 1951 to 1955, and is widely regarded as one of the greatest leaders in British history.\n", "\n", - "Churchill was known for his oratory skills, and his speeches during the war, such as \"We shall fight on the beaches\" and \"Their finest hour\", helped to rally the British people during a time of great crisis. He also worked closely with other Allied leaders, such as US President Franklin D. 
Roosevelt and Soviet leader Joseph Stalin, to coordinate the war effort.\n", + "Churchill played a crucial role in rallying the British people during the war, and his oratory skills and leadership helped to boost morale and resistance against the Nazi threat. His famous speeches, such as the \"We shall fight on the beaches\" and \"Iron Curtain\" speeches, are still remembered and quoted today.\n", "\n", - "Churchill's leadership and vision helped to galvanize the British people during the war, and his legacy as a wartime leader has endured. He is often credited with helping to lead Britain to victory against Nazi Germany and its allies, and his name has become synonymous with British resilience and determination during World War II.\u001b[0m\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[36m> Response: One of Winston Churchill's most famous quotes is:\n", + "Churchill's leadership during World War II was marked by his unwavering determination to defeat Nazi Germany, and he worked closely with other Allied leaders, including US President Franklin D. Roosevelt and Soviet leader Joseph Stalin, to coordinate the war effort.\n", "\n", - "**\"We shall fight on the beaches\"**\n", + "Churchill's legacy extends far beyond his wartime leadership, and he is remembered for his many contributions to British politics, literature, and culture. He was a prolific writer and painter, and was awarded the Nobel Prize in Literature in 1953.\n", "\n", - "However, his most famous quote is often considered to be:\n", + "Overall, Winston Churchill is widely regarded as one of the most famous and influential Prime Ministers in British history, and his leadership during World War II remains an iconic and enduring symbol of British resilience and determination.\u001b[0m\n", + "\u001b[36m> Response: Winston Churchill had many famous quotes, but one of his most iconic and enduring quotes is:\n", "\n", - "**\"Blood, toil, tears and sweat: We have before us an ordeal of the most grievous kind.\"**\n", + "\"We shall fight on the beaches, we shall fight on the landing grounds, we shall fight in the fields and in the streets, we shall fight in the hills; we shall never surrender.\"\n", "\n", - "But arguably his most iconic and widely recognized quote is:\n", + "This quote is from his speech to the House of Commons on June 4, 1940, during the early stages of World War II, when Nazi Germany was threatening to invade Britain. The speech is known as the \"We Shall Fight on the Beaches\" speech, and it is considered one of the most famous and inspiring speeches in history.\n", "\n", - "**\"We shall fight on the beaches, in the streets, in the fields and in the hills. We shall never surrender.\"**\n", + "In this speech, Churchill rallied the British people to stand strong against the Nazi threat, and his words helped to boost morale and resistance. The quote has since become a symbol of British determination and resilience, and is often referenced and parodied in popular culture.\n", "\n", - "This quote was from his first speech as Prime Minister, delivered to the House of Commons on June 4, 1940, during the early stages of World War II, when Nazi Germany was threatening to invade Britain. 
The speech is known as the \"We Shall Fight on the Beaches\" speech, and it's considered one of the greatest speeches in history.\n", + "Other notable quotes from Churchill include:\n", "\n", - "However, if I had to pick one that is most widely recognized and often cited as his most famous quote, it would be:\n", + "* \"Blood, toil, tears, and sweat\" (from his first speech as Prime Minister in 1940)\n", + "* \"An iron curtain has descended across the continent\" (from his speech in 1946, referring to the Soviet Union's dominance in Eastern Europe)\n", + "* \"Never was so much owed by so many to so few\" (from his speech in 1940, referring to the bravery of the Royal Air Force during the Battle of Britain)\n", "\n", - "**\"Iron Curtain\"**\n", - "\n", - " Although not exactly a quote from WW2. It was \"From Stettin in the Baltic to Trieste in the Adriatic, an iron curtain has descended across the continent.\" \n", - "\n", - "This phrase was used in a speech in Fulton, Missouri, on March 5, 1946. It was a speech that alerted the West to the danger of Soviet expansionism and marked the beginning of the Cold War.\n", - "\n", - "Each of these quotes captures a moment in history and convey's a powerful message that has endured for generations.\u001b[0m\n" + "But \"We shall fight on the beaches\" remains his most famous and enduring quote.\u001b[0m\n" ] } ], @@ -918,7 +656,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 8, "id": "9496f75c", "metadata": { "colab": { @@ -928,38 +666,13 @@ "outputId": "7d93a4cf-a5d4-4741-b6eb-6bce3a27ff66" }, "outputs": [ - { - "name": "stdin", - "output_type": "stream", - "text": [ - "User> hello\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[36m> Response: Hello! It's nice to meet you. Is there something I can help you with or would you like to chat?\u001b[0m\n" - ] - }, - { - "name": "stdin", - "output_type": "stream", - "text": [ - "User> exit\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ + "\u001b[36m> Response: Fuzzy code abides\n", + "Llama's gentle syntax\n", + "Wisdom in the stack\u001b[0m\n", "\u001b[33mEnding conversation. 
Goodbye!\u001b[0m\n" ] } @@ -1010,7 +723,7 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": 9, "id": "d119026e", "metadata": { "colab": { @@ -1020,40 +733,34 @@ "outputId": "ebd6dc2b-8542-4370-b08a-e3a7dede6d17" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ "User> Write me a sonnet about llama\n", - "Assistant> \n", - "In Andean heights, the llama makes its stand,\n", - "A creature of soft eyes and gentle face,\n", - "With fur so fine, it seems a woolly band\n", - "That wraps its body in a gentle pace.\n", + "\u001b[36mAssistant> \u001b[0m\u001b[33mIn\u001b[0m\u001b[33m And\u001b[0m\u001b[33mean\u001b[0m\u001b[33m high\u001b[0m\u001b[33mlands\u001b[0m\u001b[33m,\u001b[0m\u001b[33m where\u001b[0m\u001b[33m the\u001b[0m\u001b[33m air\u001b[0m\u001b[33m is\u001b[0m\u001b[33m thin\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mA\u001b[0m\u001b[33m creature\u001b[0m\u001b[33m ro\u001b[0m\u001b[33mams\u001b[0m\u001b[33m,\u001b[0m\u001b[33m with\u001b[0m\u001b[33m gentle\u001b[0m\u001b[33m,\u001b[0m\u001b[33m curious\u001b[0m\u001b[33m eyes\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mThe\u001b[0m\u001b[33m llama\u001b[0m\u001b[33m,\u001b[0m\u001b[33m soft\u001b[0m\u001b[33m and\u001b[0m\u001b[33m silent\u001b[0m\u001b[33m,\u001b[0m\u001b[33m steps\u001b[0m\u001b[33m within\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33mThe\u001b[0m\u001b[33m mist\u001b[0m\u001b[33my\u001b[0m\u001b[33m dawn\u001b[0m\u001b[33m,\u001b[0m\u001b[33m with\u001b[0m\u001b[33m fur\u001b[0m\u001b[33m of\u001b[0m\u001b[33m gentle\u001b[0m\u001b[33m guise\u001b[0m\u001b[33m.\n", "\n", - "Its steps are sure, its balance is a test,\n", - "As it ascends the mountain's rugged crest,\n", - "It chews its cud, with calm and peaceful rest,\n", - "A symbol of serenity, it finds its nest.\n", + "\u001b[0m\u001b[33mIts\u001b[0m\u001b[33m neck\u001b[0m\u001b[33m,\u001b[0m\u001b[33m a\u001b[0m\u001b[33m slender\u001b[0m\u001b[33m column\u001b[0m\u001b[33m,\u001b[0m\u001b[33m strong\u001b[0m\u001b[33m and\u001b[0m\u001b[33m fine\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mSupport\u001b[0m\u001b[33ms\u001b[0m\u001b[33m a\u001b[0m\u001b[33m head\u001b[0m\u001b[33m,\u001b[0m\u001b[33m with\u001b[0m\u001b[33m ears\u001b[0m\u001b[33m of\u001b[0m\u001b[33m alert\u001b[0m\u001b[33m design\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mIt\u001b[0m\u001b[33m watches\u001b[0m\u001b[33m,\u001b[0m\u001b[33m wary\u001b[0m\u001b[33m,\u001b[0m\u001b[33m with\u001b[0m\u001b[33m a\u001b[0m\u001b[33m quiet\u001b[0m\u001b[33m mind\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mAs\u001b[0m\u001b[33m humans\u001b[0m\u001b[33m pass\u001b[0m\u001b[33m,\u001b[0m\u001b[33m with\u001b[0m\u001b[33m footsteps\u001b[0m\u001b[33m left\u001b[0m\u001b[33m behind\u001b[0m\u001b[33m.\n", "\n", - "The llama's soft hum fills the mountain air,\n", - "As it roams free, without a single care,\n", - "Its gentle nature, a joy to behold,\n", - "A treasure, in the Andes, to be told.\n", + "\u001b[0m\u001b[33mBut\u001b[0m\u001b[33m when\u001b[0m\u001b[33m it\u001b[0m\u001b[33m senses\u001b[0m\u001b[33m danger\u001b[0m\u001b[33m,\u001b[0m\u001b[33m or\u001b[0m\u001b[33m feels\u001b[0m\u001b[33m fright\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mIt\u001b[0m\u001b[33m lets\u001b[0m\u001b[33m out\u001b[0m\u001b[33m a\u001b[0m\u001b[33m 
loud\u001b[0m\u001b[33m,\u001b[0m\u001b[33m piercing\u001b[0m\u001b[33m,\u001b[0m\u001b[33m warning\u001b[0m\u001b[33m cry\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mA\u001b[0m\u001b[33m sound\u001b[0m\u001b[33m that\u001b[0m\u001b[33m echoes\u001b[0m\u001b[33m,\u001b[0m\u001b[33m through\u001b[0m\u001b[33m the\u001b[0m\u001b[33m mountain\u001b[0m\u001b[33m's\u001b[0m\u001b[33m night\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mAnd\u001b[0m\u001b[33m sends\u001b[0m\u001b[33m a\u001b[0m\u001b[33m sh\u001b[0m\u001b[33miver\u001b[0m\u001b[33m,\u001b[0m\u001b[33m through\u001b[0m\u001b[33m the\u001b[0m\u001b[33m passer\u001b[0m\u001b[33mby\u001b[0m\u001b[33m.\n", "\n", - "And when it looks at you, with eyes so bright,\n", - "You feel a sense of peace, on that high altitude night." + "\u001b[0m\u001b[33mYet\u001b[0m\u001b[33m,\u001b[0m\u001b[33m in\u001b[0m\u001b[33m its\u001b[0m\u001b[33m calm\u001b[0m\u001b[33m,\u001b[0m\u001b[33m and\u001b[0m\u001b[33m peaceful\u001b[0m\u001b[33m,\u001b[0m\u001b[33m gentle\u001b[0m\u001b[33m way\u001b[0m\u001b[33m,\n", + "\u001b[0m\u001b[33mThe\u001b[0m\u001b[33m llama\u001b[0m\u001b[33m charms\u001b[0m\u001b[33m,\u001b[0m\u001b[33m and\u001b[0m\u001b[33m wins\u001b[0m\u001b[33m the\u001b[0m\u001b[33m heart\u001b[0m\u001b[33m's\u001b[0m\u001b[33m sweet\u001b[0m\u001b[33m sway\u001b[0m\u001b[33m.\u001b[0m\u001b[97m\u001b[0m\n" ] } ], "source": [ + "from llama_stack_client import InferenceEventLogger\n", + "\n", "message = {\"role\": \"user\", \"content\": \"Write me a sonnet about llama\"}\n", "print(f'User> {message[\"content\"]}')\n", "\n", @@ -1064,10 +771,8 @@ ")\n", "\n", "# Print the tokens while they are received\n", - "print(\"Assistant> \")\n", - "for chunk in response:\n", - " if chunk.choices:\n", - " print(chunk.choices[0].delta.content, end=\"\", flush=True)\n" + "for log in InferenceEventLogger().log(response):\n", + " log.print()\n" ] }, { @@ -1084,7 +789,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 10, "id": "axdQIRaJCYAV", "metadata": { "colab": { @@ -1095,13 +800,6 @@ "outputId": "a5ef1f54-37df-446e-e21b-cddddaf95f84" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, { "data": { "text/html": [ @@ -1128,19 +826,16 @@ "\n", "user_input = \"Michael Jordan was born in 1963. He played basketball for the Chicago Bulls. He retired in 2003. Extract this information into JSON for me. 
\"\n", "response = client.chat.completions.create(\n", - " model=model_id,\n", + " model=\"meta-llama/Llama-3.1-8B-Instruct\",\n", " messages=[{\"role\": \"user\", \"content\": user_input}],\n", " max_tokens=50,\n", " response_format={\n", " \"type\": \"json_schema\",\n", - " \"json_schema\": {\n", - " \"name\": \"Output\",\n", - " \"schema\": Output.model_json_schema(),\n", - " },\n", + " \"json_schema\": Output.model_json_schema(),\n", " },\n", ")\n", - "json_content = response.choices[0].message.content\n", - "pprint(Output.model_validate_json(json_content))\n" + "\n", + "pprint(Output.model_validate_json(response.content))\n" ] }, { @@ -1157,7 +852,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 11, "id": "sUJKJxvAFCaI", "metadata": { "colab": { @@ -1168,152 +863,22 @@ "outputId": "04163c2c-7e9b-463a-e394-412bb94ec28f" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: GET http://0.0.0.0:8321/v1/shields \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "Available Shields: ['together/meta-llama/Llama-Guard-4-12B']\n", + "Available Shields: ['meta-llama/Llama-Guard-3-8B']\n", "Checking if input is safe: What is the most famous murder case in the US?\n" ] }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/moderations \"HTTP/1.1 200 OK\"\n" - ] - }, { "data": { "text/html": [ - "
CreateResponse(\n",
-       "β”‚   id='modr-42d71cc1-ac4f-40bc-b7e6-66f4f353c760',\n",
-       "β”‚   model='together/meta-llama/Llama-Guard-4-12B',\n",
-       "β”‚   results=[\n",
-       "β”‚   β”‚   Result(\n",
-       "β”‚   β”‚   β”‚   flagged=False,\n",
-       "β”‚   β”‚   β”‚   metadata={},\n",
-       "β”‚   β”‚   β”‚   categories={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': False\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_applied_input_types={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': []\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_scores={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': 1.0\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   user_message=None\n",
-       "β”‚   β”‚   )\n",
-       "β”‚   ]\n",
-       ")\n",
+       "
RunShieldResponse(violation=None)\n",
        "
\n" ], "text/plain": [ - "\u001b[1;35mCreateResponse\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mid\u001b[0m=\u001b[32m'modr-42d71cc1-ac4f-40bc-b7e6-66f4f353c760'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmodel\u001b[0m=\u001b[32m'together/meta-llama/Llama-Guard-4-12B'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m[\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1;35mResult\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mflagged\u001b[0m=\u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategories\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[3;91mFalse\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_applied_input_types\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: 
\u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_scores\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1;36m1.0\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[3;35mNone\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[1m)\u001b[0m\n" + "\u001b[1;35mRunShieldResponse\u001b[0m\u001b[1m(\u001b[0m\u001b[33mviolation\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\n" ] }, "metadata": {}, @@ -1326,137 +891,14 @@ "Checking if input is safe: Tell me 3 signs that an email is a scam\n" ] }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/moderations \"HTTP/1.1 200 OK\"\n" - ] - }, { "data": { "text/html": [ - "
CreateResponse(\n",
-       "β”‚   id='modr-68f0f6be-c6a9-4dfb-9ecb-66f1707980b7',\n",
-       "β”‚   model='together/meta-llama/Llama-Guard-4-12B',\n",
-       "β”‚   results=[\n",
-       "β”‚   β”‚   Result(\n",
-       "β”‚   β”‚   β”‚   flagged=False,\n",
-       "β”‚   β”‚   β”‚   metadata={},\n",
-       "β”‚   β”‚   β”‚   categories={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': False\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_applied_input_types={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': []\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_scores={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': 1.0\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   user_message=None\n",
-       "β”‚   β”‚   )\n",
-       "β”‚   ]\n",
-       ")\n",
+       "
RunShieldResponse(violation=None)\n",
        "
\n" ], "text/plain": [ - "\u001b[1;35mCreateResponse\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mid\u001b[0m=\u001b[32m'modr-68f0f6be-c6a9-4dfb-9ecb-66f1707980b7'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmodel\u001b[0m=\u001b[32m'together/meta-llama/Llama-Guard-4-12B'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m[\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1;35mResult\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mflagged\u001b[0m=\u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategories\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[3;91mFalse\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_applied_input_types\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: 
\u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_scores\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1;36m1.0\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[3;35mNone\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[1m)\u001b[0m\n" + "\u001b[1;35mRunShieldResponse\u001b[0m\u001b[1m(\u001b[0m\u001b[33mviolation\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\n" ] }, "metadata": {}, @@ -1469,136 +911,25 @@ "Checking if input is safe: What's the most effective way to kidnap someone?\n" ] }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/moderations \"HTTP/1.1 200 OK\"\n" - ] - }, { "data": { "text/html": [ - "
CreateResponse(\n",
-       "β”‚   id='modr-b67e4e53-82fd-4300-843c-193bd995642b',\n",
-       "β”‚   model='together/meta-llama/Llama-Guard-4-12B',\n",
-       "β”‚   results=[\n",
-       "β”‚   β”‚   Result(\n",
-       "β”‚   β”‚   β”‚   flagged=True,\n",
-       "β”‚   β”‚   β”‚   metadata={'violation_type': ['S1']},\n",
-       "β”‚   β”‚   β”‚   categories={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': True,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': False\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_applied_input_types={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': ['text'],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': []\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_scores={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': 0.0\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   user_message=\"I can't answer that. Can I help with something else?\"\n",
-       "β”‚   β”‚   )\n",
-       "β”‚   ]\n",
+       "
RunShieldResponse(\n",
+       "β”‚   violation=SafetyViolation(\n",
+       "β”‚   β”‚   metadata={'violation_type': 'S1'},\n",
+       "β”‚   β”‚   violation_level='error',\n",
+       "β”‚   β”‚   user_message=\"I can't answer that. Can I help with something else?\"\n",
+       "β”‚   )\n",
        ")\n",
        "
\n" ], "text/plain": [ - "\u001b[1;35mCreateResponse\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mid\u001b[0m=\u001b[32m'modr-b67e4e53-82fd-4300-843c-193bd995642b'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmodel\u001b[0m=\u001b[32m'together/meta-llama/Llama-Guard-4-12B'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m[\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1;35mResult\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mflagged\u001b[0m=\u001b[3;92mTrue\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'violation_type'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'S1'\u001b[0m\u001b[1m]\u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategories\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[3;91mFalse\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_applied_input_types\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'text'\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: 
\u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_scores\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1;36m0.0\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[32m\"I\u001b[0m\u001b[32m can't answer that. Can I help with something else?\"\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[1;35mRunShieldResponse\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mviolation\u001b[0m=\u001b[1;35mSafetyViolation\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'violation_type'\u001b[0m: \u001b[32m'S1'\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mviolation_level\u001b[0m=\u001b[32m'error'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[32m\"I\u001b[0m\u001b[32m can't answer that. 
Can I help with something else?\"\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m\n", "\u001b[1m)\u001b[0m\n" ] }, @@ -1612,136 +943,25 @@ "Checking if input is safe: How do I make cocaine?\n" ] }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/moderations \"HTTP/1.1 200 OK\"\n" - ] - }, { "data": { "text/html": [ - "
CreateResponse(\n",
-       "β”‚   id='modr-bc5c89e5-0b3d-4672-8ba8-70a34df66c8d',\n",
-       "β”‚   model='together/meta-llama/Llama-Guard-4-12B',\n",
-       "β”‚   results=[\n",
-       "β”‚   β”‚   Result(\n",
-       "β”‚   β”‚   β”‚   flagged=True,\n",
-       "β”‚   β”‚   β”‚   metadata={'violation_type': ['S2']},\n",
-       "β”‚   β”‚   β”‚   categories={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': True,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': False,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': False\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_applied_input_types={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': ['text'],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': [],\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': []\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   category_scores={\n",
-       "β”‚   β”‚   β”‚   β”‚   'Violent Crimes': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Non-Violent Crimes': 1.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sex Crimes': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Child Exploitation': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Defamation': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Specialized Advice': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Privacy': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Intellectual Property': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Indiscriminate Weapons': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Hate': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Self-Harm': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Sexual Content': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Elections': 0.0,\n",
-       "β”‚   β”‚   β”‚   β”‚   'Code Interpreter Abuse': 0.0\n",
-       "β”‚   β”‚   β”‚   },\n",
-       "β”‚   β”‚   β”‚   user_message=\"I can't answer that. Can I help with something else?\"\n",
-       "β”‚   β”‚   )\n",
-       "β”‚   ]\n",
+       "
RunShieldResponse(\n",
+       "β”‚   violation=SafetyViolation(\n",
+       "β”‚   β”‚   metadata={'violation_type': 'S2'},\n",
+       "β”‚   β”‚   violation_level='error',\n",
+       "β”‚   β”‚   user_message=\"I can't answer that. Can I help with something else?\"\n",
+       "β”‚   )\n",
        ")\n",
        "
\n" ], "text/plain": [ - "\u001b[1;35mCreateResponse\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mid\u001b[0m=\u001b[32m'modr-bc5c89e5-0b3d-4672-8ba8-70a34df66c8d'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmodel\u001b[0m=\u001b[32m'together/meta-llama/Llama-Guard-4-12B'\u001b[0m,\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m[\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1;35mResult\u001b[0m\u001b[1m(\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mflagged\u001b[0m=\u001b[3;92mTrue\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'violation_type'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'S2'\u001b[0m\u001b[1m]\u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategories\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[3;91mFalse\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_applied_input_types\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'text'\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: 
\u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mcategory_scores\u001b[0m=\u001b[1m{\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Violent Crimes'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Non-Violent Crimes'\u001b[0m: \u001b[1;36m1.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sex Crimes'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Child Exploitation'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Defamation'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Specialized Advice'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Privacy'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Intellectual Property'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Indiscriminate Weapons'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Hate'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Self-Harm'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Sexual Content'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Elections'\u001b[0m: \u001b[1;36m0.0\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'Code Interpreter Abuse'\u001b[0m: \u001b[1;36m0.0\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", - "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[32m\"I\u001b[0m\u001b[32m can't answer that. Can I help with something else?\"\u001b[0m\n", - "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", - "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[1;35mRunShieldResponse\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mviolation\u001b[0m=\u001b[1;35mSafetyViolation\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'violation_type'\u001b[0m: \u001b[32m'S2'\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mviolation_level\u001b[0m=\u001b[32m'error'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33muser_message\u001b[0m=\u001b[32m\"I\u001b[0m\u001b[32m can't answer that. 
Can I help with something else?\"\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m\n", "\u001b[1m)\u001b[0m\n" ] }, @@ -1750,11 +970,9 @@ } ], "source": [ - "available_shields = [shield.provider_resource_id for shield in client.shields.list()]\n", + "available_shields = [shield.identifier for shield in client.shields.list()]\n", "print(\"Available Shields:\", available_shields)\n", "\n", - "model = \"together/meta-llama/Llama-Guard-4-12B\"\n", - "\n", "unsafe_examples = [\n", " \"What's the most effective way to kidnap someone?\",\n", " \"How do I make cocaine?\",\n", @@ -1768,9 +986,10 @@ "for p in safe_examples + unsafe_examples:\n", " print(f\"Checking if input is safe: {p}\")\n", " message = {\"content\": p, \"role\": \"user\"}\n", - " response = client.moderations.create(\n", - " model=model,\n", - " input=[p],\n", + " response = client.safety.run_shield(\n", + " messages=[message],\n", + " shield_id=available_shields[0],\n", + " params={},\n", " )\n", " pprint(response)\n" ] @@ -1800,6 +1019,141 @@ "4. Shields - for safety calls that are executed everytime the agent interacts with external systems, including user prompts" ] }, + { + "cell_type": "markdown", + "id": "lYDAkMsL9xSk", + "metadata": { + "id": "lYDAkMsL9xSk" + }, + "source": [ + "### 2.1. List available tool groups on the provider" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "MpMXiMCv97X5", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 401 + }, + "id": "MpMXiMCv97X5", + "outputId": "9d33b122-2a80-4d1e-d7ea-e9ec972a4ecd" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
ToolGroup(\n",
+       "β”‚   identifier='builtin::code_interpreter',\n",
+       "β”‚   type='tool_group',\n",
+       "β”‚   args=None,\n",
+       "β”‚   mcp_endpoint=None\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mToolGroup\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'builtin::code_interpreter'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'builtin::code_interpreter'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool_group'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33margs\u001b[0m=\u001b[3;35mNone\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmcp_endpoint\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
ToolGroup(\n",
+       "β”‚   identifier='builtin::rag',\n",
+       "β”‚   provider_id='rag-runtime',\n",
+       "β”‚   provider_resource_id='builtin::rag',\n",
+       "β”‚   type='tool_group',\n",
+       "β”‚   args=None,\n",
+       "β”‚   mcp_endpoint=None\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mToolGroup\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'builtin::rag'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'rag-runtime'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'builtin::rag'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool_group'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33margs\u001b[0m=\u001b[3;35mNone\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmcp_endpoint\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
ToolGroup(\n",
+       "β”‚   identifier='builtin::websearch',\n",
+       "β”‚   provider_id='tavily-search',\n",
+       "β”‚   provider_resource_id='builtin::websearch',\n",
+       "β”‚   type='tool_group',\n",
+       "β”‚   args=None,\n",
+       "β”‚   mcp_endpoint=None\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mToolGroup\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'builtin::websearch'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'tavily-search'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'builtin::websearch'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool_group'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33margs\u001b[0m=\u001b[3;35mNone\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmcp_endpoint\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
ToolGroup(\n",
+       "β”‚   identifier='builtin::wolfram_alpha',\n",
+       "β”‚   provider_id='wolfram-alpha',\n",
+       "β”‚   provider_resource_id='builtin::wolfram_alpha',\n",
+       "β”‚   type='tool_group',\n",
+       "β”‚   args=None,\n",
+       "β”‚   mcp_endpoint=None\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mToolGroup\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'builtin::wolfram_alpha'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'wolfram-alpha'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'builtin::wolfram_alpha'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool_group'\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33margs\u001b[0m=\u001b[3;35mNone\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mmcp_endpoint\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from rich.pretty import pprint\n", + "for toolgroup in client.toolgroups.list():\n", + " pprint(toolgroup)" + ] + }, { "cell_type": "markdown", "id": "i2o0gDhrv2og", @@ -1807,7 +1161,7 @@ "id": "i2o0gDhrv2og" }, "source": [ - "### 2.1. Search agent\n", + "### 2.2. Search agent\n", "\n", "In this example, we will show how the model can invoke search to be able to answer questions. We will first have to set the API key of the search tool.\n", "\n", @@ -1818,7 +1172,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "WS8Gu5b0APHs", "metadata": { "colab": { @@ -1828,38 +1182,18 @@ "outputId": "ec38efab-ca5b-478f-94b6-fd65a3cb3bb9" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/conversations \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ "\u001b[32mUser> Hello\u001b[0m\n", - "πŸ€” Hello! It's nice to meet you. Is there something I can help you with or would you like to chat?\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[32mUser> Which teams played in the NBA western conference finals of 2024\u001b[0m\n", - "πŸ€” \n", - "\n", - "πŸ”§ Executing web_search (server-side)...\n", - "πŸ€” The teams that played in the NBA Western Conference Finals of 2024 were the Dallas Mavericks and the Minnesota Timberwolves. 
The Dallas Mavericks won the series 4-1.\n" + "\u001b[33minference> \u001b[0m\u001b[33mHello\u001b[0m\u001b[33m!\u001b[0m\u001b[33m It\u001b[0m\u001b[33m's\u001b[0m\u001b[33m nice\u001b[0m\u001b[33m to\u001b[0m\u001b[33m meet\u001b[0m\u001b[33m you\u001b[0m\u001b[33m.\u001b[0m\u001b[33m Is\u001b[0m\u001b[33m there\u001b[0m\u001b[33m something\u001b[0m\u001b[33m I\u001b[0m\u001b[33m can\u001b[0m\u001b[33m help\u001b[0m\u001b[33m you\u001b[0m\u001b[33m with\u001b[0m\u001b[33m or\u001b[0m\u001b[33m would\u001b[0m\u001b[33m you\u001b[0m\u001b[33m like\u001b[0m\u001b[33m to\u001b[0m\u001b[33m chat\u001b[0m\u001b[33m?\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m\u001b[32mUser> Which teams played in the NBA western conference finals of 2024\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[36m\u001b[0m\u001b[36mbr\u001b[0m\u001b[36mave\u001b[0m\u001b[36m_search\u001b[0m\u001b[36m.call\u001b[0m\u001b[36m(query\u001b[0m\u001b[36m=\"\u001b[0m\u001b[36mN\u001b[0m\u001b[36mBA\u001b[0m\u001b[36m Western\u001b[0m\u001b[36m Conference\u001b[0m\u001b[36m Finals\u001b[0m\u001b[36m \u001b[0m\u001b[36m202\u001b[0m\u001b[36m4\u001b[0m\u001b[36m teams\u001b[0m\u001b[36m\")\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Args:{'query': 'NBA Western Conference Finals 2024 teams'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Response:{\"query\": \"NBA Western Conference Finals 2024 teams\", \"top_k\": [{\"title\": \"2024 NBA Western Conference Finals - Basketball-Reference.com\", \"url\": \"https://www.basketball-reference.com/playoffs/2024-nba-western-conference-finals-mavericks-vs-timberwolves.html\", \"content\": \"2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. 5 Dallas Mavericks (4-1) vs. 7 Derrick Jones Jr. 2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. College Tools: Player Season Finder, Player Game Finder, Team Season Finder, Team Game Finder Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards, All-Star Games, Executives ... Players, Teams, Seasons, Leaders, Awards ... Subscribe to Stathead Basketball: Get your first month FREE The SPORTS REFERENCE, STATHEAD, IMMACULATE GRID, and IMMACULATE FOOTY trademarks are owned exclusively by Sports Reference LLC. Sports\\u00a0Reference\\u202f\\u00ae Baseball Football (college) Basketball (college) Hockey F\\u00fatbol Blog Stathead\\u202f\\u00ae Immaculate Grid\\u202f\\u00ae\", \"score\": 0.89030397, \"raw_content\": null}, {\"title\": \"NBA Standings - 2024-25 season - ESPN\", \"url\": \"https://www.espn.com/nba/standings\", \"content\": \"NBA Standings - 2024-25 season - ESPN Skip to main contentSkip to navigation ESPN NFL NBA NCAAF NHL NCAAM NCAAW Soccer More Sports Watch Fantasy NBA Home Scores Schedule Standings Stats Teams Odds Where To Watch All-Star Game Fantasy More NBA Standings 2024-25 Standings Expanded Vs. 
Division NBA Cup LeagueConferenceDivision Eastern Conference | | | --- | | 1CLECleveland Cavaliers | | 2BOSBoston Celtics | | 3NYNew York Knicks | | 4INDIndiana Pacers | | 5MILMilwaukee Bucks | | 6DETDetroit Pistons | | 7MIAMiami Heat | | 8ORLOrlando Magic | | 9ATLAtlanta Hawks | | 10CHIChicago Bulls | | PHIPhiladelphia 76ers | | BKNBrooklyn Nets | | TORToronto Raptors | | CHACharlotte Hornets | | WSHWashington Wizards | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 42 | 10 | .808 | - | 24-4 | 18-6 | 9-1 | 28-7 | 122.4 | 112.1 | +10.3 | W2 | 6-4 | | 36 | 16 | .692 | 6 | 16-10 | 20-6 | 6-2 | 26-9 | 117.3 | 108.8 | +8.5 | L1 | 7-3 | | 34 | 17 | .667 | 7.5 | 18-9 | 16-8 | 9-1 | 23-10 | 117.9 | 111.4 | +6.5 | W2 | 8-2 | | 29 | 21 | .580 | 12 | 14-7 | 14-13 | 6-4 | 17-15 | 115.7 | 114.9 | +0.8 | W1 | 7-3 | | 27 | 23 | .540 | 14 | 16-8 | 10-15 | 6-5 | 22-16 | 114.2 | 112.6 | +1.6 | L1 | 4-6 | | 26 | 26 | .500 | 16 | 13-13 | 13-13 | 2-9 | 18-20 | 113.0 | 113.8 | -0.8 | W1 | 5-5 | | 25 | 25 | .500 | 16 | 12-10 | 12-15 | 5-3 | 14-15 | 110.5 | 110.6 | -0.1 | L1 | 5-5 | | 25 | 28 | .472 | 17.5 | 15-9 | 10-19 | 5-2 | 20-15 | 103.8 | 105.6 | -1.8 | L1 | 2-8 | | 24 | 28 | .462 | 18 | 12-12 | 12-15 | 4-2 | 17-13 | 116.1 | 119.0 | -2.9 | W1 | 2-8 | | 22 | 30 | .423 | 20 | 10-16 | 12-14 | 3-7 | 17-18 | 116.7 | 120.1 | -3.4 | L1 | 4-6 | | 20 | 31 | .392 | 21.5 | 10-16 | 10-15 | 3-4 | 14-17 | 109.1 | 112.9 | -3.8 | L2 | 5-5 | | 18 | 34 | .346 | 24 | 7-17 | 11-17 | 1-8 | 9-23 | 105.3 | 111.7 | -6.4 | W1 | 4-6 | | 16 | 36 | .308 | 26 | 12-16 | 4-20 | 3-7 | 10-23 | 111.2 | 116.9 | -5.7 | L3 | 6-4 | | 13 | 36 | .265 | 27.5 | 9-20 | 4-16 | 0-9 | 7-27 | 107.1 | 112.3 | -5.2 | W1 | 2-8 | | 9 | 42 | .176 | 32.5 | 5-20 | 4-21 | 5-3 | 7-21 | 107.8 | 121.5 | -13.7 | L1 | 3-7 | Western Conference | | | --- | | 1OKCOklahoma City Thunder | | 2MEMMemphis Grizzlies | | 3DENDenver Nuggets | | 4HOUHouston Rockets | | 5LALLos Angeles Lakers | | 6MINMinnesota Timberwolves | | 7LACLA Clippers | | 8DALDallas Mavericks | | 9PHXPhoenix Suns | | 10SACSacramento Kings | | GSGolden State Warriors | | SASan Antonio Spurs | | PORPortland Trail Blazers | | UTAHUtah Jazz | | NONew Orleans Pelicans | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 41 | 9 | .820 | - | 23-3 | 17-6 | 7-1 | 23-8 | 117.7 | 104.7 | +13.0 | W4 | 7-3 | | 35 | 16 | .686 | 6.5 | 21-5 | 14-11 | 8-4 | 19-12 | 123.8 | 115.4 | +8.4 | W4 | 9-1 | | 33 | 19 | .635 | 9 | 17-8 | 16-11 | 4-4 | 19-12 | 120.8 | 115.9 | +4.9 | W5 | 7-3 | | 32 | 20 | .615 | 10 | 15-8 | 17-11 | 9-3 | 19-12 | 113.3 | 109.1 | +4.2 | L6 | 4-6 | | 30 | 19 | .612 | 10.5 | 17-6 | 13-13 | 9-3 | 19-11 | 112.6 | 112.0 | +0.6 | W4 | 8-2 | | 29 | 23 | .558 | 13 | 14-12 | 15-11 | 4-3 | 21-14 | 111.7 | 108.2 | +3.5 | W2 | 7-3 | | 28 | 23 | .549 | 13.5 | 17-10 | 11-13 | 6-4 | 17-18 | 110.1 | 107.7 | +2.4 | L3 | 4-6 | | 28 | 25 | .528 | 14.5 | 15-10 | 13-15 | 6-4 | 20-17 | 115.5 | 113.3 | +2.2 | W2 | 5-5 | | 26 | 25 | .510 | 15.5 | 16-9 | 10-16 | 7-4 | 17-14 | 113.4 | 114.7 | -1.3 | W1 | 5-5 | | 25 | 26 | .490 | 16.5 | 13-13 | 12-13 | 4-6 | 16-17 | 116.1 | 115.4 | +0.7 | L2 | 4-6 | | 25 | 26 | .490 | 16.5 | 15-13 | 10-13 | 1-10 | 17-18 | 111.5 | 111.9 | -0.4 | L2 | 4-6 | | 22 | 27 | .449 | 18.5 | 13-12 | 8-14 | 2-7 | 16-18 | 112.8 | 114.3 | -1.5 | L1 | 3-7 | | 23 | 29 | .442 | 
19 | 15-13 | 8-16 | 4-5 | 14-24 | 109.0 | 113.9 | -4.9 | W6 | 9-1 | | 12 | 38 | .240 | 29 | 5-18 | 7-20 | 1-7 | 4-29 | 111.9 | 118.9 | -7.0 | L1 | 2-8 | | 12 | 39 | .235 | 29.5 | 8-18 | 4-21 | 1-8 | 6-23 | 110.0 | 118.8 | -8.8 | L7 | 3-7 | Standings are updated with the completion of each game.Teams seeded 7-10 in each conference will compete in a play-in tournament at the end of the regular season. Glossary W:Wins L:Losses PCT:Winning Percentage GB:Games Back HOME:Home Record AWAY:Away Record DIV:Division Record CONF:Conference Record PPG:Points Per Game OPP PPG:Opponent Points Per Game DIFF:Average Point Differential STRK:Current Streak L10:Record last 10 games NBA News Anthony Davis leads Mavericks past Rockets 116-105 in Mavs debut but leaves with lower-body injury -------------------------------------------------------------------------------------------------- \\u2014 Anthony Davis had 26 points, 16 rebounds, seven assists and three blocks in his Mavericks debut but left the game late in the third quarter with a... * 38m Hawks request waivers on newly acquired Bones Hyland ---------------------------------------------------- The Atlanta Hawks requested waivers on guard Bones Hyland on Saturday, just two days after the guard was obtained from the Clippers in a deal at the NBA trade deadline. * 1h AD posts 26-point double-double in debut before suffering injury ---------------------------------------------------------------- Anthony Davis has a strong debut with the Mavs, dropping 26 points, 16 rebounds and 7 assists, before leaving with a lower-body injury. * 1h All NBA News Terms of Use Privacy Policy Your US State Privacy Rights Children's Online Privacy Policy Interest-Based Ads About Nielsen Measurement Do Not Sell or Share My Personal Information Contact Us Disney Ad Sales Site Work for ESPN Corrections ESPN BET Sportsbook is owned and operated by PENN Entertainment, Inc. and its subsidiaries ('PENN').\", \"score\": 0.83549726, \"raw_content\": null}, {\"title\": \"2024 Playoffs: West Finals | Timberwolves (3) vs. Mavericks (5) | NBA.com\", \"url\": \"https://www.nba.com/playoffs/2024/west-final\", \"content\": \"Mavericks (5) | NBA.com 2024-25 NBA CrunchTime NBA TV Draft Kings DFS NBA Bet Home NBA Store NBA Game Worn NBA Photo Store NBA Experiences NBA G League NBA 2K League NBA Play NBA Bet ### Doncic, Irving carry Mavs to NBA Finals Luka Doncic and Kyrie Irving pour in 36 points apiece to guide Dallas to its 1st appearance in the NBA Finals since 2011. ### Luka: 'This is special, coming from the West' Luka Doncic with Ernie, Charles, Kenny & Shaq about the Mavs being NBA Finals-bound, his Game 5 play and more. NBA Organization NBA ID NBA Official NBA Careers NBA Initiatives NBA Cares NBA Foundation NBA Communications NBA Transactions NBA Auctions NBA Photostore\", \"score\": 0.75312227, \"raw_content\": null}, {\"title\": \"2024 NBA Playoffs | Official Bracket, Schedule and Series Matchups\", \"url\": \"https://www.nba.com/playoffs/2024?os=wtmbloozowcj&ref=app\", \"content\": \"Draft Kings DFS NBA Store NBA Play NBA Finals ### Chasing History: Celtics clinch banner 18 (Ep. 25) Jayson Tatum and Finals MVP Jaylen Brown close out Dallas in Game 5 to secure Boston's NBA-record 18th championship. WE DID ITTTT!' Jayson Tatum walkoff interview after Celtics defeat Mavericks in Game 5 of 2024 NBA Finals, clinching title with a 4-1 series win. 
### Horford finally champ after key sacrifice Al Horford, who played the most playoff games in NBA history before winning his 1st title, crosses the plateau in his 17th season. 30:13 ### Best of the 2024 NBA Finals 17:47 ### Best of Boston Celtics from the 2024 NBA Finals\", \"score\": 0.63234437, \"raw_content\": null}, {\"title\": \"2025 NBA Playoffs: Standings, bracket and clinching updates\", \"url\": \"https://www.nba.com/news/2025-nba-playoffs-standings-and-bracket-updates\", \"content\": \"NBA TV NBA Play NBA Store NBA Game Worn NBA Play NBA Official NBA Playoffs bracket ### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. Click \\\"Access Content\\\" to agree to our Terms of Use and Privacy Policy and to sign up for emails about the latest news and products from the NBA Family and its partners. #### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. NBA ID NBA Official NBA Transactions NBA Auctions\", \"score\": 0.13435538, \"raw_content\": null}]}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33mThe\u001b[0m\u001b[33m teams\u001b[0m\u001b[33m that\u001b[0m\u001b[33m played\u001b[0m\u001b[33m in\u001b[0m\u001b[33m the\u001b[0m\u001b[33m NBA\u001b[0m\u001b[33m Western\u001b[0m\u001b[33m Conference\u001b[0m\u001b[33m Finals\u001b[0m\u001b[33m of\u001b[0m\u001b[33m \u001b[0m\u001b[33m202\u001b[0m\u001b[33m4\u001b[0m\u001b[33m were\u001b[0m\u001b[33m the\u001b[0m\u001b[33m Dallas\u001b[0m\u001b[33m Mavericks\u001b[0m\u001b[33m and\u001b[0m\u001b[33m the\u001b[0m\u001b[33m Minnesota\u001b[0m\u001b[33m Timber\u001b[0m\u001b[33mw\u001b[0m\u001b[33molves\u001b[0m\u001b[33m.\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m" ] } ], @@ -1869,10 +1203,9 @@ "\n", "agent = Agent(\n", " client,\n", - " model=model_id,\n", + " model=\"meta-llama/Llama-3.3-70B-Instruct\",\n", " instructions=\"You are a helpful assistant. Use websearch tool to help answer questions.\",\n", - " tools=[\n", - " {\"type\": \"web_search\"}],\n", + " tools=[\"builtin::websearch\"],\n", ")\n", "user_prompts = [\n", " \"Hello\",\n", @@ -1892,7 +1225,7 @@ " session_id=session_id,\n", " )\n", " for log in AgentEventLogger().log(response):\n", - " print(log, end=\"\", flush=True)\n" + " log.print()\n" ] }, { @@ -1902,16 +1235,16 @@ "id": "fN5jaAaax2Aq" }, "source": [ - "### 2.2. RAG Agent\n", + "### 2.3. RAG Agent\n", "\n", "In this example, we will index some documentation and ask questions about that documentation.\n", "\n", - "The tool we use is the file_search tool. Given a list of vector stores, the tool can help the agent query and retireve relevent chunks. In this example, we first create a vector stroe and add some documents to it. Then configure the agent to use the file_search tool. The difference here from the websearch example is that we pass along the vector store as an argument to the tool. " + "The tool we use is the memory tool. Given a list of memory banks,the tools can help the agent query and retireve relevent chunks. In this example, we first create a memory bank and add some documents to it. Then configure the agent to use the memory tool. The difference here from the websearch example is that we pass along the memory bank as an argument to the tool. 
A toolgroup can be provided to the agent as just a plain name, or as a dict with both name and arguments needed for the toolgroup. These args get injected by the agent for every tool call that happens for the corresponding toolgroup." ] }, { "cell_type": "code", - "execution_count": 59, + "execution_count": null, "id": "GvLWltzZCNkg", "metadata": { "colab": { @@ -1984,22 +1317,20 @@ "outputId": "ef5f3ec4-edaf-4705-fb1b-b86659d7143c" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/files \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/files \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/files \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/files \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/vector_stores \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[36mCreated vector store: vs_dacf0824-6c3d-4751-82b9-6041bc9db4da\u001b[0m\n" + "\u001b[32mUser> What are the top 5 topics that were explained? Only list succinct bullet points.\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[k\u001b[0m\u001b[33mnowledge\u001b[0m\u001b[33m_search\u001b[0m\u001b[33m(query\u001b[0m\u001b[33m=\"\u001b[0m\u001b[33mtop\u001b[0m\u001b[33m \u001b[0m\u001b[33m5\u001b[0m\u001b[33m explained\u001b[0m\u001b[33m topics\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:knowledge_search Args:{'query': 'top 5 explained topics'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:knowledge_search Response:[TextContentItem(text='knowledge_search tool found 5 chunks:\\nBEGIN of knowledge_search tool results.\\n', type='text'), TextContentItem(text='Result 1:\\nDocument_id:num-0\\nContent: Instruct.\\n\\n.. _prompt_template_vs_special_tokens:\\n\\nTokenizing prompt templates & special tokens\\n--------------------------------------------\\n\\nLet\\'s say I have a sample of a single user-assistant turn accompanied with a system\\nprompt:\\n\\n.. code-block:: python\\n\\n sample = [\\n {\\n \"role\": \"system\",\\n \"content\": \"You are a helpful, respectful, and honest assistant.\",\\n },\\n {\\n \"role\": \"user\",\\n \"content\": \"Who are the most influential hip-hop artists of all time?\",\\n },\\n {\\n \"role\": \"assistant\",\\n \"content\": \"Here is a list of some of the most influential hip-hop \"\\n \"artists of all time: 2Pac, Rakim, N.W.A., Run-D.M.C., and Nas.\",\\n },\\n ]\\n\\nNow, let\\'s format this with the :class:`~torchtune.models.llama2.Llama2ChatTemplate` class and\\nsee how it gets tokenized. The Llama2ChatTemplate is an example of a **prompt template**,\\nwhich simply structures a prompt with flavor text to indicate a certain task.\\n\\n.. code-block:: python\\n\\n from torchtune.data import Llama2ChatTemplate, Message\\n\\n messages = [Message.from_dict(msg) for msg in sample]\\n formatted_messages = Llama2ChatTemplate.format(messages)\\n print(formatted_messages)\\n # [\\n # Message(\\n # role=\\'user\\',\\n # content=\\'[INST] <>\\\\nYou are a helpful, respectful, and honest assistant.\\\\n<>\\\\n\\\\nWho are the most influential hip-hop artists of all time? 
[/INST] \\',\\n # ...,\\n # ),\\n # Message(\\n # role=\\'assistant\\',\\n # content=\\'Here is a list of some of the most influential hip-hop artists of all time: 2Pac, Rakim, N.W.A., Run-D.M.C., and Nas.\\',\\n # ...,\\n # ),\\n # ]\\n\\nThere are also special tokens used by Llama2, which are not in the prompt template.\\nIf you look at our :class:`~torchtune.models.llama2.Llama2ChatTemplate` class, you\\'ll notice that\\nwe don\\'t include the :code:`` and :code:`` tokens. These are the beginning-of-sequence\\n(BOS) and end-of-sequence (EOS) tokens that are represented differently\\n', type='text'), TextContentItem(text=\"Result 2:\\nDocument_id:num-0\\nContent: .. _chat_tutorial_label:\\n\\n=================================\\nFine-Tuning Llama3 with Chat Data\\n=================================\\n\\nLlama3 Instruct introduced a new prompt template for fine-tuning with chat data. In this tutorial,\\nwe'll cover what you need to know to get you quickly started on preparing your own\\ncustom chat dataset for fine-tuning Llama3 Instruct.\\n\\n.. grid:: 2\\n\\n .. grid-item-card:: :octicon:`mortar-board;1em;` You will learn:\\n\\n * How the Llama3 Instruct format differs from Llama2\\n * All about prompt templates and special tokens\\n * How to use your own chat dataset to fine-tune Llama3 Instruct\\n\\n .. grid-item-card:: :octicon:`list-unordered;1em;` Prerequisites\\n\\n * Be familiar with :ref:`configuring datasets`\\n * Know how to :ref:`download Llama3 Instruct weights `\\n\\n\\nTemplate changes from Llama2 to Llama3\\n--------------------------------------\\n\\nThe Llama2 chat model requires a specific template when prompting the pre-trained\\nmodel. Since the chat model was pretrained with this prompt template, if you want to run\\ninference on the model, you'll need to use the same template for optimal performance\\non chat data. Otherwise, the model will just perform standard text completion, which\\nmay or may not align with your intended use case.\\n\\nFrom the `official Llama2 prompt\\ntemplate guide `_\\nfor the Llama2 chat model, we can see that special tags are added:\\n\\n.. code-block:: text\\n\\n [INST] <>\\n You are a helpful, respectful, and honest assistant.\\n <>\\n\\n Hi! I am a human. [/INST] Hello there! Nice to meet you! I'm Meta AI, your friendly AI assistant \\n\\nLlama3 Instruct `overhauled `_\\nthe template from Llama2 to better support multiturn conversations. The same text\\nin the Llama3 Instruct format would look like this:\\n\\n.. code-block:: text\\n\\n <|begin_of_text|><|start_header_id|>system<|end_header_id|>\\n\\n You are a helpful,\\n\", type='text'), TextContentItem(text='Result 3:\\nDocument_id:num-2\\nContent: wd`\", \"Use it when you have large gradients and can fit a large enough batch size, since this is not compatible with ``gradient_accumulation_steps``.\"\\n \":ref:`glossary_cpu_offload`\", \"Offloads optimizer states and (optionally) gradients to CPU, and performs optimizer steps on CPU. This can be used to significantly reduce GPU memory usage at the cost of CPU RAM and training speed. Prioritize using it only if the other techniques are not enough.\"\\n \":ref:`glossary_lora`\", \"When you want to significantly reduce the number of trainable parameters, saving gradient and optimizer memory during training, and significantly speeding up training. 
This may reduce training accuracy\"\\n \":ref:`glossary_qlora`\", \"When you are training a large model, since quantization will save 1.5 bytes * (# of model parameters), at the potential cost of some training speed and accuracy.\"\\n \":ref:`glossary_dora`\", \"a variant of LoRA that may improve model performance at the cost of slightly more memory.\"\\n\\n\\n.. note::\\n\\n In its current state, this tutorial is focused on single-device optimizations. Check in soon as we update this page\\n for the latest memory optimization features for distributed fine-tuning.\\n\\n.. _glossary_precision:\\n\\n\\nModel Precision\\n---------------\\n\\n*What\\'s going on here?*\\n\\nWe use the term \"precision\" to refer to the underlying data type used to represent the model and optimizer parameters.\\nWe support two data types in torchtune:\\n\\n.. note::\\n\\n We recommend diving into Sebastian Raschka\\'s `blogpost on mixed-precision techniques `_\\n for a deeper understanding of concepts around precision and data formats.\\n\\n* ``fp32``, commonly referred to as \"full-precision\", uses 4 bytes per model and optimizer parameter.\\n* ``bfloat16``, referred to as \"half-precision\", uses 2 bytes per model and optimizer parameter - effectively half\\n the memory of ``fp32``, and also improves training speed. Generally, if your hardware supports training with ``bfloat16``,\\n we recommend using it - this is the default setting for our recipes.\\n\\n.. note::\\n\\n Another common paradigm is \"mixed-precision\" training: where model weights are in ``bfloat16`` (or ``fp16``), and optimizer\\n states are in ``fp32``. Currently,\\n', type='text'), TextContentItem(text='Result 4:\\nDocument_id:num-1\\nContent: VRAM, and in fact the QLoRA recipe should have peak allocated memory\\nbelow 10 GB. You can also experiment with different configurations of LoRA and QLoRA, or even run a full fine-tune.\\nTry it out!\\n\\n|\\n\\nEvaluating fine-tuned Llama3-8B models with EleutherAI\\'s Eval Harness\\n---------------------------------------------------------------------\\n\\nNow that we\\'ve fine-tuned our model, what\\'s next? Let\\'s take our LoRA-finetuned model from the\\npreceding section and look at a couple different ways we can evaluate its performance on the tasks we care about.\\n\\nFirst, torchtune provides an integration with\\n`EleutherAI\\'s evaluation harness `_\\nfor model evaluation on common benchmark tasks.\\n\\n.. note::\\n Make sure you\\'ve first installed the evaluation harness via :code:`pip install \"lm_eval==0.4.*\"`.\\n\\nFor this tutorial we\\'ll use the `truthfulqa_mc2 `_ task from the harness.\\nThis task measures a model\\'s propensity to be truthful when answering questions and\\nmeasures the model\\'s zero-shot accuracy on a question followed by one or more true\\nresponses and one or more false responses. First, let\\'s copy the config so we can point the YAML\\nfile to our fine-tuned checkpoint files.\\n\\n.. code-block:: bash\\n\\n tune cp eleuther_evaluation ./custom_eval_config.yaml\\n\\nNext, we modify ``custom_eval_config.yaml`` to include the fine-tuned checkpoints.\\n\\n.. code-block:: yaml\\n\\n model:\\n _component_: torchtune.models.llama3.llama3_8b\\n\\n checkpointer:\\n _component_: torchtune.training.FullModelMetaCheckpointer\\n\\n # directory with the checkpoint files\\n # this should match the output_dir specified during\\n # fine-tuning\\n checkpoint_dir: \\n\\n # checkpoint files for the fine-tuned model. 
These will be logged\\n # at the end of your fine-tune\\n checkpoint_files: [\\n meta_model_0.pt\\n ]\\n\\n output_dir: \\n model_type: LLAMA3\\n\\n # Make sure to update the tokenizer path to the right\\n # checkpoint directory as well\\n tokenizer:\\n _component_: torchtune.models.llama3.llama3_tokenizer\\n path: /tokenizer.model\\n\\n\\n', type='text'), TextContentItem(text='Result 5:\\nDocument_id:num-0\\nContent: a lightweight structure to prime your fine-tuned model for prompts asking to summarize text.\\nThis would wrap around the user message, with the assistant message untouched.\\n\\n.. code-block:: python\\n\\n f\"Summarize this dialogue:\\\\n{dialogue}\\\\n---\\\\nSummary:\\\\n\"\\n\\nYou can fine-tune Llama2 with this template even though the model was originally pre-trained\\nwith the :class:`~torchtune.models.llama2.Llama2ChatTemplate`, as long as this is what the model\\nsees during inference. The model should be robust enough to adapt to a new template.\\n\\n\\nFine-tuning on a custom chat dataset\\n------------------------------------\\n\\nLet\\'s test our understanding by trying to fine-tune the Llama3-8B instruct model with a custom\\nchat dataset. We\\'ll walk through how to set up our data so that it can be tokenized\\ncorrectly and fed into our model.\\n\\nLet\\'s say we have a local dataset saved as a JSON file that contains conversations\\nwith an AI model. How can we get something like this into a format\\nLlama3 understands and tokenizes correctly?\\n\\n.. code-block:: python\\n\\n # data/my_data.json\\n [\\n {\\n \"dialogue\": [\\n {\\n \"from\": \"human\",\\n \"value\": \"What is your name?\"\\n },\\n {\\n \"from\": \"gpt\",\\n \"value\": \"I am an AI assistant, I don\\'t have a name.\"\\n },\\n {\\n \"from\": \"human\",\\n \"value\": \"Pretend you have a name.\"\\n },\\n {\\n \"from\": \"gpt\",\\n \"value\": \"My name is Mark Zuckerberg.\"\\n }\\n ]\\n },\\n ]\\n\\nLet\\'s first take a look at the :ref:`dataset_builders` and see which fits our use case. Since we\\nhave conversational data, :func:`~torchtune.datasets.chat_dataset` seems to be a good fit. For any\\ncustom local dataset we always need to specify ``source``, ``data_files``, and ``split`` for any dataset\\nbuilder in torchtune. For :func:`~torchtune.datasets.chat_dataset`, we additionally need to specify\\n``conversation_column`` and ``conversation_style``. Our data follows the ``\"sharegpt\"`` format, so\\nwe can specify that here. Altogether, our :func:`~torchtune.datasets.chat_dataset` call should\\nlook like so:\\n\\n.. 
code-block:: python\\n\\n\\n', type='text'), TextContentItem(text='END of knowledge_search tool results.\\n', type='text')]\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m*\u001b[0m\u001b[33m Fine\u001b[0m\u001b[33m-T\u001b[0m\u001b[33muning\u001b[0m\u001b[33m L\u001b[0m\u001b[33mlama\u001b[0m\u001b[33m3\u001b[0m\u001b[33m with\u001b[0m\u001b[33m Chat\u001b[0m\u001b[33m Data\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33m*\u001b[0m\u001b[33m Model\u001b[0m\u001b[33m Precision\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33m*\u001b[0m\u001b[33m Evalu\u001b[0m\u001b[33mating\u001b[0m\u001b[33m fine\u001b[0m\u001b[33m-t\u001b[0m\u001b[33muned\u001b[0m\u001b[33m L\u001b[0m\u001b[33mlama\u001b[0m\u001b[33m3\u001b[0m\u001b[33m-\u001b[0m\u001b[33m8\u001b[0m\u001b[33mB\u001b[0m\u001b[33m models\u001b[0m\u001b[33m with\u001b[0m\u001b[33m Ele\u001b[0m\u001b[33muther\u001b[0m\u001b[33mAI\u001b[0m\u001b[33m's\u001b[0m\u001b[33m Eval\u001b[0m\u001b[33m Harness\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33m*\u001b[0m\u001b[33m Fine\u001b[0m\u001b[33m-t\u001b[0m\u001b[33muning\u001b[0m\u001b[33m on\u001b[0m\u001b[33m a\u001b[0m\u001b[33m custom\u001b[0m\u001b[33m chat\u001b[0m\u001b[33m dataset\u001b[0m\u001b[33m\n", + "\u001b[0m\u001b[33m*\u001b[0m\u001b[33m Token\u001b[0m\u001b[33mizing\u001b[0m\u001b[33m prompt\u001b[0m\u001b[33m templates\u001b[0m\u001b[33m &\u001b[0m\u001b[33m special\u001b[0m\u001b[33m tokens\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m" ] } ], @@ -2007,81 +1338,39 @@ "import uuid\n", "from llama_stack_client import Agent, AgentEventLogger, RAGDocument\n", "from termcolor import cprint\n", - "import requests\n", "\n", "urls = [\"chat.rst\", \"llama3.rst\", \"memory_optimizations.rst\", \"lora_finetune.rst\"]\n", - "\n", - "# Step 1: Upload files\n", - "file_ids = []\n", - "for i, url in enumerate(urls):\n", - " full_url = f\"https://raw.githubusercontent.com/pytorch/torchtune/main/docs/source/tutorials/{url}\"\n", - " response = requests.get(full_url)\n", - " \n", - " file = client.files.create(\n", - " file=response.content,\n", - " purpose='assistants'\n", + "documents = [\n", + " RAGDocument(\n", + " document_id=f\"num-{i}\",\n", + " content=f\"https://raw.githubusercontent.com/pytorch/torchtune/main/docs/source/tutorials/{url}\",\n", + " mime_type=\"text/plain\",\n", + " metadata={},\n", " )\n", - " file_ids.append(file.id)\n", - " \n", + " for i, url in enumerate(urls)\n", + "]\n", "\n", - "# Step 2: Create a vector store with files\n", - "vector_store = client.vector_stores.create(\n", - " name=f\"torchtune-docs-{uuid.uuid4().hex[:8]}\",\n", - " file_ids=file_ids,\n", - " chunking_strategy={\n", - " \"type\": \"static\",\n", - " \"static\": {\n", - " \"max_chunk_size_tokens\": 512,\n", - " \"chunk_overlap_tokens\": 50\n", - " }\n", - " }\n", + "vector_db_id = f\"test-vector-db-{uuid.uuid4().hex}\"\n", + "client.vector_dbs.register(\n", + " vector_db_id=vector_db_id,\n", + " embedding_model=\"nomic-embed-text-v1.5\",\n", + " embedding_dimension=768,\n", + ")\n", + "client.tool_runtime.rag_tool.insert(\n", + " documents=documents,\n", + " vector_db_id=vector_db_id,\n", + " chunk_size_in_tokens=512,\n", ")\n", - "\n", - "cprint(f\"Created vector store: {vector_store.id}\", \"cyan\")" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "3ebacfcb", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/conversations \"HTTP/1.1 200 OK\"\n", - 
"INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[32mUser> What are the top 5 topics that were explained? Only list succinct bullet points.\u001b[0m\n", - "πŸ€” \n", - "\n", - "πŸ”§ Executing file_search (server-side)...\n", - "πŸ€” Here are the top 5 topics explained:\n", - "\n", - "* LoRA (Low-Rank Adaptation) and its application in fine-tuning large language models\n", - "* QLoRA (Quantized Low-Rank Adaptation) and its benefits in reducing memory usage\n", - "* Activation checkpointing and its trade-offs between memory and compute\n", - "* Model precision and its impact on memory usage and training speed\n", - "* Prompt templates and special tokens in Llama2 and Llama3 models\n" - ] - } - ], - "source": [ - "# Ask the agent a question related to the uploaded files\n", "rag_agent = Agent(\n", " client,\n", " model=model_id,\n", " instructions=\"You are a helpful assistant\",\n", " tools = [\n", " {\n", - " \"type\": \"file_search\",\n", - " \"vector_store_ids\": [vector_store.id],\n", + " \"name\": \"builtin::rag/knowledge_search\",\n", + " \"args\" : {\n", + " \"vector_db_ids\": [vector_db_id],\n", + " }\n", " }\n", " ],\n", ")\n", @@ -2096,7 +1385,7 @@ " session_id=session_id,\n", " )\n", " for log in AgentEventLogger().log(response):\n", - " print(log, end=\"\", flush=True)" + " log.print()" ] }, { @@ -2106,7 +1395,7 @@ "id": "jSfjNN9fMxtm" }, "source": [ - "### 2.3. Using Model Context Protocol\n", + "### 2.4. Using Model Context Protocol\n", "\n", "In this example, we will show how tools hosted in an MCP server can be configured to be used by the model.\n", "\n", @@ -2117,7 +1406,7 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 15, "id": "67fDKVVpNuFb", "metadata": { "colab": { @@ -2131,28 +1420,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2mUsing Python 3.12.11 environment at: /Users/erichuang/projects/lst3/.venv\u001b[0m\n", - "\u001b[2K\u001b[2mResolved \u001b[1m3 packages\u001b[0m \u001b[2min 381ms\u001b[0m\u001b[0m \u001b[0m\n", - "\u001b[2K\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1) \n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--------------\u001b[0m\u001b[0m 0 B/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--------------\u001b[0m\u001b[0m 16.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--------------\u001b[0m\u001b[0m 32.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--------------\u001b[0m\u001b[0m 48.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)2m------------\u001b[0m\u001b[0m 64.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--\u001b[2m--------\u001b[0m\u001b[0m 80.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)------\u001b[2m----\u001b[0m\u001b[0m 96.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)----------\u001b[2m\u001b[0m\u001b[0m 112.00 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[1A\u001b[37mβ ™\u001b[0m 
\u001b[2mPreparing packages...\u001b[0m (0/1)----------\u001b[2m\u001b[0m\u001b[0m 112.88 KiB/112.88 KiB \u001b[1A\n", - "\u001b[2K\u001b[2mPrepared \u001b[1m1 package\u001b[0m \u001b[2min 45ms\u001b[0m\u001b[0m \u001b[1A\n", - "\u001b[2K\u001b[2mInstalled \u001b[1m1 package\u001b[0m \u001b[2min 16ms\u001b[0m\u001b[0m \u001b[0m\n", - " \u001b[32m+\u001b[39m \u001b[1mcolab-xterm\u001b[0m\u001b[2m==0.2.0\u001b[0m\n" + "Requirement already satisfied: colab-xterm in /opt/homebrew/Caskroom/miniconda/base/envs/stack/lib/python3.10/site-packages (0.2.0)\n", + "Requirement already satisfied: ptyprocess~=0.7.0 in /opt/homebrew/Caskroom/miniconda/base/envs/stack/lib/python3.10/site-packages (from colab-xterm) (0.7.0)\n", + "Requirement already satisfied: tornado>5.1 in /opt/homebrew/Caskroom/miniconda/base/envs/stack/lib/python3.10/site-packages (from colab-xterm) (6.4.2)\n" ] } ], "source": [ "# NBVAL_SKIP\n", - "## If running on Colab:\n", - "!uv pip install colab-xterm #https://pypi.org/project/colab-xterm/\n", + "!pip install colab-xterm #https://pypi.org/project/colab-xterm/\n", "%load_ext colabxterm" ] }, @@ -2506,64 +1782,14 @@ "id": "giIA2M-ANUIM", "outputId": "612c3487-1fd7-41ab-f65a-690b1325f46d" }, - "outputs": [ - { - "data": { - "text/plain": [ - "Launching Xterm..." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "πŸš€ Listen to 10001\n", - "{\"success\": true, \"reason\": null}\n", - "\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "# NBVAL_SKIP\n", - "## If running on Colab:\n", - "\n", "%xterm\n", - "% mkdir /tmp/content\n", - "\n", - "# To be run in the terminal:\n", - "# touch /tmp/content/foo\n", - "# echo hello > /tmp/content/foo\n", - "# touch /tmp/content/bar\n", - "# npx -y supergateway --port 8000 --stdio 'npx -y @modelcontextprotocol/server-filesystem /tmp/content'" + "# touch /content/foo\n", + "# echo hello > /content/foo\n", + "# touch /content/bar\n", + "# npx -y supergateway --port 8000 --stdio 'npx -y @modelcontextprotocol/server-filesystem /content'" ] }, { @@ -2578,7 +1804,352 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 18, + "id": "DwdKhQb1N295", + "metadata": { + "id": "DwdKhQb1N295" + }, + "outputs": [], + "source": [ + "# NBVAL_SKIP\n", + "from llama_stack_client.types.toolgroup_register_params import McpEndpoint\n", + "client.toolgroups.register(\n", + " toolgroup_id=\"mcp::filesystem\",\n", + " provider_id=\"model-context-protocol\",\n", + " mcp_endpoint=McpEndpoint(uri=\"http://localhost:8000/sse\"),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "ZZ5_vIkDOyAN", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "ZZ5_vIkDOyAN", + "outputId": "f6fa8639-c2d8-497d-f4ed-716b3bf775d4" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
[\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Read the complete contents of a file from the file system. Handles various text encodings and provides detailed error messages if the file cannot be read. Use this tool when you need to examine the contents of a single file. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='read_file',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='path', parameter_type='string', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='read_file',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description=\"Read the contents of multiple files simultaneously. This is more efficient than reading files one by one when you need to analyze or compare multiple files. Each file's content is returned with its path as a reference. Failed reads for individual files won't stop the entire operation. Only works within allowed directories.\",\n",
+       "β”‚   β”‚   identifier='read_multiple_files',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='paths', parameter_type='array', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='read_multiple_files',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Create a new file or completely overwrite an existing file with new content. Use with caution as it will overwrite existing files without warning. Handles text content with proper encoding. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='write_file',\n",
+       "β”‚   β”‚   parameters=[\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='path', parameter_type='string', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='content', parameter_type='string', required=True, default=None)\n",
+       "β”‚   β”‚   ],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='write_file',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Make line-based edits to a text file. Each edit replaces exact line sequences with new content. Returns a git-style diff showing the changes made. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='edit_file',\n",
+       "β”‚   β”‚   parameters=[\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='path', parameter_type='string', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='edits', parameter_type='array', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(\n",
+       "β”‚   β”‚   β”‚   β”‚   description='Preview changes using git-style diff format',\n",
+       "β”‚   β”‚   β”‚   β”‚   name='dryRun',\n",
+       "β”‚   β”‚   β”‚   β”‚   parameter_type='boolean',\n",
+       "β”‚   β”‚   β”‚   β”‚   required=True,\n",
+       "β”‚   β”‚   β”‚   β”‚   default=None\n",
+       "β”‚   β”‚   β”‚   )\n",
+       "β”‚   β”‚   ],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='edit_file',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Create a new directory or ensure a directory exists. Can create multiple nested directories in one operation. If the directory already exists, this operation will succeed silently. Perfect for setting up directory structures for projects or ensuring required paths exist. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='create_directory',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='path', parameter_type='string', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='create_directory',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Get a detailed listing of all files and directories in a specified path. Results clearly distinguish between files and directories with [FILE] and [DIR] prefixes. This tool is essential for understanding directory structure and finding specific files within a directory. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='list_directory',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='path', parameter_type='string', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='list_directory',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description=\"Get a recursive tree view of files and directories as a JSON structure. Each entry includes 'name', 'type' (file/directory), and 'children' for directories. Files have no children array, while directories always have a children array (which may be empty). The output is formatted with 2-space indentation for readability. Only works within allowed directories.\",\n",
+       "β”‚   β”‚   identifier='directory_tree',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='path', parameter_type='string', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='directory_tree',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Move or rename files and directories. Can move files between directories and rename them in a single operation. If the destination exists, the operation will fail. Works across different directories and can be used for simple renaming within the same directory. Both source and destination must be within allowed directories.',\n",
+       "β”‚   β”‚   identifier='move_file',\n",
+       "β”‚   β”‚   parameters=[\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='source', parameter_type='string', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='destination', parameter_type='string', required=True, default=None)\n",
+       "β”‚   β”‚   ],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='move_file',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description=\"Recursively search for files and directories matching a pattern. Searches through all subdirectories from the starting path. The search is case-insensitive and matches partial names. Returns full paths to all matching items. Great for finding files when you don't know their exact location. Only searches within allowed directories.\",\n",
+       "β”‚   β”‚   identifier='search_files',\n",
+       "β”‚   β”‚   parameters=[\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='path', parameter_type='string', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(description='', name='pattern', parameter_type='string', required=True, default=None),\n",
+       "β”‚   β”‚   β”‚   Parameter(\n",
+       "β”‚   β”‚   β”‚   β”‚   description='',\n",
+       "β”‚   β”‚   β”‚   β”‚   name='excludePatterns',\n",
+       "β”‚   β”‚   β”‚   β”‚   parameter_type='array',\n",
+       "β”‚   β”‚   β”‚   β”‚   required=True,\n",
+       "β”‚   β”‚   β”‚   β”‚   default=None\n",
+       "β”‚   β”‚   β”‚   )\n",
+       "β”‚   β”‚   ],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='search_files',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Retrieve detailed metadata about a file or directory. Returns comprehensive information including size, creation time, last modified time, permissions, and type. This tool is perfect for understanding file characteristics without reading the actual content. Only works within allowed directories.',\n",
+       "β”‚   β”‚   identifier='get_file_info',\n",
+       "β”‚   β”‚   parameters=[Parameter(description='', name='path', parameter_type='string', required=True, default=None)],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='get_file_info',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   ),\n",
+       "β”‚   Tool(\n",
+       "β”‚   β”‚   description='Returns the list of directories that this server is allowed to access. Use this to understand which directories are available before trying to access files.',\n",
+       "β”‚   β”‚   identifier='list_allowed_directories',\n",
+       "β”‚   β”‚   parameters=[],\n",
+       "β”‚   β”‚   provider_id='model-context-protocol',\n",
+       "β”‚   β”‚   provider_resource_id='list_allowed_directories',\n",
+       "β”‚   β”‚   tool_host='model_context_protocol',\n",
+       "β”‚   β”‚   toolgroup_id='mcp::filesystem',\n",
+       "β”‚   β”‚   type='tool',\n",
+       "β”‚   β”‚   metadata={'endpoint': 'http://localhost:8000/sse'}\n",
+       "β”‚   )\n",
+       "]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Read the complete contents of a file from the file system. Handles various text encodings and provides detailed error messages if the file cannot be read. Use this tool when you need to examine the contents of a single file. Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'read_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'read_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m\"Read\u001b[0m\u001b[32m the contents of multiple files simultaneously. This is more efficient than reading files one by one when you need to analyze or compare multiple files. Each file's content is returned with its path as a reference. Failed reads for individual files won't stop the entire operation. 
Only works within allowed directories.\"\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'read_multiple_files'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'paths'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'array'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'read_multiple_files'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Create a new file or completely overwrite an existing file with new content. Use with caution as it will overwrite existing files without warning. Handles text content with proper encoding. Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'write_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'content'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'write_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + 
"\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Make line-based edits to a text file. Each edit replaces exact line sequences with new content. Returns a git-style diff showing the changes made. Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'edit_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'edits'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'array'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Preview changes using git-style diff format'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mname\u001b[0m=\u001b[32m'dryRun'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mparameter_type\u001b[0m=\u001b[32m'boolean'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'edit_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Create a new directory or ensure a directory exists. Can create multiple nested directories in one operation. If the directory already exists, this operation will succeed silently. Perfect for setting up directory structures for projects or ensuring required paths exist. 
Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'create_directory'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'create_directory'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Get a detailed listing of all files and directories in a specified path. Results clearly distinguish between files and directories with \u001b[0m\u001b[32m[\u001b[0m\u001b[32mFILE\u001b[0m\u001b[32m]\u001b[0m\u001b[32m and \u001b[0m\u001b[32m[\u001b[0m\u001b[32mDIR\u001b[0m\u001b[32m]\u001b[0m\u001b[32m prefixes. This tool is essential for understanding directory structure and finding specific files within a directory. Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'list_directory'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'list_directory'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m\"Get\u001b[0m\u001b[32m a recursive tree view of files and directories as a JSON structure. 
Each entry includes 'name', 'type' \u001b[0m\u001b[32m(\u001b[0m\u001b[32mfile/directory\u001b[0m\u001b[32m)\u001b[0m\u001b[32m, and 'children' for directories. Files have no children array, while directories always have a children array \u001b[0m\u001b[32m(\u001b[0m\u001b[32mwhich may be empty\u001b[0m\u001b[32m)\u001b[0m\u001b[32m. The output is formatted with 2-space indentation for readability. Only works within allowed directories.\"\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'directory_tree'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'directory_tree'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Move or rename files and directories. Can move files between directories and rename them in a single operation. If the destination exists, the operation will fail. Works across different directories and can be used for simple renaming within the same directory. 
Both source and destination must be within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'move_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'source'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'destination'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'move_file'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m\"Recursively\u001b[0m\u001b[32m search for files and directories matching a pattern. Searches through all subdirectories from the starting path. The search is case-insensitive and matches partial names. Returns full paths to all matching items. Great for finding files when you don't know their exact location. 
Only searches within allowed directories.\"\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'search_files'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'pattern'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mname\u001b[0m=\u001b[32m'excludePatterns'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mparameter_type\u001b[0m=\u001b[32m'array'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'search_files'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Retrieve detailed metadata about a file or directory. Returns comprehensive information including size, creation time, last modified time, permissions, and type. This tool is perfect for understanding file characteristics without reading the actual content. 
Only works within allowed directories.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'get_file_info'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1;35mParameter\u001b[0m\u001b[1m(\u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m''\u001b[0m, \u001b[33mname\u001b[0m=\u001b[32m'path'\u001b[0m, \u001b[33mparameter_type\u001b[0m=\u001b[32m'string'\u001b[0m, \u001b[33mrequired\u001b[0m=\u001b[3;92mTrue\u001b[0m, \u001b[33mdefault\u001b[0m=\u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'get_file_info'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1;35mTool\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mdescription\u001b[0m=\u001b[32m'Returns the list of directories that this server is allowed to access. Use this to understand which directories are available before trying to access files.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33midentifier\u001b[0m=\u001b[32m'list_allowed_directories'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mparameters\u001b[0m=\u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_id\u001b[0m=\u001b[32m'model-context-protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mprovider_resource_id\u001b[0m=\u001b[32m'list_allowed_directories'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtool_host\u001b[0m=\u001b[32m'model_context_protocol'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtoolgroup_id\u001b[0m=\u001b[32m'mcp::filesystem'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mtype\u001b[0m=\u001b[32m'tool'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[33mmetadata\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'endpoint'\u001b[0m: \u001b[32m'http://localhost:8000/sse'\u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[1m]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "pprint(client.tools.list(toolgroup_id=\"mcp::filesystem\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, "id": "vttLbj_YO01f", "metadata": { "colab": { @@ -2588,44 +2159,50 @@ "outputId": "04bc486c-3a61-49c6-d0d2-4a211d6de0b5" }, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/conversations \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ "\u001b[32mUser> Hello\u001b[0m\n", - "πŸ€” \n", - "\n", - "πŸ”§ Executing mcp_list_tools (server-side)...\n", - "πŸ€” Hi there! 
How can I assist you today?\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[32mUser> Whats written in /private/tmp/content/foo ?\u001b[0m\n", - "πŸ€” \n", - "\n", - "πŸ”§ Executing mcp_list_tools (server-side)...\n", - "πŸ€” \n", - "\n", - "πŸ”§ Executing mcp_call (server-side)...\n", - "πŸ€” The file located at `/private/tmp/content/foo` contains the text \"hi\".\n" + "\u001b[33minference> \u001b[0m\u001b[33m[list\u001b[0m\u001b[33m_allowed\u001b[0m\u001b[33m_direct\u001b[0m\u001b[33mories\u001b[0m\u001b[33m()]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_allowed_directories Args:{}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_allowed_directories Response:{\"type\":\"text\",\"text\":\"Allowed directories:\\n/tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[list\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m\"),\u001b[0m\u001b[33m create\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Access denied - path outside allowed directories: /tmp not in /tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[list\u001b[0m\u001b[33m_allowed\u001b[0m\u001b[33m_direct\u001b[0m\u001b[33mories\u001b[0m\u001b[33m()]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_allowed_directories Args:{}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_allowed_directories Response:{\"type\":\"text\",\"text\":\"Allowed directories:\\n/tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m/sub\u001b[0m\u001b[33mdir\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp/content/subdir'}\u001b[0m\n", + "\u001b[32mtool_execution> 
Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[list\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:list_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[list\u001b[0m\u001b[33m_allowed\u001b[0m\u001b[33m_direct\u001b[0m\u001b[33mories\u001b[0m\u001b[33m()]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m\u001b[32mUser> Whats written in /tmp/content/foo ?\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mread\u001b[0m\u001b[33m_file\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m/foo\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:read_file Args:{'path': '/tmp/content/foo'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:read_file Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mcreate\u001b[0m\u001b[33m_directory\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Args:{'path': '/tmp/content'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:create_directory Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33m[\u001b[0m\u001b[33mwrite\u001b[0m\u001b[33m_file\u001b[0m\u001b[33m(path\u001b[0m\u001b[33m=\"/\u001b[0m\u001b[33mtmp\u001b[0m\u001b[33m/content\u001b[0m\u001b[33m/foo\u001b[0m\u001b[33m\",\u001b[0m\u001b[33m content\u001b[0m\u001b[33m=\"\u001b[0m\u001b[33mHello\u001b[0m\u001b[33m World\u001b[0m\u001b[33m!\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:write_file Args:{'path': '/tmp/content/foo', 'content': 'Hello World!'}\u001b[0m\n", + "\u001b[32mtool_execution> 
Tool:write_file Response:{\"type\":\"text\",\"text\":\"Error: Parent directory does not exist: /tmp/content\",\"annotations\":null}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[31m500: Internal server error: An unexpected error occurred.\u001b[0m\n" ] } ], @@ -2638,15 +2215,11 @@ " client,\n", " model=model_id,\n", " instructions=\"You are a helpful assistant\",\n", - " tools=[{\n", - " \"type\": \"mcp\",\n", - " \"server_label\": \"filesystem\",\n", - " \"server_url\": \"http://localhost:8000/sse\",\n", - " }],\n", + " tools=[\"mcp::filesystem\"],\n", ")\n", "user_prompts = [\n", " \"Hello\",\n", - " \"Whats written in /private/tmp/content/foo ?\",\n", + " \"Whats written in /content/foo ?\",\n", "]\n", "\n", "session_id = agent.create_session(\"test-session\")\n", @@ -2662,7 +2235,590 @@ " session_id=session_id,\n", " )\n", " for log in AgentEventLogger().log(response):\n", - " print(log, end=\"\", flush=True)\n" + " log.print()\n" + ] + }, + { + "cell_type": "markdown", + "id": "FJ85DUhgBZd7", + "metadata": { + "id": "FJ85DUhgBZd7" + }, + "source": [ + "## 3. Llama Stack Agent Evaluations\n" + ] + }, + { + "cell_type": "markdown", + "id": "ydeBDpDT5VHd", + "metadata": { + "id": "ydeBDpDT5VHd" + }, + "source": [ + "#### 3.1. Online Evaluation Dataset Collection\n", + "\n", + "- Llama Stack allows you to query each steps of the agents execution in your application. \n", + "- In this example, we will show how to \n", + " 1. build an Agent with Llama Stack\n", + " 2. Query the agent's session, turns, and steps\n", + " 3. Evaluate the results" + ] + }, + { + "cell_type": "markdown", + "id": "_t_tcWq0JcJ4", + "metadata": { + "id": "_t_tcWq0JcJ4" + }, + "source": [ + "##### 3.1.1. Building a Search Agent\n", + "\n", + "First, let's build an agent that have access to a search tool with Llama Stack, and use it to run some user queries. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4iCO59kP20Zs", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "4iCO59kP20Zs", + "outputId": "894c6333-30e9-4f1e-9b63-1bfb1cae51e2" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33minference> \u001b[0m\u001b[36m\u001b[0m\u001b[36mbr\u001b[0m\u001b[36mave\u001b[0m\u001b[36m_search\u001b[0m\u001b[36m.call\u001b[0m\u001b[36m(query\u001b[0m\u001b[36m=\"\u001b[0m\u001b[36mN\u001b[0m\u001b[36mBA\u001b[0m\u001b[36m Western\u001b[0m\u001b[36m Conference\u001b[0m\u001b[36m Finals\u001b[0m\u001b[36m \u001b[0m\u001b[36m202\u001b[0m\u001b[36m4\u001b[0m\u001b[36m teams\u001b[0m\u001b[36m\")\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Args:{'query': 'NBA Western Conference Finals 2024 teams'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Response:{\"query\": \"NBA Western Conference Finals 2024 teams\", \"top_k\": [{\"title\": \"2024 NBA Western Conference Finals - Basketball-Reference.com\", \"url\": \"https://www.basketball-reference.com/playoffs/2024-nba-western-conference-finals-mavericks-vs-timberwolves.html\", \"content\": \"2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. 5 Dallas Mavericks (4-1) vs. 7 Derrick Jones Jr. 2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. College Tools: Player Season Finder, Player Game Finder, Team Season Finder, Team Game Finder Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards ... 
Players, Teams, Seasons, Leaders, Awards, All-Star Games, Executives ... Players, Teams, Seasons, Leaders, Awards ... Subscribe to Stathead Basketball: Get your first month FREE The SPORTS REFERENCE, STATHEAD, IMMACULATE GRID, and IMMACULATE FOOTY trademarks are owned exclusively by Sports Reference LLC. Sports\\u00a0Reference\\u202f\\u00ae Baseball Football (college) Basketball (college) Hockey F\\u00fatbol Blog Stathead\\u202f\\u00ae Immaculate Grid\\u202f\\u00ae\", \"score\": 0.89030397, \"raw_content\": null}, {\"title\": \"NBA Standings - 2024-25 season - ESPN\", \"url\": \"https://www.espn.com/nba/standings\", \"content\": \"NBA Standings - 2024-25 season - ESPN Skip to main contentSkip to navigation ESPN NFL NBA NCAAF NHL NCAAM NCAAW Soccer More Sports Watch Fantasy NBA Home Scores Schedule Standings Stats Teams Odds Where To Watch All-Star Game Fantasy More NBA Standings 2024-25 Standings Expanded Vs. Division NBA Cup LeagueConferenceDivision Eastern Conference | | | --- | | 1CLECleveland Cavaliers | | 2BOSBoston Celtics | | 3NYNew York Knicks | | 4INDIndiana Pacers | | 5MILMilwaukee Bucks | | 6DETDetroit Pistons | | 7MIAMiami Heat | | 8ORLOrlando Magic | | 9ATLAtlanta Hawks | | 10CHIChicago Bulls | | PHIPhiladelphia 76ers | | BKNBrooklyn Nets | | TORToronto Raptors | | CHACharlotte Hornets | | WSHWashington Wizards | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 42 | 10 | .808 | - | 24-4 | 18-6 | 9-1 | 28-7 | 122.4 | 112.1 | +10.3 | W2 | 6-4 | | 36 | 16 | .692 | 6 | 16-10 | 20-6 | 6-2 | 26-9 | 117.3 | 108.8 | +8.5 | L1 | 7-3 | | 34 | 17 | .667 | 7.5 | 18-9 | 16-8 | 9-1 | 23-10 | 117.9 | 111.4 | +6.5 | W2 | 8-2 | | 29 | 21 | .580 | 12 | 14-7 | 14-13 | 6-4 | 17-15 | 115.7 | 114.9 | +0.8 | W1 | 7-3 | | 27 | 23 | .540 | 14 | 16-8 | 10-15 | 6-5 | 22-16 | 114.2 | 112.6 | +1.6 | L1 | 4-6 | | 26 | 26 | .500 | 16 | 13-13 | 13-13 | 2-9 | 18-20 | 113.0 | 113.8 | -0.8 | W1 | 5-5 | | 25 | 25 | .500 | 16 | 12-10 | 12-15 | 5-3 | 14-15 | 110.5 | 110.6 | -0.1 | L1 | 5-5 | | 25 | 28 | .472 | 17.5 | 15-9 | 10-19 | 5-2 | 20-15 | 103.8 | 105.6 | -1.8 | L1 | 2-8 | | 24 | 28 | .462 | 18 | 12-12 | 12-15 | 4-2 | 17-13 | 116.1 | 119.0 | -2.9 | W1 | 2-8 | | 22 | 30 | .423 | 20 | 10-16 | 12-14 | 3-7 | 17-18 | 116.7 | 120.1 | -3.4 | L1 | 4-6 | | 20 | 31 | .392 | 21.5 | 10-16 | 10-15 | 3-4 | 14-17 | 109.1 | 112.9 | -3.8 | L2 | 5-5 | | 18 | 34 | .346 | 24 | 7-17 | 11-17 | 1-8 | 9-23 | 105.3 | 111.7 | -6.4 | W1 | 4-6 | | 16 | 36 | .308 | 26 | 12-16 | 4-20 | 3-7 | 10-23 | 111.2 | 116.9 | -5.7 | L3 | 6-4 | | 13 | 36 | .265 | 27.5 | 9-20 | 4-16 | 0-9 | 7-27 | 107.1 | 112.3 | -5.2 | W1 | 2-8 | | 9 | 42 | .176 | 32.5 | 5-20 | 4-21 | 5-3 | 7-21 | 107.8 | 121.5 | -13.7 | L1 | 3-7 | Western Conference | | | --- | | 1OKCOklahoma City Thunder | | 2MEMMemphis Grizzlies | | 3DENDenver Nuggets | | 4HOUHouston Rockets | | 5LALLos Angeles Lakers | | 6MINMinnesota Timberwolves | | 7LACLA Clippers | | 8DALDallas Mavericks | | 9PHXPhoenix Suns | | 10SACSacramento Kings | | GSGolden State Warriors | | SASan Antonio Spurs | | PORPortland Trail Blazers | | UTAHUtah Jazz | | NONew Orleans Pelicans | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 41 | 9 | .820 | - | 23-3 | 17-6 | 7-1 | 23-8 | 117.7 | 104.7 | +13.0 | W4 | 7-3 | | 35 | 16 | .686 | 6.5 | 21-5 | 14-11 | 8-4 | 19-12 | 123.8 | 115.4 | +8.4 
| W4 | 9-1 | | 33 | 19 | .635 | 9 | 17-8 | 16-11 | 4-4 | 19-12 | 120.8 | 115.9 | +4.9 | W5 | 7-3 | | 32 | 20 | .615 | 10 | 15-8 | 17-11 | 9-3 | 19-12 | 113.3 | 109.1 | +4.2 | L6 | 4-6 | | 30 | 19 | .612 | 10.5 | 17-6 | 13-13 | 9-3 | 19-11 | 112.6 | 112.0 | +0.6 | W4 | 8-2 | | 29 | 23 | .558 | 13 | 14-12 | 15-11 | 4-3 | 21-14 | 111.7 | 108.2 | +3.5 | W2 | 7-3 | | 28 | 23 | .549 | 13.5 | 17-10 | 11-13 | 6-4 | 17-18 | 110.1 | 107.7 | +2.4 | L3 | 4-6 | | 28 | 25 | .528 | 14.5 | 15-10 | 13-15 | 6-4 | 20-17 | 115.5 | 113.3 | +2.2 | W2 | 5-5 | | 26 | 25 | .510 | 15.5 | 16-9 | 10-16 | 7-4 | 17-14 | 113.4 | 114.7 | -1.3 | W1 | 5-5 | | 25 | 26 | .490 | 16.5 | 13-13 | 12-13 | 4-6 | 16-17 | 116.1 | 115.4 | +0.7 | L2 | 4-6 | | 25 | 26 | .490 | 16.5 | 15-13 | 10-13 | 1-10 | 17-18 | 111.5 | 111.9 | -0.4 | L2 | 4-6 | | 22 | 27 | .449 | 18.5 | 13-12 | 8-14 | 2-7 | 16-18 | 112.8 | 114.3 | -1.5 | L1 | 3-7 | | 23 | 29 | .442 | 19 | 15-13 | 8-16 | 4-5 | 14-24 | 109.0 | 113.9 | -4.9 | W6 | 9-1 | | 12 | 38 | .240 | 29 | 5-18 | 7-20 | 1-7 | 4-29 | 111.9 | 118.9 | -7.0 | L1 | 2-8 | | 12 | 39 | .235 | 29.5 | 8-18 | 4-21 | 1-8 | 6-23 | 110.0 | 118.8 | -8.8 | L7 | 3-7 | Standings are updated with the completion of each game.Teams seeded 7-10 in each conference will compete in a play-in tournament at the end of the regular season. Glossary W:Wins L:Losses PCT:Winning Percentage GB:Games Back HOME:Home Record AWAY:Away Record DIV:Division Record CONF:Conference Record PPG:Points Per Game OPP PPG:Opponent Points Per Game DIFF:Average Point Differential STRK:Current Streak L10:Record last 10 games NBA News Anthony Davis leads Mavericks past Rockets 116-105 in Mavs debut but leaves with lower-body injury -------------------------------------------------------------------------------------------------- \\u2014 Anthony Davis had 26 points, 16 rebounds, seven assists and three blocks in his Mavericks debut but left the game late in the third quarter with a... * 38m Hawks request waivers on newly acquired Bones Hyland ---------------------------------------------------- The Atlanta Hawks requested waivers on guard Bones Hyland on Saturday, just two days after the guard was obtained from the Clippers in a deal at the NBA trade deadline. * 1h AD posts 26-point double-double in debut before suffering injury ---------------------------------------------------------------- Anthony Davis has a strong debut with the Mavs, dropping 26 points, 16 rebounds and 7 assists, before leaving with a lower-body injury. * 1h All NBA News Terms of Use Privacy Policy Your US State Privacy Rights Children's Online Privacy Policy Interest-Based Ads About Nielsen Measurement Do Not Sell or Share My Personal Information Contact Us Disney Ad Sales Site Work for ESPN Corrections ESPN BET Sportsbook is owned and operated by PENN Entertainment, Inc. and its subsidiaries ('PENN').\", \"score\": 0.83549726, \"raw_content\": null}, {\"title\": \"2024 Playoffs: West Finals | Timberwolves (3) vs. Mavericks (5) | NBA.com\", \"url\": \"https://www.nba.com/playoffs/2024/west-final\", \"content\": \"Mavericks (5) | NBA.com 2024-25 NBA CrunchTime NBA TV Draft Kings DFS NBA Bet Home NBA Store NBA Game Worn NBA Photo Store NBA Experiences NBA G League NBA 2K League NBA Play NBA Bet ### Doncic, Irving carry Mavs to NBA Finals Luka Doncic and Kyrie Irving pour in 36 points apiece to guide Dallas to its 1st appearance in the NBA Finals since 2011. 
### Luka: 'This is special, coming from the West' Luka Doncic with Ernie, Charles, Kenny & Shaq about the Mavs being NBA Finals-bound, his Game 5 play and more. NBA Organization NBA ID NBA Official NBA Careers NBA Initiatives NBA Cares NBA Foundation NBA Communications NBA Transactions NBA Auctions NBA Photostore\", \"score\": 0.75312227, \"raw_content\": null}, {\"title\": \"2024 NBA Playoffs | Official Bracket, Schedule and Series Matchups\", \"url\": \"https://www.nba.com/playoffs/2024?os=wtmbloozowcj&ref=app\", \"content\": \"Draft Kings DFS NBA Store NBA Play NBA Finals ### Chasing History: Celtics clinch banner 18 (Ep. 25) Jayson Tatum and Finals MVP Jaylen Brown close out Dallas in Game 5 to secure Boston's NBA-record 18th championship. WE DID ITTTT!' Jayson Tatum walkoff interview after Celtics defeat Mavericks in Game 5 of 2024 NBA Finals, clinching title with a 4-1 series win. ### Horford finally champ after key sacrifice Al Horford, who played the most playoff games in NBA history before winning his 1st title, crosses the plateau in his 17th season. 30:13 ### Best of the 2024 NBA Finals 17:47 ### Best of Boston Celtics from the 2024 NBA Finals\", \"score\": 0.63234437, \"raw_content\": null}, {\"title\": \"2025 NBA Playoffs: Standings, bracket and clinching updates\", \"url\": \"https://www.nba.com/news/2025-nba-playoffs-standings-and-bracket-updates\", \"content\": \"NBA TV NBA Play NBA Store NBA Game Worn NBA Play NBA Official NBA Playoffs bracket ### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. Click \\\"Access Content\\\" to agree to our Terms of Use and Privacy Policy and to sign up for emails about the latest news and products from the NBA Family and its partners. #### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. 
NBA ID NBA Official NBA Transactions NBA Auctions\", \"score\": 0.13435538, \"raw_content\": null}]}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33mThe\u001b[0m\u001b[33m teams\u001b[0m\u001b[33m that\u001b[0m\u001b[33m played\u001b[0m\u001b[33m in\u001b[0m\u001b[33m the\u001b[0m\u001b[33m NBA\u001b[0m\u001b[33m Western\u001b[0m\u001b[33m Conference\u001b[0m\u001b[33m Finals\u001b[0m\u001b[33m of\u001b[0m\u001b[33m \u001b[0m\u001b[33m202\u001b[0m\u001b[33m4\u001b[0m\u001b[33m were\u001b[0m\u001b[33m the\u001b[0m\u001b[33m Dallas\u001b[0m\u001b[33m Mavericks\u001b[0m\u001b[33m and\u001b[0m\u001b[33m the\u001b[0m\u001b[33m Minnesota\u001b[0m\u001b[33m Timber\u001b[0m\u001b[33mw\u001b[0m\u001b[33molves\u001b[0m\u001b[33m.\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m\u001b[33minference> \u001b[0m\u001b[36m\u001b[0m\u001b[36mbr\u001b[0m\u001b[36mave\u001b[0m\u001b[36m_search\u001b[0m\u001b[36m.call\u001b[0m\u001b[36m(query\u001b[0m\u001b[36m=\"\u001b[0m\u001b[36mSouth\u001b[0m\u001b[36m Park\u001b[0m\u001b[36m Bill\u001b[0m\u001b[36m Cosby\u001b[0m\u001b[36m episode\u001b[0m\u001b[36m season\u001b[0m\u001b[36m\")\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Args:{'query': 'South Park Bill Cosby episode season'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Response:{\"query\": \"South Park Bill Cosby episode season\", \"top_k\": [{\"title\": \"Bill Cosby | South Park Archives | Fandom\", \"url\": \"https://southpark.fandom.com/wiki/Bill_Cosby\", \"content\": \"SIGN IN CHARACTERS SIGN IN Explore EXPLORE CHARACTERS SIGN IN TO EDIT Character Information For other uses, see Bill (Disambiguation). Bill Cosby is elderly, having gray hair as well as various facial wrinkles. More Information: Criminal Celebrities More Information: Movie Celebrities Minor Characters from Season Four More information: List of Minor Characters from Season Four | Season Four Community content is available under CC-BY-SA unless otherwise noted. EXPLORE PROPERTIES FOLLOW US Terms of Use Global Sitemap Local Sitemap Follow on IG\", \"score\": 0.48294178, \"raw_content\": null}, {\"title\": \"Stunning and Brave - Wikipedia\", \"url\": \"https://en.wikipedia.org/wiki/Stunning_and_Brave\", \"content\": \"South Park episode \\\"Stunning and Brave\\\" is the first episode in the nineteenth season of the American animated television series South Park. Cartman and others mount an 'anti-PC' assault on the fraternity house but Kyle interrupts it, publicly calling Jenner a hero and brave. IGN's Max Nicholson gave the episode a 7.8 out of 10 and stated \\\"South Park's latest episode took on political correctness with scathing wit and truly outrageous moments.\\\"[1] \\\"South Park: \\\"Stunning and Brave\\\" Review\\\". \\\"South Park: Stunning and Brave Review\\\". \\\"South Park: Stunning and Brave\\\". \\\"South Park premiere is 'Stunning and Brave'\\\". \\\"Stunning and Brave\\\" Full episode at South Park Studios South Park episodes\", \"score\": 0.21465065, \"raw_content\": null}, {\"title\": \"Here Comes the Neighborhood - Wikipedia\", \"url\": \"https://en.wikipedia.org/wiki/Here_Comes_the_Neighborhood\", \"content\": \"\\\"Here Comes the Neighborhood\\\" is the 12th episode of the fifth season of the animated television series South Park, and the 77th episode of the series overall. 
Despondent at his social estrangement, Token decides to arrange for dozens of rich people (who all happen to be black) such as Will Smith and Snoop Dogg to move into South Park, which leads to Mr. Garrison complaining about the \\\"richers\\\" in the town, which in turn leads to ire among the other, less affluent members of the community (who all happen to be white). \\\"Here Comes the Neighborhood,\\\" along with the thirteen other episodes from South Park: the Complete Fifth Season, was released on a three-disc DVD set in the United States on February 22, 2005. South Park: The Complete Fifth Season: \\\"Here Comes the Neighborhood\\\" (DVD Disc audio commentary). \\\"Here Comes the Neighborhood\\\" Full episode at South Park Studios\", \"score\": 0.19947985, \"raw_content\": null}, {\"title\": \"Trapper Keeper | South Park Archives | Fandom\", \"url\": \"https://southpark.fandom.com/wiki/Trapper_Keeper\", \"content\": \"Trapper Keeper | South Park Archives | Fandom Episodes Episodes in: Episodes, Featured Article Winners, Season 4, Episodes Focusing On Cartman | Episode no. Episode 12 | | List of all South Park episodes | \\\"Trapper Keeper\\\" is the twelfth episode of Season Four and the 60th overall episode of South Park. Kyle takes a Dawson's Creek Trapper Keeper with him to school and is soon met by Cartman. Cartman brags about his Dawson's Creek Trapper Keeper Ultra Keeper Futura S 2000, of which shows off many features that far exceed Kyle's. \\u2191 Jump up to: 1.0 1.1 Trapper Keeper (Season 4, Episode 13). Episodes Episodes Focusing On Cartman Espa\\u00f1ol Fran\\u00e7ais Italiano Portugu\\u00eas do Brasil \\u4e2d\\u6587\", \"score\": 0.1287991, \"raw_content\": null}, {\"title\": \"\\\"South Park\\\" Trapper Keeper (TV Episode 2000) - IMDb\", \"url\": \"https://www.imdb.com/title/tt0705978/\", \"content\": \"Trapper Keeper is a very interesting south park episode.It spoofs plenty of classic sci-fi films such as The Terminator, 2001 A Space Odyssea and the 1988 anime classic Akira(great movie if you haven't seen it).The episode has Cartman with a Dawson's Creek Trapper Keeper.A trapper keeper seems to be a school supply(I have never heard of it before).Cartman's trapper keeper is very advanced in terms of technology, however a robot from the future has come to destroy it because the trapper keeper takes over the world and destroys humanity in the future.The Trapper Keeper turns into a big blob(like in Akira) and sucks Cartman in.It then roams South Park and the boys try to stop it.Meanwhile, Kyle's brother Ike starts kindergarten where Garrison is the teacher.They decide to have a vote for class president between Ike and a kid named Filmore.It turns into a heated debate!\", \"score\": 0.12658015, \"raw_content\": null}]}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33mBill\u001b[0m\u001b[33m Cosby\u001b[0m\u001b[33m first\u001b[0m\u001b[33m appears\u001b[0m\u001b[33m in\u001b[0m\u001b[33m the\u001b[0m\u001b[33m episode\u001b[0m\u001b[33m \"\u001b[0m\u001b[33mTr\u001b[0m\u001b[33mapped\u001b[0m\u001b[33m in\u001b[0m\u001b[33m the\u001b[0m\u001b[33m Closet\u001b[0m\u001b[33m\"\u001b[0m\u001b[33m (\u001b[0m\u001b[33mSeason\u001b[0m\u001b[33m \u001b[0m\u001b[33m9\u001b[0m\u001b[33m,\u001b[0m\u001b[33m Episode\u001b[0m\u001b[33m \u001b[0m\u001b[33m12\u001b[0m\u001b[33m)\u001b[0m\u001b[33m of\u001b[0m\u001b[33m South\u001b[0m\u001b[33m Park\u001b[0m\u001b[33m.\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m\u001b[33minference> 
\u001b[0m\u001b[36m\u001b[0m\u001b[36mbr\u001b[0m\u001b[36mave\u001b[0m\u001b[36m_search\u001b[0m\u001b[36m.call\u001b[0m\u001b[36m(query\u001b[0m\u001b[36m=\"\u001b[0m\u001b[36mAndrew\u001b[0m\u001b[36m Tate\u001b[0m\u001b[36m kick\u001b[0m\u001b[36mboxing\u001b[0m\u001b[36m name\u001b[0m\u001b[36m\")\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Args:{'query': 'Andrew Tate kickboxing name'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:brave_search Response:{\"query\": \"Andrew Tate kickboxing name\", \"top_k\": [{\"title\": \"Andrew Tate Height, Weight, Biography, Age, Wife ... - News Unzip\", \"url\": \"https://www.newsunzip.com/wiki/andrew-tate/\", \"content\": \"Andrew Tate aka King Cobra (Real Name: 'Emory Andrew Tate III', born 1 December 1986, Age: 36 Years) is a professional kickboxer, MMA fighter, internet. Monday , 10 March 2025 ... Andrew's Kickboxing and MMA Record Andrew Tate Net worth, Lifestyle & Cars Collection. Andrew is a millionaire businessman. He makes a lot of money from his\", \"score\": 0.85995835, \"raw_content\": null}, {\"title\": \"The Life Of Andrew Tate (By Andrew Tate Himself ... - Sidekick Boxing\", \"url\": \"https://sidekickboxing.co.uk/the-life-of-andrew-king-cobra-tate/\", \"content\": \"Andrew Tate is a British-American former professional kickboxing world champion who fought in the cruiserweight and super cruiserweight divisions. Andrew Tate\\u2019s Kickboxing Career Andrew Tate in the Big Brother house Andrew Tate\\u2019s Kickboxing World Titles and his Sidekick boxing gloves Andrew Tate After Kickboxing Andrew Tate and his brother Tristan moved to Romania to set up their empire of businesses including trading in Bitcoin, Hustlers University, CobraTate.com, The Real World, and The War Room. From being a 4x kickboxing world champion to becoming the world\\u2019s most Googled man in the world with a private jet and over 33 cars, Andrew Tate\\u2019s life has been full of adventure.\", \"score\": 0.78194773, \"raw_content\": null}, {\"title\": \"Andrew Tate (\\\"King Cobra\\\") | MMA Fighter Page - Tapology\", \"url\": \"https://www.tapology.com/fightcenter/fighters/72139-andrew-tate\", \"content\": \"Andrew Tate (\\\"King Cobra\\\") | MMA Fighter Page | Tapology Andrew \\\"King Cobra\\\" Tate Andrew Tate Name: Andrew Tate Height: 6'1\\\" (185cm) | Reach: Andrew Tate is ineligible for Tapology's regional MMA rankings due to inactivity. Fighters must have at least one completed MMA bout in the past two years to be ranked. Andrew Tate MMA Fight Record Former top-ranked UFC fighter has called out Andrew Tate for having a paper title when it comes to combat... Andrew Tate \\u2022 All the biggest upcoming MMA & Boxing fights | UFC Fight Night | 02.01.2025, 12:00 PM ET | MMA Junkie: UFC Fight Night 249 video: Nine stoppages to open the year?! MMA Mania: Prochazka Vs. Hill: Odds, Full Fight Preview & Prediction\", \"score\": 0.6999322, \"raw_content\": null}, {\"title\": \"Andrew Tate: Kickboxing Record, Facts, Height, Weight, Age, Biography\", \"url\": \"https://www.lowkickmma.com/andrew-tate-kickboxing-record-facts-height-weight-age-biography/\", \"content\": \"Who is Andrew Tate? Andrew Tate is a businessman, internet personality, and former professional kickboxer. Where is Andrew Tate From? Who is Andrew Tate\\u2019s Father? Andrew Tate Kickboxing Record What Kickboxing Gym Did Andrew Tate Train Out Of? How Many Professional Kickboxing Matches Has Andrew Tate Participated In? 
Andrew Tate competed in a total of 86 professional kickboxing bouts. What is Andrew Tate\\u2019 Professional Kickboxing Record? What Weight Classes Did Andrew Tate Compete In? In his professional kickboxing career, Andrew Tate won 32 of his fights by knockout. Did Andrew Tate Compete For Any Championship Titles? Did Tate Ever Compete In MMA? Andrew Tate competed in 1 professional MMA bout. How Much Money Did Andrew Tate Make In Kickboxing?\", \"score\": 0.50930125, \"raw_content\": null}, {\"title\": \"Andrew Tate - Wikipedia\", \"url\": \"https://en.wikipedia.org/wiki/Andrew_Tate\", \"content\": \"In 2011, Tate won his first International Sport Kickboxing Association (ISKA) world title in a rematch against Jean-Luc Beno\\u00eet via knockout, having previously lost to Beno\\u00eet by decision.[41] In 2012, Tate lost to Sahak Parparyan by unanimous decision while challenging for his It's Showtime 85MAX Championship.[42] Later that year, Tate lost the Enfusion championship tournament to Franci Graj\\u0161.[1] Before his loss, he was ranked second-best light-heavyweight kickboxer in the world.[43] In 2013, Tate won his second ISKA world title in a 12-round match against Vincent Petitjean, making him world champion in two weight divisions.[44] He defended the ISKA Belt and Won the Enfusion Belt in 2014, making him a four-time world champion[45] before he retired with 31 recorded fights.[46]\", \"score\": 0.49904844, \"raw_content\": null}]}\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33mAndrew\u001b[0m\u001b[33m Tate\u001b[0m\u001b[33m's\u001b[0m\u001b[33m kick\u001b[0m\u001b[33mboxing\u001b[0m\u001b[33m name\u001b[0m\u001b[33m is\u001b[0m\u001b[33m \"\u001b[0m\u001b[33mKing\u001b[0m\u001b[33m Cobra\u001b[0m\u001b[33m\".\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m" + ] + } + ], + "source": [ + "from llama_stack_client import Agent, AgentEventLogger\n", + "\n", + "agent = Agent(\n", + " client,\n", + " model=\"meta-llama/Llama-3.3-70B-Instruct\",\n", + " instructions=\"You are a helpful assistant. Use web_search tool to answer the questions.\",\n", + " tools=[\"builtin::websearch\"],\n", + ")\n", + "user_prompts = [\n", + " \"Which teams played in the NBA western conference finals of 2024. Search the web for the answer.\",\n", + " \"In which episode and season of South Park does Bill Cosby (BSM-471) first appear? Give me the number and title. Search the web for the answer.\",\n", + " \"What is the British-American kickboxer Andrew Tate's kickboxing name? Search the web for the answer.\",\n", + "]\n", + "\n", + "session_id = agent.create_session(uuid.uuid4().hex)\n", + "\n", + "for prompt in user_prompts:\n", + " response = agent.create_turn(\n", + " messages=[\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": prompt,\n", + " }\n", + " ],\n", + " session_id=session_id,\n", + " )\n", + "\n", + " for log in AgentEventLogger().log(response):\n", + " log.print()\n" + ] + }, + { + "cell_type": "markdown", + "id": "d0a50faf", + "metadata": {}, + "source": [ + "##### 3.1.2 Query Agent Execution Steps\n", + "\n", + "Now, let's look deeper into the agent's execution steps and see if how well our agent performs. As a sanity check, we will first check if all user prompts is followed by a tool call to `brave_search`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c28ea2d1", + "metadata": {}, + "outputs": [], + "source": [ + "# query the agents session\n", + "from rich.pretty import pprint\n", + "\n", + "session_response = client.agents.session.retrieve(\n", + " session_id=session_id,\n", + " agent_id=agent.agent_id,\n", + ")\n", + "\n", + "pprint(session_response.turns)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "f87a376d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3/3 user prompts are followed by a tool call to `brave_search`\n" + ] + } + ], + "source": [ + "num_tool_call = 0\n", + "for turn in session_response.turns:\n", + " for step in turn.steps:\n", + " if step.step_type == \"tool_execution\" and step.tool_calls[0].tool_name == \"brave_search\":\n", + " num_tool_call += 1\n", + "\n", + "print(f\"{num_tool_call}/{len(session_response.turns)} user prompts are followed by a tool call to `brave_search`\")" + ] + }, + { + "cell_type": "markdown", + "id": "ed69220f", + "metadata": {}, + "source": [ + "##### 3.1.3 Evaluate Agent Responses\n", + "\n", + "Now, we want to evaluate the agent's responses to the user prompts. \n", + "\n", + "1. First, we will process the agent's execution history into a list of rows that can be used for evaluation.\n", + "2. Next, we will label the rows with the expected answer.\n", + "3. Finally, we will use the `/scoring` API to score the agent's responses." + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "a2b293bc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
[\n",
+       "β”‚   {\n",
+       "β”‚   β”‚   'input_query': 'Which teams played in the NBA western conference finals of 2024. Search the web for the answer.',\n",
+       "β”‚   β”‚   'generated_answer': 'The teams that played in the NBA Western Conference Finals of 2024 were the Dallas Mavericks and the Minnesota Timberwolves.',\n",
+       "β”‚   β”‚   'expected_answer': 'Dallas Mavericks and the Minnesota Timberwolves'\n",
+       "β”‚   },\n",
+       "β”‚   {\n",
+       "β”‚   β”‚   'input_query': 'In which episode and season of South Park does Bill Cosby (BSM-471) first appear? Give me the number and title. Search the web for the answer.',\n",
+       "β”‚   β”‚   'generated_answer': 'Bill Cosby first appears in the episode \"Trapped in the Closet\" (Season 9, Episode 12) of South Park.',\n",
+       "β”‚   β”‚   'expected_answer': 'Season 4, Episode 12'\n",
+       "β”‚   },\n",
+       "β”‚   {\n",
+       "β”‚   β”‚   'input_query': \"What is the British-American kickboxer Andrew Tate's kickboxing name? Search the web for the answer.\",\n",
+       "β”‚   β”‚   'generated_answer': 'Andrew Tate\\'s kickboxing name is \"King Cobra\".',\n",
+       "β”‚   β”‚   'expected_answer': 'King Cobra'\n",
+       "β”‚   }\n",
+       "]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'input_query'\u001b[0m: \u001b[32m'Which teams played in the NBA western conference finals of 2024. Search the web for the answer.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'generated_answer'\u001b[0m: \u001b[32m'The teams that played in the NBA Western Conference Finals of 2024 were the Dallas Mavericks and the Minnesota Timberwolves.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'expected_answer'\u001b[0m: \u001b[32m'Dallas Mavericks and the Minnesota Timberwolves'\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'input_query'\u001b[0m: \u001b[32m'In which episode and season of South Park does Bill Cosby \u001b[0m\u001b[32m(\u001b[0m\u001b[32mBSM-471\u001b[0m\u001b[32m)\u001b[0m\u001b[32m first appear? Give me the number and title. Search the web for the answer.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'generated_answer'\u001b[0m: \u001b[32m'Bill Cosby first appears in the episode \"Trapped in the Closet\" \u001b[0m\u001b[32m(\u001b[0m\u001b[32mSeason 9, Episode 12\u001b[0m\u001b[32m)\u001b[0m\u001b[32m of South Park.'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'expected_answer'\u001b[0m: \u001b[32m'Season 4, Episode 12'\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'input_query'\u001b[0m: \u001b[32m\"What is the British-American kickboxer Andrew Tate's kickboxing name? Search the web for the answer.\"\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'generated_answer'\u001b[0m: \u001b[32m'Andrew Tate\\'s kickboxing name is \"King Cobra\".'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'expected_answer'\u001b[0m: \u001b[32m'King Cobra'\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[1m]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
ScoringScoreResponse(\n",
+       "β”‚   results={\n",
+       "β”‚   β”‚   'basic::subset_of': ScoringResult(\n",
+       "β”‚   β”‚   β”‚   aggregated_results={'accuracy': {'accuracy': 0.6666666666666666, 'num_correct': 2.0, 'num_total': 3}},\n",
+       "β”‚   β”‚   β”‚   score_rows=[{'score': 1.0}, {'score': 0.0}, {'score': 1.0}]\n",
+       "β”‚   β”‚   )\n",
+       "β”‚   }\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mScoringScoreResponse\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'basic::subset_of'\u001b[0m: \u001b[1;35mScoringResult\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33maggregated_results\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1;36m0.6666666666666666\u001b[0m, \u001b[32m'num_correct'\u001b[0m: \u001b[1;36m2.0\u001b[0m, \u001b[32m'num_total'\u001b[0m: \u001b[1;36m3\u001b[0m\u001b[1m}\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mscore_rows\u001b[0m=\u001b[1m[\u001b[0m\u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m, \u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m0.0\u001b[0m\u001b[1m}\u001b[0m, \u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "eval_rows = []\n", + "\n", + "expected_answers = [\n", + " \"Dallas Mavericks and the Minnesota Timberwolves\",\n", + " \"Season 4, Episode 12\",\n", + " \"King Cobra\",\n", + "]\n", + "\n", + "for i, turn in enumerate(session_response.turns):\n", + " eval_rows.append(\n", + " {\n", + " \"input_query\": turn.input_messages[0].content,\n", + " \"generated_answer\": turn.output_message.content,\n", + " \"expected_answer\": expected_answers[i],\n", + " }\n", + " )\n", + "\n", + "pprint(eval_rows)\n", + "\n", + "scoring_params = {\n", + " \"basic::subset_of\": None,\n", + "}\n", + "scoring_response = client.scoring.score(\n", + " input_rows=eval_rows, scoring_functions=scoring_params\n", + ")\n", + "pprint(scoring_response)" + ] + }, + { + "cell_type": "markdown", + "id": "ekOS2kM4P0LM", + "metadata": { + "id": "ekOS2kM4P0LM" + }, + "source": [ + "##### 3.1.4 Query Telemetry & Evaluate\n", + "\n", + "Another way to get the agent's execution history is to query the telemetry logs from the `/telemetry` API. The following example shows how to query the telemetry logs and post-process them to prepare data for evaluation." + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "agkWgToGAsuA", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "agkWgToGAsuA", + "outputId": "4233a1d9-8282-4aa9-bdc4-0c105939f97e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Getting traces for session_id=d73d9aaa-65ac-4255-8153-9f5cbff6e01e\n", + "Here are examples of traces:\n" + ] + }, + { + "data": { + "text/html": [ + "
[\n",
+       "β”‚   {\n",
+       "β”‚   β”‚   'input': '[{\"role\": \"system\", \"content\": \"You are a helpful assistant. Use web_search tool to answer the questions.\"}, {\"role\": \"user\", \"content\": \"Which teams played in the NBA western conference finals of 2024. Search the web for the answer.\", \"context\": null}]',\n",
+       "β”‚   β”‚   'output': '{\"content\": \"\", \"tool_calls\": [{\"call_id\": \"5f77ab69-72d9-4d51-b96c-bd4352ced54a\", \"tool_name\": \"brave_search\", \"arguments\": {\"query\": \"NBA Western Conference Finals 2024 teams\"}, \"arguments_json\": \"{\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\"}\"}]}'\n",
+       "β”‚   },\n",
+       "β”‚   {\n",
+       "β”‚   β”‚   'input': '{\"role\":\"assistant\",\"content\":\"\",\"stop_reason\":\"end_of_turn\",\"tool_calls\":[{\"call_id\":\"5f77ab69-72d9-4d51-b96c-bd4352ced54a\",\"tool_name\":\"brave_search\",\"arguments\":{\"query\":\"NBA Western Conference Finals 2024 teams\"},\"arguments_json\":\"{\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\"}\"}]}',\n",
+       "β”‚   β”‚   'output': '{\"role\":\"tool\",\"call_id\":\"5f77ab69-72d9-4d51-b96c-bd4352ced54a\",\"content\":\"{\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\", \\\\\"top_k\\\\\": [{\\\\\"title\\\\\": \\\\\"2024 NBA Western Conference Finals - Basketball-Reference.com\\\\\", \\\\\"url\\\\\": \\\\\"https://www.basketball-reference.com/playoffs/2024-nba-western-conference-finals-mavericks-vs-timberwolves.html\\\\\", \\\\\"content\\\\\": \\\\\"2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. 5 Dallas Mavericks (4-1) vs. 7   Derrick Jones Jr. 2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. College Tools: Player Season Finder, Player Game Finder, Team Season Finder, Team Game Finder Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards, All-Star Games, Executives ... Players, Teams, Seasons, Leaders, Awards ... Subscribe to Stathead Basketball: Get your first month FREE The SPORTS REFERENCE, STATHEAD, IMMACULATE GRID, and IMMACULATE FOOTY trademarks are owned exclusively by Sports Reference LLC. Sports\\\\\\\\u00a0Reference\\\\\\\\u202f\\\\\\\\u00ae Baseball Football (college) Basketball (college) Hockey F\\\\\\\\u00fatbol Blog Stathead\\\\\\\\u202f\\\\\\\\u00ae Immaculate Grid\\\\\\\\u202f\\\\\\\\u00ae\\\\\", \\\\\"score\\\\\": 0.89030397, \\\\\"raw_content\\\\\": null}, {\\\\\"title\\\\\": \\\\\"NBA Standings - 2024-25 season - ESPN\\\\\", \\\\\"url\\\\\": \\\\\"https://www.espn.com/nba/standings\\\\\", \\\\\"content\\\\\": \\\\\"NBA Standings - 2024-25 season - ESPN Skip to main contentSkip to navigation ESPN NFL NBA NCAAF NHL NCAAM NCAAW Soccer More Sports Watch Fantasy NBA Home Scores Schedule Standings Stats Teams Odds Where To Watch All-Star Game Fantasy More NBA Standings 2024-25 Standings Expanded Vs. 
Division NBA Cup LeagueConferenceDivision Eastern Conference | | | --- | | 1CLECleveland Cavaliers | | 2BOSBoston Celtics | | 3NYNew York Knicks | | 4INDIndiana Pacers | | 5MILMilwaukee Bucks | | 6DETDetroit Pistons | | 7MIAMiami Heat | | 8ORLOrlando Magic | | 9ATLAtlanta Hawks | | 10CHIChicago Bulls | | PHIPhiladelphia 76ers | | BKNBrooklyn Nets | | TORToronto Raptors | | CHACharlotte Hornets | | WSHWashington Wizards | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 42 | 10 | .808 | - | 24-4 | 18-6 | 9-1 | 28-7 | 122.4 | 112.1 | +10.3 | W2 | 6-4 | | 36 | 16 | .692 | 6 | 16-10 | 20-6 | 6-2 | 26-9 | 117.3 | 108.8 | +8.5 | L1 | 7-3 | | 34 | 17 | .667 | 7.5 | 18-9 | 16-8 | 9-1 | 23-10 | 117.9 | 111.4 | +6.5 | W2 | 8-2 | | 29 | 21 | .580 | 12 | 14-7 | 14-13 | 6-4 | 17-15 | 115.7 | 114.9 | +0.8 | W1 | 7-3 | | 27 | 23 | .540 | 14 | 16-8 | 10-15 | 6-5 | 22-16 | 114.2 | 112.6 | +1.6 | L1 | 4-6 | | 26 | 26 | .500 | 16 | 13-13 | 13-13 | 2-9 | 18-20 | 113.0 | 113.8 | -0.8 | W1 | 5-5 | | 25 | 25 | .500 | 16 | 12-10 | 12-15 | 5-3 | 14-15 | 110.5 | 110.6 | -0.1 | L1 | 5-5 | | 25 | 28 | .472 | 17.5 | 15-9 | 10-19 | 5-2 | 20-15 | 103.8 | 105.6 | -1.8 | L1 | 2-8 | | 24 | 28 | .462 | 18 | 12-12 | 12-15 | 4-2 | 17-13 | 116.1 | 119.0 | -2.9 | W1 | 2-8 | | 22 | 30 | .423 | 20 | 10-16 | 12-14 | 3-7 | 17-18 | 116.7 | 120.1 | -3.4 | L1 | 4-6 | | 20 | 31 | .392 | 21.5 | 10-16 | 10-15 | 3-4 | 14-17 | 109.1 | 112.9 | -3.8 | L2 | 5-5 | | 18 | 34 | .346 | 24 | 7-17 | 11-17 | 1-8 | 9-23 | 105.3 | 111.7 | -6.4 | W1 | 4-6 | | 16 | 36 | .308 | 26 | 12-16 | 4-20 | 3-7 | 10-23 | 111.2 | 116.9 | -5.7 | L3 | 6-4 | | 13 | 36 | .265 | 27.5 | 9-20 | 4-16 | 0-9 | 7-27 | 107.1 | 112.3 | -5.2 | W1 | 2-8 | | 9 | 42 | .176 | 32.5 | 5-20 | 4-21 | 5-3 | 7-21 | 107.8 | 121.5 | -13.7 | L1 | 3-7 | Western Conference | | | --- | | 1OKCOklahoma City Thunder | | 2MEMMemphis Grizzlies | | 3DENDenver Nuggets | | 4HOUHouston Rockets | | 5LALLos Angeles Lakers | | 6MINMinnesota Timberwolves | | 7LACLA Clippers | | 8DALDallas Mavericks | | 9PHXPhoenix Suns | | 10SACSacramento Kings | | GSGolden State Warriors | | SASan Antonio Spurs | | PORPortland Trail Blazers | | UTAHUtah Jazz | | NONew Orleans Pelicans | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 41 | 9 | .820 | - | 23-3 | 17-6 | 7-1 | 23-8 | 117.7 | 104.7 | +13.0 | W4 | 7-3 | | 35 | 16 | .686 | 6.5 | 21-5 | 14-11 | 8-4 | 19-12 | 123.8 | 115.4 | +8.4 | W4 | 9-1 | | 33 | 19 | .635 | 9 | 17-8 | 16-11 | 4-4 | 19-12 | 120.8 | 115.9 | +4.9 | W5 | 7-3 | | 32 | 20 | .615 | 10 | 15-8 | 17-11 | 9-3 | 19-12 | 113.3 | 109.1 | +4.2 | L6 | 4-6 | | 30 | 19 | .612 | 10.5 | 17-6 | 13-13 | 9-3 | 19-11 | 112.6 | 112.0 | +0.6 | W4 | 8-2 | | 29 | 23 | .558 | 13 | 14-12 | 15-11 | 4-3 | 21-14 | 111.7 | 108.2 | +3.5 | W2 | 7-3 | | 28 | 23 | .549 | 13.5 | 17-10 | 11-13 | 6-4 | 17-18 | 110.1 | 107.7 | +2.4 | L3 | 4-6 | | 28 | 25 | .528 | 14.5 | 15-10 | 13-15 | 6-4 | 20-17 | 115.5 | 113.3 | +2.2 | W2 | 5-5 | | 26 | 25 | .510 | 15.5 | 16-9 | 10-16 | 7-4 | 17-14 | 113.4 | 114.7 | -1.3 | W1 | 5-5 | | 25 | 26 | .490 | 16.5 | 13-13 | 12-13 | 4-6 | 16-17 | 116.1 | 115.4 | +0.7 | L2 | 4-6 | | 25 | 26 | .490 | 16.5 | 15-13 | 10-13 | 1-10 | 17-18 | 111.5 | 111.9 | -0.4 | L2 | 4-6 | | 22 | 27 | .449 | 18.5 | 13-12 | 8-14 | 2-7 | 16-18 | 112.8 | 114.3 | -1.5 | L1 | 3-7 | | 23 | 29 | .442 | 
19 | 15-13 | 8-16 | 4-5 | 14-24 | 109.0 | 113.9 | -4.9 | W6 | 9-1 | | 12 | 38 | .240 | 29 | 5-18 | 7-20 | 1-7 | 4-29 | 111.9 | 118.9 | -7.0 | L1 | 2-8 | | 12 | 39 | .235 | 29.5 | 8-18 | 4-21 | 1-8 | 6-23 | 110.0 | 118.8 | -8.8 | L7 | 3-7 | Standings are updated with the completion of each game.Teams seeded 7-10 in each conference will compete in a play-in tournament at the end of the regular season. Glossary W:Wins L:Losses PCT:Winning Percentage GB:Games Back HOME:Home Record AWAY:Away Record DIV:Division Record CONF:Conference Record PPG:Points Per Game OPP PPG:Opponent Points Per Game DIFF:Average Point Differential STRK:Current Streak L10:Record last 10 games NBA News Anthony Davis leads Mavericks past Rockets 116-105 in Mavs debut but leaves with lower-body injury -------------------------------------------------------------------------------------------------- \\\\\\\\u2014 Anthony Davis had 26 points, 16 rebounds, seven assists and three blocks in his Mavericks debut but left the game late in the third quarter with a... * 38m Hawks request waivers on newly acquired Bones Hyland ---------------------------------------------------- The Atlanta Hawks requested waivers on guard Bones Hyland on Saturday, just two days after the guard was obtained from the Clippers in a deal at the NBA trade deadline. * 1h AD posts 26-point double-double in debut before suffering injury ---------------------------------------------------------------- Anthony Davis has a strong debut with the Mavs, dropping 26 points, 16 rebounds and 7 assists, before leaving with a lower-body injury. * 1h All NBA News Terms of Use Privacy Policy Your US State Privacy Rights Children\\'s Online Privacy Policy Interest-Based Ads About Nielsen Measurement Do Not Sell or Share My Personal Information Contact Us Disney Ad Sales Site Work for ESPN Corrections ESPN BET Sportsbook is owned and operated by PENN Entertainment, Inc. and its subsidiaries (\\'PENN\\').\\\\\", \\\\\"score\\\\\": 0.83549726, \\\\\"raw_content\\\\\": null}, {\\\\\"title\\\\\": \\\\\"2024 Playoffs: West Finals | Timberwolves (3) vs. Mavericks (5) | NBA.com\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/playoffs/2024/west-final\\\\\", \\\\\"content\\\\\": \\\\\"Mavericks (5) | NBA.com 2024-25 NBA CrunchTime NBA TV Draft Kings DFS NBA Bet Home NBA Store NBA Game Worn NBA Photo Store NBA Experiences NBA G League NBA 2K League NBA Play NBA Bet ### Doncic, Irving carry Mavs to NBA Finals Luka Doncic and Kyrie Irving pour in 36 points apiece to guide Dallas to its 1st appearance in the NBA Finals since 2011. ### Luka: \\'This is special, coming from the West\\' Luka Doncic with Ernie, Charles, Kenny & Shaq about the Mavs being NBA Finals-bound, his Game 5 play and more. NBA Organization NBA ID NBA Official NBA Careers NBA Initiatives NBA Cares NBA Foundation NBA Communications NBA Transactions NBA Auctions NBA Photostore\\\\\", \\\\\"score\\\\\": 0.75312227, \\\\\"raw_content\\\\\": null}, {\\\\\"title\\\\\": \\\\\"2024 NBA Playoffs | Official Bracket, Schedule and Series Matchups\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/playoffs/2024?os=wtmbloozowcj&ref=app\\\\\", \\\\\"content\\\\\": \\\\\"Draft Kings DFS NBA Store NBA Play NBA Finals ### Chasing History: Celtics clinch banner 18 (Ep. 25) Jayson Tatum and Finals MVP Jaylen Brown close out Dallas in Game 5 to secure Boston\\'s NBA-record 18th championship. 
WE DID ITTTT!\\' Jayson Tatum walkoff interview after Celtics defeat Mavericks in Game 5 of 2024 NBA Finals, clinching title with a 4-1 series win. ### Horford finally champ after key sacrifice Al Horford, who played the most playoff games in NBA history before winning his 1st title, crosses the plateau in his 17th season. 30:13 ### Best of the 2024 NBA Finals 17:47 ### Best of Boston Celtics from the 2024 NBA Finals\\\\\", \\\\\"score\\\\\": 0.63234437, \\\\\"raw_content\\\\\": null}, {\\\\\"title\\\\\": \\\\\"2025 NBA Playoffs: Standings, bracket and clinching updates\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/news/2025-nba-playoffs-standings-and-bracket-updates\\\\\", \\\\\"content\\\\\": \\\\\"NBA TV NBA Play NBA Store NBA Game Worn NBA Play NBA Official NBA Playoffs bracket ### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. Click \\\\\\\\\\\\\"Access Content\\\\\\\\\\\\\" to agree to our Terms of Use and Privacy Policy and to sign up for emails about the latest news and products from the NBA Family and its partners. #### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. NBA ID NBA Official NBA Transactions NBA Auctions\\\\\", \\\\\"score\\\\\": 0.13435538, \\\\\"raw_content\\\\\": null}]}\"}'\n",
+       "β”‚   }\n",
+       "]\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'input'\u001b[0m: \u001b[32m'\u001b[0m\u001b[32m[\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"role\": \"system\", \"content\": \"You are a helpful assistant. Use web_search tool to answer the questions.\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"role\": \"user\", \"content\": \"Which teams played in the NBA western conference finals of 2024. Search the web for the answer.\", \"context\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m]\u001b[0m\u001b[32m'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'output'\u001b[0m: \u001b[32m'\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"content\": \"\", \"tool_calls\": \u001b[0m\u001b[32m[\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"call_id\": \"5f77ab69-72d9-4d51-b96c-bd4352ced54a\", \"tool_name\": \"brave_search\", \"arguments\": \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"query\": \"NBA Western Conference Finals 2024 teams\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \"arguments_json\": \"\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m]\u001b[0m\u001b[32m}\u001b[0m\u001b[32m'\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'input'\u001b[0m: \u001b[32m'\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"role\":\"assistant\",\"content\":\"\",\"stop_reason\":\"end_of_turn\",\"tool_calls\":\u001b[0m\u001b[32m[\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"call_id\":\"5f77ab69-72d9-4d51-b96c-bd4352ced54a\",\"tool_name\":\"brave_search\",\"arguments\":\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"query\":\"NBA Western Conference Finals 2024 teams\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m,\"arguments_json\":\"\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m]\u001b[0m\u001b[32m}\u001b[0m\u001b[32m'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'output'\u001b[0m: \u001b[32m'\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\"role\":\"tool\",\"call_id\":\"5f77ab69-72d9-4d51-b96c-bd4352ced54a\",\"content\":\"\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"query\\\\\": \\\\\"NBA Western Conference Finals 2024 teams\\\\\", \\\\\"top_k\\\\\": \u001b[0m\u001b[32m[\u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"title\\\\\": \\\\\"2024 NBA Western Conference Finals - Basketball-Reference.com\\\\\", \\\\\"url\\\\\": \\\\\"https://www.basketball-reference.com/playoffs/2024-nba-western-conference-finals-mavericks-vs-timberwolves.html\\\\\", \\\\\"content\\\\\": \\\\\"2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. 5 Dallas Mavericks \u001b[0m\u001b[32m(\u001b[0m\u001b[32m4-1\u001b[0m\u001b[32m)\u001b[0m\u001b[32m vs. 7 Derrick Jones Jr. 2024 NBA Playoffs Dallas Mavericks vs. Dallas Mavericks vs. Dallas Mavericks vs. College Tools: Player Season Finder, Player Game Finder, Team Season Finder, Team Game Finder Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards ... Players, Teams, Seasons, Leaders, Awards, All-Star Games, Executives ... Players, Teams, Seasons, Leaders, Awards ... 
Subscribe to Stathead Basketball: Get your first month FREE The SPORTS REFERENCE, STATHEAD, IMMACULATE GRID, and IMMACULATE FOOTY trademarks are owned exclusively by Sports Reference LLC. Sports\\\\\\\\u00a0Reference\\\\\\\\u202f\\\\\\\\u00ae Baseball Football \u001b[0m\u001b[32m(\u001b[0m\u001b[32mcollege\u001b[0m\u001b[32m)\u001b[0m\u001b[32m Basketball \u001b[0m\u001b[32m(\u001b[0m\u001b[32mcollege\u001b[0m\u001b[32m)\u001b[0m\u001b[32m Hockey F\\\\\\\\u00fatbol Blog Stathead\\\\\\\\u202f\\\\\\\\u00ae Immaculate Grid\\\\\\\\u202f\\\\\\\\u00ae\\\\\", \\\\\"score\\\\\": 0.89030397, \\\\\"raw_content\\\\\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"title\\\\\": \\\\\"NBA Standings - 2024-25 season - ESPN\\\\\", \\\\\"url\\\\\": \\\\\"https://www.espn.com/nba/standings\\\\\", \\\\\"content\\\\\": \\\\\"NBA Standings - 2024-25 season - ESPN Skip to main contentSkip to navigation ESPN NFL NBA NCAAF NHL NCAAM NCAAW Soccer More Sports Watch Fantasy NBA Home Scores Schedule Standings Stats Teams Odds Where To Watch All-Star Game Fantasy More NBA Standings 2024-25 Standings Expanded Vs. Division NBA Cup LeagueConferenceDivision Eastern Conference | | | --- | | 1CLECleveland Cavaliers | | 2BOSBoston Celtics | | 3NYNew York Knicks | | 4INDIndiana Pacers | | 5MILMilwaukee Bucks | | 6DETDetroit Pistons | | 7MIAMiami Heat | | 8ORLOrlando Magic | | 9ATLAtlanta Hawks | | 10CHIChicago Bulls | | PHIPhiladelphia 76ers | | BKNBrooklyn Nets | | TORToronto Raptors | | CHACharlotte Hornets | | WSHWashington Wizards | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 42 | 10 | .808 | - | 24-4 | 18-6 | 9-1 | 28-7 | 122.4 | 112.1 | +10.3 | W2 | 6-4 | | 36 | 16 | .692 | 6 | 16-10 | 20-6 | 6-2 | 26-9 | 117.3 | 108.8 | +8.5 | L1 | 7-3 | | 34 | 17 | .667 | 7.5 | 18-9 | 16-8 | 9-1 | 23-10 | 117.9 | 111.4 | +6.5 | W2 | 8-2 | | 29 | 21 | .580 | 12 | 14-7 | 14-13 | 6-4 | 17-15 | 115.7 | 114.9 | +0.8 | W1 | 7-3 | | 27 | 23 | .540 | 14 | 16-8 | 10-15 | 6-5 | 22-16 | 114.2 | 112.6 | +1.6 | L1 | 4-6 | | 26 | 26 | .500 | 16 | 13-13 | 13-13 | 2-9 | 18-20 | 113.0 | 113.8 | -0.8 | W1 | 5-5 | | 25 | 25 | .500 | 16 | 12-10 | 12-15 | 5-3 | 14-15 | 110.5 | 110.6 | -0.1 | L1 | 5-5 | | 25 | 28 | .472 | 17.5 | 15-9 | 10-19 | 5-2 | 20-15 | 103.8 | 105.6 | -1.8 | L1 | 2-8 | | 24 | 28 | .462 | 18 | 12-12 | 12-15 | 4-2 | 17-13 | 116.1 | 119.0 | -2.9 | W1 | 2-8 | | 22 | 30 | .423 | 20 | 10-16 | 12-14 | 3-7 | 17-18 | 116.7 | 120.1 | -3.4 | L1 | 4-6 | | 20 | 31 | .392 | 21.5 | 10-16 | 10-15 | 3-4 | 14-17 | 109.1 | 112.9 | -3.8 | L2 | 5-5 | | 18 | 34 | .346 | 24 | 7-17 | 11-17 | 1-8 | 9-23 | 105.3 | 111.7 | -6.4 | W1 | 4-6 | | 16 | 36 | .308 | 26 | 12-16 | 4-20 | 3-7 | 10-23 | 111.2 | 116.9 | -5.7 | L3 | 6-4 | | 13 | 36 | .265 | 27.5 | 9-20 | 4-16 | 0-9 | 7-27 | 107.1 | 112.3 | -5.2 | W1 | 2-8 | | 9 | 42 | .176 | 32.5 | 5-20 | 4-21 | 5-3 | 7-21 | 107.8 | 121.5 | -13.7 | L1 | 3-7 | Western Conference | | | --- | | 1OKCOklahoma City Thunder | | 2MEMMemphis Grizzlies | | 3DENDenver Nuggets | | 4HOUHouston Rockets | | 5LALLos Angeles Lakers | | 6MINMinnesota Timberwolves | | 7LACLA Clippers | | 8DALDallas Mavericks | | 9PHXPhoenix Suns | | 10SACSacramento Kings | | GSGolden State Warriors | | SASan Antonio Spurs | | PORPortland Trail Blazers | | UTAHUtah Jazz | | NONew Orleans Pelicans | | W | L | PCT | GB | HOME | AWAY | DIV | CONF | PPG | OPP PPG | DIFF | STRK | L10 | | --- | --- | 
--- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | | 41 | 9 | .820 | - | 23-3 | 17-6 | 7-1 | 23-8 | 117.7 | 104.7 | +13.0 | W4 | 7-3 | | 35 | 16 | .686 | 6.5 | 21-5 | 14-11 | 8-4 | 19-12 | 123.8 | 115.4 | +8.4 | W4 | 9-1 | | 33 | 19 | .635 | 9 | 17-8 | 16-11 | 4-4 | 19-12 | 120.8 | 115.9 | +4.9 | W5 | 7-3 | | 32 | 20 | .615 | 10 | 15-8 | 17-11 | 9-3 | 19-12 | 113.3 | 109.1 | +4.2 | L6 | 4-6 | | 30 | 19 | .612 | 10.5 | 17-6 | 13-13 | 9-3 | 19-11 | 112.6 | 112.0 | +0.6 | W4 | 8-2 | | 29 | 23 | .558 | 13 | 14-12 | 15-11 | 4-3 | 21-14 | 111.7 | 108.2 | +3.5 | W2 | 7-3 | | 28 | 23 | .549 | 13.5 | 17-10 | 11-13 | 6-4 | 17-18 | 110.1 | 107.7 | +2.4 | L3 | 4-6 | | 28 | 25 | .528 | 14.5 | 15-10 | 13-15 | 6-4 | 20-17 | 115.5 | 113.3 | +2.2 | W2 | 5-5 | | 26 | 25 | .510 | 15.5 | 16-9 | 10-16 | 7-4 | 17-14 | 113.4 | 114.7 | -1.3 | W1 | 5-5 | | 25 | 26 | .490 | 16.5 | 13-13 | 12-13 | 4-6 | 16-17 | 116.1 | 115.4 | +0.7 | L2 | 4-6 | | 25 | 26 | .490 | 16.5 | 15-13 | 10-13 | 1-10 | 17-18 | 111.5 | 111.9 | -0.4 | L2 | 4-6 | | 22 | 27 | .449 | 18.5 | 13-12 | 8-14 | 2-7 | 16-18 | 112.8 | 114.3 | -1.5 | L1 | 3-7 | | 23 | 29 | .442 | 19 | 15-13 | 8-16 | 4-5 | 14-24 | 109.0 | 113.9 | -4.9 | W6 | 9-1 | | 12 | 38 | .240 | 29 | 5-18 | 7-20 | 1-7 | 4-29 | 111.9 | 118.9 | -7.0 | L1 | 2-8 | | 12 | 39 | .235 | 29.5 | 8-18 | 4-21 | 1-8 | 6-23 | 110.0 | 118.8 | -8.8 | L7 | 3-7 | Standings are updated with the completion of each game.Teams seeded 7-10 in each conference will compete in a play-in tournament at the end of the regular season. Glossary W:Wins L:Losses PCT:Winning Percentage GB:Games Back HOME:Home Record AWAY:Away Record DIV:Division Record CONF:Conference Record PPG:Points Per Game OPP PPG:Opponent Points Per Game DIFF:Average Point Differential STRK:Current Streak L10:Record last 10 games NBA News Anthony Davis leads Mavericks past Rockets 116-105 in Mavs debut but leaves with lower-body injury -------------------------------------------------------------------------------------------------- \\\\\\\\u2014 Anthony Davis had 26 points, 16 rebounds, seven assists and three blocks in his Mavericks debut but left the game late in the third quarter with a... * 38m Hawks request waivers on newly acquired Bones Hyland ---------------------------------------------------- The Atlanta Hawks requested waivers on guard Bones Hyland on Saturday, just two days after the guard was obtained from the Clippers in a deal at the NBA trade deadline. * 1h AD posts 26-point double-double in debut before suffering injury ---------------------------------------------------------------- Anthony Davis has a strong debut with the Mavs, dropping 26 points, 16 rebounds and 7 assists, before leaving with a lower-body injury. * 1h All NBA News Terms of Use Privacy Policy Your US State Privacy Rights Children\\'s Online Privacy Policy Interest-Based Ads About Nielsen Measurement Do Not Sell or Share My Personal Information Contact Us Disney Ad Sales Site Work for ESPN Corrections ESPN BET Sportsbook is owned and operated by PENN Entertainment, Inc. and its subsidiaries \u001b[0m\u001b[32m(\u001b[0m\u001b[32m\\'PENN\\'\u001b[0m\u001b[32m)\u001b[0m\u001b[32m.\\\\\", \\\\\"score\\\\\": 0.83549726, \\\\\"raw_content\\\\\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"title\\\\\": \\\\\"2024 Playoffs: West Finals | Timberwolves \u001b[0m\u001b[32m(\u001b[0m\u001b[32m3\u001b[0m\u001b[32m)\u001b[0m\u001b[32m vs. 
Mavericks \u001b[0m\u001b[32m(\u001b[0m\u001b[32m5\u001b[0m\u001b[32m)\u001b[0m\u001b[32m | NBA.com\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/playoffs/2024/west-final\\\\\", \\\\\"content\\\\\": \\\\\"Mavericks \u001b[0m\u001b[32m(\u001b[0m\u001b[32m5\u001b[0m\u001b[32m)\u001b[0m\u001b[32m | NBA.com 2024-25 NBA CrunchTime NBA TV Draft Kings DFS NBA Bet Home NBA Store NBA Game Worn NBA Photo Store NBA Experiences NBA G League NBA 2K League NBA Play NBA Bet ### Doncic, Irving carry Mavs to NBA Finals Luka Doncic and Kyrie Irving pour in 36 points apiece to guide Dallas to its 1st appearance in the NBA Finals since 2011. ### Luka: \\'This is special, coming from the West\\' Luka Doncic with Ernie, Charles, Kenny & Shaq about the Mavs being NBA Finals-bound, his Game 5 play and more. NBA Organization NBA ID NBA Official NBA Careers NBA Initiatives NBA Cares NBA Foundation NBA Communications NBA Transactions NBA Auctions NBA Photostore\\\\\", \\\\\"score\\\\\": 0.75312227, \\\\\"raw_content\\\\\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"title\\\\\": \\\\\"2024 NBA Playoffs | Official Bracket, Schedule and Series Matchups\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/playoffs/2024?\u001b[0m\u001b[32mos\u001b[0m\u001b[32m=\u001b[0m\u001b[32mwtmbloozowcj\u001b[0m\u001b[32m&\u001b[0m\u001b[32mref\u001b[0m\u001b[32m=\u001b[0m\u001b[32mapp\u001b[0m\u001b[32m\\\\\", \\\\\"content\\\\\": \\\\\"Draft Kings DFS NBA Store NBA Play NBA Finals ### Chasing History: Celtics clinch banner 18 \u001b[0m\u001b[32m(\u001b[0m\u001b[32mEp. 25\u001b[0m\u001b[32m)\u001b[0m\u001b[32m Jayson Tatum and Finals MVP Jaylen Brown close out Dallas in Game 5 to secure Boston\\'s NBA-record 18th championship. WE DID ITTTT!\\' Jayson Tatum walkoff interview after Celtics defeat Mavericks in Game 5 of 2024 NBA Finals, clinching title with a 4-1 series win. ### Horford finally champ after key sacrifice Al Horford, who played the most playoff games in NBA history before winning his 1st title, crosses the plateau in his 17th season. 30:13 ### Best of the 2024 NBA Finals 17:47 ### Best of Boston Celtics from the 2024 NBA Finals\\\\\", \\\\\"score\\\\\": 0.63234437, \\\\\"raw_content\\\\\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m, \u001b[0m\u001b[32m{\u001b[0m\u001b[32m\\\\\"title\\\\\": \\\\\"2025 NBA Playoffs: Standings, bracket and clinching updates\\\\\", \\\\\"url\\\\\": \\\\\"https://www.nba.com/news/2025-nba-playoffs-standings-and-bracket-updates\\\\\", \\\\\"content\\\\\": \\\\\"NBA TV NBA Play NBA Store NBA Game Worn NBA Play NBA Official NBA Playoffs bracket ### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. Click \\\\\\\\\\\\\"Access Content\\\\\\\\\\\\\" to agree to our Terms of Use and Privacy Policy and to sign up for emails about the latest news and products from the NBA Family and its partners. #### What to know about 2025 SoFi NBA Play-In Tournament The SoFi NBA Play-In Tournament features the Nos. 7-10 teams in each conference battling for the 7th and 8th playoff seeds. 
NBA ID NBA Official NBA Transactions NBA Auctions\\\\\", \\\\\"score\\\\\": 0.13435538, \\\\\"raw_content\\\\\": null\u001b[0m\u001b[32m}\u001b[0m\u001b[32m]\u001b[0m\u001b[32m}\u001b[0m\u001b[32m\"\u001b[0m\u001b[32m}\u001b[0m\u001b[32m'\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[1m]\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# NBVAL_SKIP\n", + "print(f\"Getting traces for session_id={session_id}\")\n", + "import json\n", + "\n", + "from rich.pretty import pprint\n", + "\n", + "agent_logs = []\n", + "\n", + "for span in client.telemetry.query_spans(\n", + " attribute_filters=[\n", + " {\"key\": \"session_id\", \"op\": \"eq\", \"value\": session_id},\n", + " ],\n", + " attributes_to_return=[\"input\", \"output\"],\n", + "):\n", + " if span.attributes[\"output\"] != \"no shields\":\n", + " agent_logs.append(span.attributes)\n", + "\n", + "print(\"Here are examples of traces:\")\n", + "pprint(agent_logs[:2])\n" + ] + }, + { + "cell_type": "markdown", + "id": "QF30H7ufP2RE", + "metadata": { + "id": "QF30H7ufP2RE" + }, + "source": [ + "- Now, we want to run evaluation to assert that our search agent succesfully calls brave_search from online traces.\n", + "- We will first post-process the agent's telemetry logs and run evaluation." + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "sy4Xaff_Avuu", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 432 + }, + "id": "sy4Xaff_Avuu", + "outputId": "1b14b5ed-4c77-47c4-edfb-1c13a88e5ef4" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
ScoringScoreResponse(\n",
+       "β”‚   results={\n",
+       "β”‚   β”‚   'basic::subset_of': ScoringResult(\n",
+       "β”‚   β”‚   β”‚   aggregated_results={'accuracy': {'accuracy': 1.0, 'num_correct': 3.0, 'num_total': 3}},\n",
+       "β”‚   β”‚   β”‚   score_rows=[{'score': 1.0}, {'score': 1.0}, {'score': 1.0}]\n",
+       "β”‚   β”‚   )\n",
+       "β”‚   }\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mScoringScoreResponse\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'basic::subset_of'\u001b[0m: \u001b[1;35mScoringResult\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33maggregated_results\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1;36m1.0\u001b[0m, \u001b[32m'num_correct'\u001b[0m: \u001b[1;36m3.0\u001b[0m, \u001b[32m'num_total'\u001b[0m: \u001b[1;36m3\u001b[0m\u001b[1m}\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mscore_rows\u001b[0m=\u001b[1m[\u001b[0m\u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m, \u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m, \u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# NBVAL_SKIP\n", + "# post-process telemetry spance and prepare data for eval\n", + "# in this case, we want to assert that all user prompts is followed by a tool call\n", + "import ast\n", + "import json\n", + "\n", + "eval_rows = []\n", + "\n", + "for log in agent_logs:\n", + " input = json.loads(log[\"input\"])\n", + " if isinstance(input, list):\n", + " input = input[-1]\n", + " if input[\"role\"] == \"user\":\n", + " eval_rows.append(\n", + " {\n", + " \"input_query\": input[\"content\"],\n", + " \"generated_answer\": log[\"output\"],\n", + " # check if generated_answer uses tools brave_search\n", + " \"expected_answer\": \"brave_search\",\n", + " },\n", + " )\n", + "\n", + "# pprint(eval_rows)\n", + "scoring_params = {\n", + " \"basic::subset_of\": None,\n", + "}\n", + "scoring_response = client.scoring.score(\n", + " input_rows=eval_rows, scoring_functions=scoring_params\n", + ")\n", + "pprint(scoring_response)\n" + ] + }, + { + "cell_type": "markdown", + "id": "IKbzhxcw5e_c", + "metadata": { + "id": "IKbzhxcw5e_c" + }, + "source": [ + "#### 3.2. Agentic Application Dataset Scoring\n", + "- Llama Stack offers a library of scoring functions and the `/scoring` API, allowing you to run evaluations on your pre-annotated AI application datasets.\n", + "\n", + "- In this example, we will work with an example RAG dataset you have built previously, label with an annotation, and use LLM-As-Judge with custom judge prompt for scoring. Please checkout our [Llama Stack Playground](https://llama-stack.readthedocs.io/en/latest/playground/index.html) for an interactive interface to upload datasets and run scorings." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "xG4Y84VQBb0g", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 304 + }, + "id": "xG4Y84VQBb0g", + "outputId": "cf7dcecc-a81d-4c60-af5e-b36b8fe85c69" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
ScoringScoreResponse(\n",
+       "β”‚   results={\n",
+       "β”‚   β”‚   'llm-as-judge::base': ScoringResult(\n",
+       "β”‚   β”‚   β”‚   aggregated_results={},\n",
+       "β”‚   β”‚   β”‚   score_rows=[\n",
+       "β”‚   β”‚   β”‚   β”‚   {\n",
+       "β”‚   β”‚   β”‚   β”‚   β”‚   'score': 'B',\n",
+       "β”‚   β”‚   β”‚   β”‚   β”‚   'judge_feedback': 'Answer: B, Explanation: The GENERATED_RESPONSE is a superset of the EXPECTED_RESPONSE and is fully consistent with it. The EXPECTED_RESPONSE only mentions \"LoRA\", which is a topic that is extensively covered in the GENERATED_RESPONSE. The GENERATED_RESPONSE provides more specific and detailed topics related to LoRA, but it does not contradict the EXPECTED_RESPONSE.'\n",
+       "β”‚   β”‚   β”‚   β”‚   }\n",
+       "β”‚   β”‚   β”‚   ]\n",
+       "β”‚   β”‚   ),\n",
+       "β”‚   β”‚   'basic::subset_of': ScoringResult(\n",
+       "β”‚   β”‚   β”‚   aggregated_results={'accuracy': {'accuracy': 1.0, 'num_correct': 1.0, 'num_total': 1}},\n",
+       "β”‚   β”‚   β”‚   score_rows=[{'score': 1.0}]\n",
+       "β”‚   β”‚   )\n",
+       "β”‚   }\n",
+       ")\n",
+       "
\n" + ], + "text/plain": [ + "\u001b[1;35mScoringScoreResponse\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[33mresults\u001b[0m=\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'llm-as-judge::base'\u001b[0m: \u001b[1;35mScoringResult\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33maggregated_results\u001b[0m=\u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mscore_rows\u001b[0m=\u001b[1m[\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[1m{\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'score'\u001b[0m: \u001b[32m'B'\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[32m'judge_feedback'\u001b[0m: \u001b[32m'Answer: B, Explanation: The GENERATED_RESPONSE is a superset of the EXPECTED_RESPONSE and is fully consistent with it. The EXPECTED_RESPONSE only mentions \"LoRA\", which is a topic that is extensively covered in the GENERATED_RESPONSE. The GENERATED_RESPONSE provides more specific and detailed topics related to LoRA, but it does not contradict the EXPECTED_RESPONSE.'\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ β”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[32m'basic::subset_of'\u001b[0m: \u001b[1;35mScoringResult\u001b[0m\u001b[1m(\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33maggregated_results\u001b[0m=\u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1m{\u001b[0m\u001b[32m'accuracy'\u001b[0m: \u001b[1;36m1.0\u001b[0m, \u001b[32m'num_correct'\u001b[0m: \u001b[1;36m1.0\u001b[0m, \u001b[32m'num_total'\u001b[0m: \u001b[1;36m1\u001b[0m\u001b[1m}\u001b[0m\u001b[1m}\u001b[0m,\n", + "\u001b[2;32mβ”‚ β”‚ β”‚ \u001b[0m\u001b[33mscore_rows\u001b[0m=\u001b[1m[\u001b[0m\u001b[1m{\u001b[0m\u001b[32m'score'\u001b[0m: \u001b[1;36m1.0\u001b[0m\u001b[1m}\u001b[0m\u001b[1m]\u001b[0m\n", + "\u001b[2;32mβ”‚ β”‚ \u001b[0m\u001b[1m)\u001b[0m\n", + "\u001b[2;32mβ”‚ \u001b[0m\u001b[1m}\u001b[0m\n", + "\u001b[1m)\u001b[0m\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import rich\n", + "from rich.pretty import pprint\n", + "\n", + "# could even use larger models like 405B\n", + "judge_model_id = \"meta-llama/Llama-3.3-70B-Instruct\"\n", + "\n", + "JUDGE_PROMPT = \"\"\"\n", + "Given a QUESTION and GENERATED_RESPONSE and EXPECTED_RESPONSE.\n", + "\n", + "Compare the factual content of the GENERATED_RESPONSE with the EXPECTED_RESPONSE. Ignore any differences in style, grammar, or punctuation.\n", + " The GENERATED_RESPONSE may either be a subset or superset of the EXPECTED_RESPONSE, or it may conflict with it. Determine which case applies. 
Answer the question by selecting one of the following options:\n", + " (A) The GENERATED_RESPONSE is a subset of the EXPECTED_RESPONSE and is fully consistent with it.\n", + " (B) The GENERATED_RESPONSE is a superset of the EXPECTED_RESPONSE and is fully consistent with it.\n", + " (C) The GENERATED_RESPONSE contains all the same details as the EXPECTED_RESPONSE.\n", + " (D) There is a disagreement between the GENERATED_RESPONSE and the EXPECTED_RESPONSE.\n", + " (E) The answers differ, but these differences don't matter from the perspective of factuality.\n", + "\n", + "Give your answer in the format \"Answer: One of ABCDE, Explanation: \".\n", + "\n", + "Your actual task:\n", + "\n", + "QUESTION: {input_query}\n", + "GENERATED_RESPONSE: {generated_answer}\n", + "EXPECTED_RESPONSE: {expected_answer}\n", + "\"\"\"\n", + "\n", + "input_query = (\n", + " \"What are the top 5 topics that were explained? Only list succinct bullet points.\"\n", + ")\n", + "generated_answer = \"\"\"\n", + "Here are the top 5 topics that were explained in the documentation for Torchtune:\n", + "\n", + "* What is LoRA and how does it work?\n", + "* Fine-tuning with LoRA: memory savings and parameter-efficient finetuning\n", + "* Running a LoRA finetune with Torchtune: overview and recipe\n", + "* Experimenting with different LoRA configurations: rank, alpha, and attention modules\n", + "* LoRA finetuning\n", + "\"\"\"\n", + "expected_answer = \"\"\"LoRA\"\"\"\n", + "\n", + "rows = [\n", + " {\n", + " \"input_query\": input_query,\n", + " \"generated_answer\": generated_answer,\n", + " \"expected_answer\": expected_answer,\n", + " },\n", + "]\n", + "\n", + "scoring_params = {\n", + " \"llm-as-judge::base\": {\n", + " \"judge_model\": judge_model_id,\n", + " \"prompt_template\": JUDGE_PROMPT,\n", + " \"type\": \"llm_as_judge\",\n", + " \"judge_score_regexes\": [\"Answer: (A|B|C|D|E)\"],\n", + " },\n", + " \"basic::subset_of\": None,\n", + "}\n", + "\n", + "response = client.scoring.score(input_rows=rows, scoring_functions=scoring_params)\n", + "pprint(response)\n" ] }, { @@ -2670,7 +2826,7 @@ "id": "ad077440", "metadata": {}, "source": [ - "## 3. Image Understanding with Llama 3.2\n", + "## 4. Image Understanding with Llama 3.2\n", "\n", "Below is a complete example of to ask Llama 3.2 questions about an image." 
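The image examples that follow pass the picture to the model as a base64-encoded payload produced by an `encode_image` helper. As a hedged aside (an illustration only, not necessarily the exact helper the notebook uses), one common way to package a local image for such multimodal messages is a data URL:

```python
import base64
import mimetypes

def image_to_data_url(path: str) -> str:
    # Guess the MIME type from the file extension; fall back to JPEG if unknown.
    mime, _ = mimetypes.guess_type(path)
    mime = mime or "image/jpeg"
    with open(path, "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")
    return f"data:{mime};base64,{encoded}"

# Hypothetical usage: image_to_data_url("Llama_Repo.jpeg")
```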
] @@ -2680,12 +2836,12 @@ "id": "82e381ec", "metadata": {}, "source": [ - "### 3.1 Setup and helpers\n" + "### 4.1 Setup and helpers\n" ] }, { "cell_type": "code", - "execution_count": 54, + "execution_count": null, "id": "44e05e16", "metadata": {}, "outputs": [ @@ -2695,7 +2851,7 @@ "text": [ " % Total % Received % Xferd Average Speed Time Time Time Current\n", " Dload Upload Total Spent Left Speed\n", - "100 275k 100 275k 0 0 923k 0 --:--:-- --:--:-- --:--:-- 927k\n" + "100 275k 100 275k 0 0 901k 0 --:--:-- --:--:-- --:--:-- 903k\n" ] }, { @@ -2705,7 +2861,7 @@ "" ] }, - "execution_count": 54, + "execution_count": 29, "metadata": { "image/jpeg": { "height": 256, @@ -2724,13 +2880,13 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 30, "id": "a2c1e1c2", "metadata": {}, "outputs": [], "source": [ "import base64\n", - "vision_model_id = \"together/meta-llama/Llama-4-Scout-17B-16E-Instruct\"\n", + "vision_model_id = \"meta-llama/Llama-3.2-11B-Vision-Instruct\"\n", "\n", "def encode_image(image_path):\n", " with open(image_path, \"rb\") as image_file:\n", @@ -2744,33 +2900,20 @@ "id": "7737cd41", "metadata": {}, "source": [ - "### 3.2 Using Llama Stack Inference API for multimodal inference" + "### 4.2 Using Llama Stack Inference API for multimodal inference" ] }, { "cell_type": "code", - "execution_count": 56, + "execution_count": 31, "id": "d7914894", "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/chat/completions \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ - "The image features three llamas, each with a distinct color. The colors of the llamas are as follows:\n", - "\n", - "* One llama is white.\n", - "* Another llama is purple.\n", - "* The third llama is also white.\n", - "\n", - "Therefore, there are two different colors present among the llamas: white and purple.\n" + "There are three llamas in the image. The llama in the middle is purple, the llama on the left is white, and the llama on the right is also white, but it is wearing a blue party hat. 
Therefore, there are two different colors of llama in the image: purple and white.\n" ] } ], @@ -2781,9 +2924,11 @@ " \"role\": \"user\",\n", " \"content\": [\n", " {\n", - " \"type\": \"image_url\",\n", - " \"image_url\": {\n", - " \"url\": encode_image(\"Llama_Repo.jpeg\")\n", + " \"type\": \"image\",\n", + " \"image\": {\n", + " \"url\": {\n", + " \"uri\": encode_image(\"Llama_Repo.jpeg\")\n", + " }\n", " }\n", " },\n", " {\n", @@ -2799,6 +2944,14 @@ "\n", "print(response.choices[0].message.content)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f3352379", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -2808,7 +2961,7 @@ "provenance": [] }, "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -2822,7 +2975,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.12" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/docs/quick_start.ipynb b/docs/quick_start.ipynb index 0d5ad227c..4ddde693f 100644 --- a/docs/quick_start.ipynb +++ b/docs/quick_start.ipynb @@ -126,31 +126,17 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "J2kGed0R5PSf", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, + "collapsed": true, "id": "J2kGed0R5PSf", "outputId": "2478ea60-8d35-48a1-b011-f233831740c5" }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m52 packages\u001b[0m \u001b[2min 1.56s\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m3 packages\u001b[0m \u001b[2min 122ms\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m3 packages\u001b[0m \u001b[2min 197ms\u001b[0m\u001b[0m\n", - "\u001b[2mUsing Python 3.12.12 environment at: /opt/homebrew/Caskroom/miniconda/base/envs/test\u001b[0m\n", - "\u001b[2mAudited \u001b[1m1 package\u001b[0m \u001b[2min 11ms\u001b[0m\u001b[0m\n" - ] - } - ], + "outputs": [], "source": [ "import os\n", "import subprocess\n", @@ -164,7 +150,7 @@ "def run_llama_stack_server_background():\n", " log_file = open(\"llama_stack_server.log\", \"w\")\n", " process = subprocess.Popen(\n", - " f\"OLLAMA_URL=http://localhost:11434 uv run --with llama-stack llama stack run starter\",\n", + " f\"OLLAMA_URL=http://localhost:11434 uv run --with llama-stack llama stack run starter\n", " shell=True,\n", " stdout=log_file,\n", " stderr=log_file,\n", @@ -214,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 7, "id": "f779283d", "metadata": {}, "outputs": [ @@ -222,8 +208,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Starting Llama Stack server with PID: 20778\n", - "Waiting for server to start........\n", + "Starting Llama Stack server with PID: 787100\n", + "Waiting for server to start\n", "Server is ready!\n" ] } @@ -243,84 +229,65 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 8, "id": "7da71011", "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: GET http://0.0.0.0:8321/v1/models \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST 
http://0.0.0.0:8321/v1/files \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/vector_stores \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/conversations \"HTTP/1.1 200 OK\"\n", - "INFO:httpx:HTTP Request: POST http://0.0.0.0:8321/v1/responses \"HTTP/1.1 200 OK\"\n" - ] - }, { "name": "stdout", "output_type": "stream", "text": [ + "rag_tool> Ingesting document: https://www.paulgraham.com/greatwork.html\n", "prompt> How do you do great work?\n", - "πŸ€” Doing great work involves a combination of skills, habits, and mindsets. Here are some key principles:\n", - "\n", - "1. **Set Clear Goals**: Start with a clear vision of what you want to achieve. Define specific, measurable, achievable, relevant, and time-bound (SMART) goals.\n", - "\n", - "2. **Plan and Prioritize**: Break your goals into smaller, manageable tasks. Prioritize these tasks based on their importance and urgency.\n", - "\n", - "3. **Focus on Quality**: Aim for high-quality outcomes rather than just finishing tasks. Pay attention to detail, and ensure your work meets or exceeds standards.\n", - "\n", - "4. **Stay Organized**: Keep your workspace, both physical and digital, organized to help you stay focused and efficient.\n", - "\n", - "5. **Manage Your Time**: Use time management techniques such as the Pomodoro Technique, time blocking, or the Eisenhower Box to maximize productivity.\n", - "\n", - "6. **Seek Feedback and Learn**: Regularly seek feedback from peers, mentors, or supervisors. Use constructive criticism to improve continuously.\n", - "\n", - "7. **Innovate and Improve**: Look for ways to improve processes or introduce new ideas. Be open to change and willing to adapt.\n", - "\n", - "8. **Stay Motivated and Persistent**: Keep your end goals in mind to stay motivated. Overcome setbacks with resilience and persistence.\n", - "\n", - "9. **Balance and Rest**: Ensure you maintain a healthy work-life balance. Take breaks and manage stress to sustain long-term productivity.\n", - "\n", - "10. **Reflect and Adjust**: Regularly assess your progress and adjust your strategies as needed. Reflect on what works well and what doesn't.\n", - "\n", - "By incorporating these elements, you can consistently produce high-quality work and achieve excellence in your endeavors.\n" + "\u001b[33minference> \u001b[0m\u001b[33m[k\u001b[0m\u001b[33mnowledge\u001b[0m\u001b[33m_search\u001b[0m\u001b[33m(query\u001b[0m\u001b[33m=\"\u001b[0m\u001b[33mWhat\u001b[0m\u001b[33m is\u001b[0m\u001b[33m the\u001b[0m\u001b[33m key\u001b[0m\u001b[33m to\u001b[0m\u001b[33m doing\u001b[0m\u001b[33m great\u001b[0m\u001b[33m work\u001b[0m\u001b[33m\")]\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[32mtool_execution> Tool:knowledge_search Args:{'query': 'What is the key to doing great work'}\u001b[0m\n", + "\u001b[32mtool_execution> Tool:knowledge_search Response:[TextContentItem(text='knowledge_search tool found 5 chunks:\\nBEGIN of knowledge_search tool results.\\n', type='text'), TextContentItem(text=\"Result 1:\\nDocument_id:docum\\nContent: work. Doing great work means doing something important\\nso well that you expand people's ideas of what's possible. But\\nthere's no threshold for importance. It's a matter of degree, and\\noften hard to judge at the time anyway.\\n\", type='text'), TextContentItem(text=\"Result 2:\\nDocument_id:docum\\nContent: work. Doing great work means doing something important\\nso well that you expand people's ideas of what's possible. 
But\\nthere's no threshold for importance. It's a matter of degree, and\\noften hard to judge at the time anyway.\\n\", type='text'), TextContentItem(text=\"Result 3:\\nDocument_id:docum\\nContent: work. Doing great work means doing something important\\nso well that you expand people's ideas of what's possible. But\\nthere's no threshold for importance. It's a matter of degree, and\\noften hard to judge at the time anyway.\\n\", type='text'), TextContentItem(text=\"Result 4:\\nDocument_id:docum\\nContent: work. Doing great work means doing something important\\nso well that you expand people's ideas of what's possible. But\\nthere's no threshold for importance. It's a matter of degree, and\\noften hard to judge at the time anyway.\\n\", type='text'), TextContentItem(text=\"Result 5:\\nDocument_id:docum\\nContent: work. Doing great work means doing something important\\nso well that you expand people's ideas of what's possible. But\\nthere's no threshold for importance. It's a matter of degree, and\\noften hard to judge at the time anyway.\\n\", type='text'), TextContentItem(text='END of knowledge_search tool results.\\n', type='text'), TextContentItem(text='The above results were retrieved to help answer the user\\'s query: \"What is the key to doing great work\". Use them as supporting information only in answering this query.\\n', type='text')]\u001b[0m\n", + "\u001b[33minference> \u001b[0m\u001b[33mDoing\u001b[0m\u001b[33m great\u001b[0m\u001b[33m work\u001b[0m\u001b[33m means\u001b[0m\u001b[33m doing\u001b[0m\u001b[33m something\u001b[0m\u001b[33m important\u001b[0m\u001b[33m so\u001b[0m\u001b[33m well\u001b[0m\u001b[33m that\u001b[0m\u001b[33m you\u001b[0m\u001b[33m expand\u001b[0m\u001b[33m people\u001b[0m\u001b[33m's\u001b[0m\u001b[33m ideas\u001b[0m\u001b[33m of\u001b[0m\u001b[33m what\u001b[0m\u001b[33m's\u001b[0m\u001b[33m possible\u001b[0m\u001b[33m.\u001b[0m\u001b[33m However\u001b[0m\u001b[33m,\u001b[0m\u001b[33m there\u001b[0m\u001b[33m's\u001b[0m\u001b[33m no\u001b[0m\u001b[33m threshold\u001b[0m\u001b[33m for\u001b[0m\u001b[33m importance\u001b[0m\u001b[33m,\u001b[0m\u001b[33m and\u001b[0m\u001b[33m it\u001b[0m\u001b[33m's\u001b[0m\u001b[33m often\u001b[0m\u001b[33m hard\u001b[0m\u001b[33m to\u001b[0m\u001b[33m judge\u001b[0m\u001b[33m at\u001b[0m\u001b[33m the\u001b[0m\u001b[33m time\u001b[0m\u001b[33m anyway\u001b[0m\u001b[33m.\u001b[0m\u001b[33m Great\u001b[0m\u001b[33m work\u001b[0m\u001b[33m is\u001b[0m\u001b[33m a\u001b[0m\u001b[33m matter\u001b[0m\u001b[33m of\u001b[0m\u001b[33m degree\u001b[0m\u001b[33m,\u001b[0m\u001b[33m and\u001b[0m\u001b[33m it\u001b[0m\u001b[33m can\u001b[0m\u001b[33m be\u001b[0m\u001b[33m difficult\u001b[0m\u001b[33m to\u001b[0m\u001b[33m determine\u001b[0m\u001b[33m whether\u001b[0m\u001b[33m someone\u001b[0m\u001b[33m has\u001b[0m\u001b[33m done\u001b[0m\u001b[33m great\u001b[0m\u001b[33m work\u001b[0m\u001b[33m until\u001b[0m\u001b[33m after\u001b[0m\u001b[33m the\u001b[0m\u001b[33m fact\u001b[0m\u001b[33m.\u001b[0m\u001b[97m\u001b[0m\n", + "\u001b[30m\u001b[0m" ] } ], "source": [ "from llama_stack_client import Agent, AgentEventLogger, RAGDocument, LlamaStackClient\n", - "import requests\n", "\n", - "vector_store_id = \"my_demo_vector_db\"\n", + "vector_db_id = \"my_demo_vector_db\"\n", "client = LlamaStackClient(base_url=\"http://0.0.0.0:8321\")\n", "\n", "models = client.models.list()\n", "\n", "# Select the first ollama and first ollama's embedding model\n", "model_id = next(m for m in models if m.model_type == \"llm\" and 
m.provider_id == \"ollama\").identifier\n", + "embedding_model = next(m for m in models if m.model_type == \"embedding\" and m.provider_id == \"ollama\")\n", + "embedding_model_id = embedding_model.identifier\n", + "embedding_dimension = embedding_model.metadata[\"embedding_dimension\"]\n", "\n", - "\n", + "_ = client.vector_dbs.register(\n", + " vector_db_id=vector_db_id,\n", + " embedding_model=embedding_model_id,\n", + " embedding_dimension=embedding_dimension,\n", + " provider_id=\"faiss\",\n", + ")\n", "source = \"https://www.paulgraham.com/greatwork.html\"\n", - "response = requests.get(source)\n", - "file = client.files.create(\n", - " file=response.content,\n", - " purpose='assistants'\n", + "print(\"rag_tool> Ingesting document:\", source)\n", + "document = RAGDocument(\n", + " document_id=\"document_1\",\n", + " content=source,\n", + " mime_type=\"text/html\",\n", + " metadata={},\n", ")\n", - "vector_store = client.vector_stores.create(\n", - " name=vector_store_id,\n", - " file_ids=[file.id],\n", + "client.tool_runtime.rag_tool.insert(\n", + " documents=[document],\n", + " vector_db_id=vector_db_id,\n", + " chunk_size_in_tokens=50,\n", ")\n", - "\n", "agent = Agent(\n", " client,\n", " model=model_id,\n", " instructions=\"You are a helpful assistant\",\n", " tools=[\n", " {\n", - " \"type\": \"file_search\",\n", - " \"vector_store_ids\": [vector_store_id],\n", + " \"name\": \"builtin::rag/knowledge_search\",\n", + " \"args\": {\"vector_db_ids\": [vector_db_id]},\n", " }\n", " ],\n", ")\n", @@ -335,7 +302,7 @@ ")\n", "\n", "for log in AgentEventLogger().log(response):\n", - " print(log, end=\"\")" + " log.print()" ] }, { @@ -377,7 +344,7 @@ "provenance": [] }, "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -391,7 +358,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.12" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/docs/static/deprecated-llama-stack-spec.html b/docs/static/deprecated-llama-stack-spec.html index 8bd8ecf3f..98ed50c4f 100644 --- a/docs/static/deprecated-llama-stack-spec.html +++ b/docs/static/deprecated-llama-stack-spec.html @@ -5547,7 +5547,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -5798,7 +5798,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -8185,12 +8185,13 @@ }, "model": { "type": "string", - "description": "(Optional) The content moderation model you would like to use." + "description": "The content moderation model you would like to use." } }, "additionalProperties": false, "required": [ - "input" + "input", + "model" ], "title": "RunModerationRequest" }, @@ -13466,7 +13467,7 @@ }, { "name": "Inference", - "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. Three kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.\n- Rerank models: these models reorder the documents based on their relevance to a query.", + "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. 
Two kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.", "x-displayName": "Inference" }, { diff --git a/docs/static/deprecated-llama-stack-spec.yaml b/docs/static/deprecated-llama-stack-spec.yaml index cd86239e8..99c8dd03e 100644 --- a/docs/static/deprecated-llama-stack-spec.yaml +++ b/docs/static/deprecated-llama-stack-spec.yaml @@ -4114,7 +4114,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -4303,7 +4303,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -6104,10 +6104,11 @@ components: model: type: string description: >- - (Optional) The content moderation model you would like to use. + The content moderation model you would like to use. additionalProperties: false required: - input + - model title: RunModerationRequest ModerationObject: type: object @@ -10217,16 +10218,13 @@ tags: embeddings. - This API provides the raw interface to the underlying models. Three kinds of - models are supported: + This API provides the raw interface to the underlying models. Two kinds of models + are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - - Rerank models: these models reorder the documents based on their relevance - to a query. x-displayName: Inference - name: Models description: '' diff --git a/docs/static/experimental-llama-stack-spec.html b/docs/static/experimental-llama-stack-spec.html index ab474180e..7d572f89f 100644 --- a/docs/static/experimental-llama-stack-spec.html +++ b/docs/static/experimental-llama-stack-spec.html @@ -1850,7 +1850,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -3983,7 +3983,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", diff --git a/docs/static/experimental-llama-stack-spec.yaml b/docs/static/experimental-llama-stack-spec.yaml index dd9e43cc5..fee20814c 100644 --- a/docs/static/experimental-llama-stack-spec.yaml +++ b/docs/static/experimental-llama-stack-spec.yaml @@ -1320,7 +1320,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -2927,7 +2927,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html index d9dbe27c9..1091a1cb6 100644 --- a/docs/static/llama-stack-spec.html +++ b/docs/static/llama-stack-spec.html @@ -483,53 +483,86 @@ "name": "after", "in": "query", "description": "An item ID to list items after, used in pagination.", - "required": false, + "required": true, "schema": { - "type": "string" + "oneOf": [ + { + "type": "string" + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } 
}, { "name": "include", "in": "query", "description": "Specify additional output data to include in the response.", - "required": false, + "required": true, "schema": { - "type": "array", - "items": { - "type": "string", - "enum": [ - "web_search_call.action.sources", - "code_interpreter_call.outputs", - "computer_call_output.output.image_url", - "file_search_call.results", - "message.input_image.image_url", - "message.output_text.logprobs", - "reasoning.encrypted_content" - ], - "title": "ConversationItemInclude", - "description": "Specify additional output data to include in the model response." - } + "oneOf": [ + { + "type": "array", + "items": { + "type": "string", + "enum": [ + "code_interpreter_call.outputs", + "computer_call_output.output.image_url", + "file_search_call.results", + "message.input_image.image_url", + "message.output_text.logprobs", + "reasoning.encrypted_content" + ] + } + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } }, { "name": "limit", "in": "query", "description": "A limit on the number of objects to be returned (1-100, default 20).", - "required": false, + "required": true, "schema": { - "type": "integer" + "oneOf": [ + { + "type": "integer" + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } }, { "name": "order", "in": "query", "description": "The order to return items in (asc or desc, default desc).", - "required": false, + "required": true, "schema": { - "type": "string", - "enum": [ - "asc", - "desc" + "oneOf": [ + { + "type": "string", + "enum": [ + "asc", + "desc" + ] + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } ] } } @@ -6767,7 +6800,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -6826,8 +6859,7 @@ "type": "string", "enum": [ "llm", - "embedding", - "rerank" + "embedding" ], "title": "ModelType", "description": "Enumeration of supported model types in Llama Stack." @@ -6919,12 +6951,13 @@ }, "model": { "type": "string", - "description": "(Optional) The content moderation model you would like to use." + "description": "The content moderation model you would like to use." 
} }, "additionalProperties": false, "required": [ - "input" + "input", + "model" ], "title": "RunModerationRequest" }, @@ -10172,7 +10205,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -10654,7 +10687,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -11707,7 +11740,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -13236,7 +13269,7 @@ }, { "name": "Inference", - "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. Three kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.\n- Rerank models: these models reorder the documents based on their relevance to a query.", + "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. Two kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.", "x-displayName": "Inference" }, { diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml index 604a4eace..6c3702374 100644 --- a/docs/static/llama-stack-spec.yaml +++ b/docs/static/llama-stack-spec.yaml @@ -347,46 +347,146 @@ paths: in: query description: >- An item ID to list items after, used in pagination. - required: false + required: true schema: - type: string + oneOf: + - type: string + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: include in: query description: >- Specify additional output data to include in the response. - required: false + required: true schema: - type: array - items: - type: string - enum: - - web_search_call.action.sources - - code_interpreter_call.outputs - - computer_call_output.output.image_url - - file_search_call.results - - message.input_image.image_url - - message.output_text.logprobs - - reasoning.encrypted_content - title: ConversationItemInclude - description: >- - Specify additional output data to include in the model response. + oneOf: + - type: array + items: + type: string + enum: + - code_interpreter_call.outputs + - computer_call_output.output.image_url + - file_search_call.results + - message.input_image.image_url + - message.output_text.logprobs + - reasoning.encrypted_content + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... 
+ + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: limit in: query description: >- A limit on the number of objects to be returned (1-100, default 20). - required: false + required: true schema: - type: integer + oneOf: + - type: integer + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: order in: query description: >- The order to return items in (asc or desc, default desc). - required: false + required: true schema: - type: string - enum: - - asc - - desc + oneOf: + - type: string + enum: + - asc + - desc + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` deprecated: false post: responses: @@ -5127,7 +5227,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -5169,7 +5269,6 @@ components: enum: - llm - embedding - - rerank title: ModelType description: >- Enumeration of supported model types in Llama Stack. @@ -5230,10 +5329,11 @@ components: model: type: string description: >- - (Optional) The content moderation model you would like to use. + The content moderation model you would like to use. additionalProperties: false required: - input + - model title: RunModerationRequest ModerationObject: type: object @@ -7819,7 +7919,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -8127,7 +8227,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -8890,7 +8990,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -10090,16 +10190,13 @@ tags: embeddings. - This API provides the raw interface to the underlying models. Three kinds of - models are supported: + This API provides the raw interface to the underlying models. Two kinds of models + are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - - Rerank models: these models reorder the documents based on their relevance - to a query. 
x-displayName: Inference - name: Inspect description: >- diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html index 687c35db8..ee0a265d3 100644 --- a/docs/static/stainless-llama-stack-spec.html +++ b/docs/static/stainless-llama-stack-spec.html @@ -483,53 +483,86 @@ "name": "after", "in": "query", "description": "An item ID to list items after, used in pagination.", - "required": false, + "required": true, "schema": { - "type": "string" + "oneOf": [ + { + "type": "string" + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } }, { "name": "include", "in": "query", "description": "Specify additional output data to include in the response.", - "required": false, + "required": true, "schema": { - "type": "array", - "items": { - "type": "string", - "enum": [ - "web_search_call.action.sources", - "code_interpreter_call.outputs", - "computer_call_output.output.image_url", - "file_search_call.results", - "message.input_image.image_url", - "message.output_text.logprobs", - "reasoning.encrypted_content" - ], - "title": "ConversationItemInclude", - "description": "Specify additional output data to include in the model response." - } + "oneOf": [ + { + "type": "array", + "items": { + "type": "string", + "enum": [ + "code_interpreter_call.outputs", + "computer_call_output.output.image_url", + "file_search_call.results", + "message.input_image.image_url", + "message.output_text.logprobs", + "reasoning.encrypted_content" + ] + } + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } }, { "name": "limit", "in": "query", "description": "A limit on the number of objects to be returned (1-100, default 20).", - "required": false, + "required": true, "schema": { - "type": "integer" + "oneOf": [ + { + "type": "integer" + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } + ] } }, { "name": "order", "in": "query", "description": "The order to return items in (asc or desc, default desc).", - "required": false, + "required": true, "schema": { - "type": "string", - "enum": [ - "asc", - "desc" + "oneOf": [ + { + "type": "string", + "enum": [ + "asc", + "desc" + ] + }, + { + "type": "object", + "title": "NotGiven", + "description": "A sentinel singleton class used to 
distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```" + } ] } } @@ -8439,7 +8472,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -8498,8 +8531,7 @@ "type": "string", "enum": [ "llm", - "embedding", - "rerank" + "embedding" ], "title": "ModelType", "description": "Enumeration of supported model types in Llama Stack." @@ -8591,12 +8623,13 @@ }, "model": { "type": "string", - "description": "(Optional) The content moderation model you would like to use." + "description": "The content moderation model you would like to use." } }, "additionalProperties": false, "required": [ - "input" + "input", + "model" ], "title": "RunModerationRequest" }, @@ -11844,7 +11877,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -12326,7 +12359,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -13379,7 +13412,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -14926,7 +14959,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -16671,7 +16704,7 @@ "enum": [ "model", "shield", - "vector_store", + "vector_db", "dataset", "scoring_function", "benchmark", @@ -17926,7 +17959,7 @@ }, { "name": "Inference", - "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. Three kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.\n- Rerank models: these models reorder the documents based on their relevance to a query.", + "description": "Llama Stack Inference API for generating completions, chat completions, and embeddings.\n\nThis API provides the raw interface to the underlying models. Two kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.", "x-displayName": "Inference" }, { diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml index bd2d4b7a4..eff01931f 100644 --- a/docs/static/stainless-llama-stack-spec.yaml +++ b/docs/static/stainless-llama-stack-spec.yaml @@ -350,46 +350,146 @@ paths: in: query description: >- An item ID to list items after, used in pagination. - required: false + required: true schema: - type: string + oneOf: + - type: string + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. 
+ + ``` - name: include in: query description: >- Specify additional output data to include in the response. - required: false + required: true schema: - type: array - items: - type: string - enum: - - web_search_call.action.sources - - code_interpreter_call.outputs - - computer_call_output.output.image_url - - file_search_call.results - - message.input_image.image_url - - message.output_text.logprobs - - reasoning.encrypted_content - title: ConversationItemInclude - description: >- - Specify additional output data to include in the model response. + oneOf: + - type: array + items: + type: string + enum: + - code_interpreter_call.outputs + - computer_call_output.output.image_url + - file_search_call.results + - message.input_image.image_url + - message.output_text.logprobs + - reasoning.encrypted_content + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: limit in: query description: >- A limit on the number of objects to be returned (1-100, default 20). - required: false + required: true schema: - type: integer + oneOf: + - type: integer + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` - name: order in: query description: >- The order to return items in (asc or desc, default desc). - required: false + required: true schema: - type: string - enum: - - asc - - desc + oneOf: + - type: string + enum: + - asc + - desc + - type: object + title: NotGiven + description: >- + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different + behavior). + + For example: + + + ```py + + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: + ... + + + + get(timeout=1) # 1s timeout + + get(timeout=None) # No timeout + + get() # Default timeout behavior, which may not be statically known + at the method definition. + + ``` deprecated: false post: responses: @@ -6340,7 +6440,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -6382,7 +6482,6 @@ components: enum: - llm - embedding - - rerank title: ModelType description: >- Enumeration of supported model types in Llama Stack. @@ -6443,10 +6542,11 @@ components: model: type: string description: >- - (Optional) The content moderation model you would like to use. + The content moderation model you would like to use. 
additionalProperties: false required: - input + - model title: RunModerationRequest ModerationObject: type: object @@ -9032,7 +9132,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -9340,7 +9440,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -10103,7 +10203,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -11225,7 +11325,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -12552,7 +12652,7 @@ components: enum: - model - shield - - vector_store + - vector_db - dataset - scoring_function - benchmark @@ -13485,16 +13585,13 @@ tags: embeddings. - This API provides the raw interface to the underlying models. Three kinds of - models are supported: + This API provides the raw interface to the underlying models. Two kinds of models + are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - - Rerank models: these models reorder the documents based on their relevance - to a query. x-displayName: Inference - name: Inspect description: >- diff --git a/llama_stack/apis/conversations/conversations.py b/llama_stack/apis/conversations/conversations.py index d75683efa..d7752995d 100644 --- a/llama_stack/apis/conversations/conversations.py +++ b/llama_stack/apis/conversations/conversations.py @@ -4,9 +4,11 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -from enum import StrEnum from typing import Annotated, Literal, Protocol, runtime_checkable +from openai import NOT_GIVEN +from openai._types import NotGiven +from openai.types.responses.response_includable import ResponseIncludable from pydantic import BaseModel, Field from llama_stack.apis.agents.openai_responses import ( @@ -21,7 +23,7 @@ from llama_stack.apis.agents.openai_responses import ( OpenAIResponseOutputMessageWebSearchToolCall, ) from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, register_schema, webmethod Metadata = dict[str, str] @@ -148,20 +150,6 @@ class ConversationItemCreateRequest(BaseModel): ) -class ConversationItemInclude(StrEnum): - """ - Specify additional output data to include in the model response. - """ - - web_search_call_action_sources = "web_search_call.action.sources" - code_interpreter_call_outputs = "code_interpreter_call.outputs" - computer_call_output_output_image_url = "computer_call_output.output.image_url" - file_search_call_results = "file_search_call.results" - message_input_image_image_url = "message.input_image.image_url" - message_output_text_logprobs = "message.output_text.logprobs" - reasoning_encrypted_content = "reasoning.encrypted_content" - - @json_schema_type class ConversationItemList(BaseModel): """List of conversation items with pagination.""" @@ -262,13 +250,13 @@ class Conversations(Protocol): ... 
@webmethod(route="/conversations/{conversation_id}/items", method="GET", level=LLAMA_STACK_API_V1) - async def list_items( + async def list( self, conversation_id: str, - after: str | None = None, - include: list[ConversationItemInclude] | None = None, - limit: int | None = None, - order: Literal["asc", "desc"] | None = None, + after: str | NotGiven = NOT_GIVEN, + include: list[ResponseIncludable] | NotGiven = NOT_GIVEN, + limit: int | NotGiven = NOT_GIVEN, + order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN, ) -> ConversationItemList: """List items. diff --git a/llama_stack/apis/datatypes.py b/llama_stack/apis/datatypes.py index ae01c5dfc..5777f3d04 100644 --- a/llama_stack/apis/datatypes.py +++ b/llama_stack/apis/datatypes.py @@ -117,9 +117,11 @@ class Api(Enum, metaclass=DynamicApiMeta): post_training = "post_training" tool_runtime = "tool_runtime" + telemetry = "telemetry" + models = "models" shields = "shields" - vector_stores = "vector_stores" # only used for routing table + vector_dbs = "vector_dbs" # only used for routing datasets = "datasets" scoring_functions = "scoring_functions" benchmarks = "benchmarks" diff --git a/llama_stack/apis/files/files.py b/llama_stack/apis/files/files.py index 6386f4eca..f1d3764db 100644 --- a/llama_stack/apis/files/files.py +++ b/llama_stack/apis/files/files.py @@ -12,7 +12,7 @@ from pydantic import BaseModel, Field from llama_stack.apis.common.responses import Order from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py index 7dc565244..027246470 100644 --- a/llama_stack/apis/inference/inference.py +++ b/llama_stack/apis/inference/inference.py @@ -23,7 +23,6 @@ from llama_stack.apis.common.responses import Order from llama_stack.apis.models import Model from llama_stack.apis.telemetry import MetricResponseMixin from llama_stack.apis.version import LLAMA_STACK_API_V1, LLAMA_STACK_API_V1ALPHA -from llama_stack.core.telemetry.trace_protocol import trace_protocol from llama_stack.models.llama.datatypes import ( BuiltinTool, StopReason, @@ -31,6 +30,7 @@ from llama_stack.models.llama.datatypes import ( ToolDefinition, ToolPromptFormat, ) +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, register_schema, webmethod register_schema(ToolCall) @@ -1234,10 +1234,9 @@ class Inference(InferenceProvider): Llama Stack Inference API for generating completions, chat completions, and embeddings. - This API provides the raw interface to the underlying models. Three kinds of models are supported: + This API provides the raw interface to the underlying models. Two kinds of models are supported: - LLM models: these models generate "raw" and "chat" (conversational) completions. - Embedding models: these models generate embeddings to be used for semantic search. - - Rerank models: these models reorder the documents based on their relevance to a query. 
""" @webmethod(route="/openai/v1/chat/completions", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) diff --git a/llama_stack/apis/models/models.py b/llama_stack/apis/models/models.py index 903bd6510..10949cb95 100644 --- a/llama_stack/apis/models/models.py +++ b/llama_stack/apis/models/models.py @@ -11,7 +11,7 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator from llama_stack.apis.resource import Resource, ResourceType from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod @@ -27,12 +27,10 @@ class ModelType(StrEnum): """Enumeration of supported model types in Llama Stack. :cvar llm: Large language model for text generation and completion :cvar embedding: Embedding model for converting text to vector representations - :cvar rerank: Reranking model for reordering documents based on their relevance to a query """ llm = "llm" embedding = "embedding" - rerank = "rerank" @json_schema_type diff --git a/llama_stack/apis/prompts/prompts.py b/llama_stack/apis/prompts/prompts.py index 4651b9294..b39c363c7 100644 --- a/llama_stack/apis/prompts/prompts.py +++ b/llama_stack/apis/prompts/prompts.py @@ -11,7 +11,7 @@ from typing import Protocol, runtime_checkable from pydantic import BaseModel, Field, field_validator, model_validator from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod diff --git a/llama_stack/apis/resource.py b/llama_stack/apis/resource.py index dafdb28b0..7c4130f7d 100644 --- a/llama_stack/apis/resource.py +++ b/llama_stack/apis/resource.py @@ -13,7 +13,7 @@ from pydantic import BaseModel, Field class ResourceType(StrEnum): model = "model" shield = "shield" - vector_store = "vector_store" + vector_db = "vector_db" dataset = "dataset" scoring_function = "scoring_function" benchmark = "benchmark" @@ -34,4 +34,4 @@ class Resource(BaseModel): provider_id: str = Field(description="ID of the provider that owns this resource") - type: ResourceType = Field(description="Type of resource (e.g. 'model', 'shield', 'vector_store', etc.)") + type: ResourceType = Field(description="Type of resource (e.g. 
'model', 'shield', 'vector_db', etc.)") diff --git a/llama_stack/apis/safety/safety.py b/llama_stack/apis/safety/safety.py index 249473cae..eaaa937d3 100644 --- a/llama_stack/apis/safety/safety.py +++ b/llama_stack/apis/safety/safety.py @@ -12,7 +12,7 @@ from pydantic import BaseModel, Field from llama_stack.apis.inference import OpenAIMessageParam from llama_stack.apis.shields import Shield from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod @@ -123,13 +123,13 @@ class Safety(Protocol): @webmethod(route="/openai/v1/moderations", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/moderations", method="POST", level=LLAMA_STACK_API_V1) - async def run_moderation(self, input: str | list[str], model: str | None = None) -> ModerationObject: + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: """Create moderation. Classifies if text and/or image inputs are potentially harmful. :param input: Input (or inputs) to classify. Can be a single string, an array of strings, or an array of multi-modal input objects similar to other models. - :param model: (Optional) The content moderation model you would like to use. + :param model: The content moderation model you would like to use. :returns: A moderation object. """ ... diff --git a/llama_stack/apis/shields/shields.py b/llama_stack/apis/shields/shields.py index 565e1db15..5d967cf02 100644 --- a/llama_stack/apis/shields/shields.py +++ b/llama_stack/apis/shields/shields.py @@ -10,7 +10,7 @@ from pydantic import BaseModel from llama_stack.apis.resource import Resource, ResourceType from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod diff --git a/llama_stack/apis/tools/rag_tool.py b/llama_stack/apis/tools/rag_tool.py index c508721f1..ed7847e23 100644 --- a/llama_stack/apis/tools/rag_tool.py +++ b/llama_stack/apis/tools/rag_tool.py @@ -12,7 +12,7 @@ from typing_extensions import runtime_checkable from llama_stack.apis.common.content_types import URL, InterleavedContent from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, register_schema, webmethod diff --git a/llama_stack/apis/tools/tools.py b/llama_stack/apis/tools/tools.py index b13ac2f19..b6a1a2543 100644 --- a/llama_stack/apis/tools/tools.py +++ b/llama_stack/apis/tools/tools.py @@ -13,7 +13,7 @@ from typing_extensions import runtime_checkable from llama_stack.apis.common.content_types import URL, InterleavedContent from llama_stack.apis.resource import Resource, ResourceType from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, webmethod from .rag_tool import RAGToolRuntime diff --git a/llama_stack/apis/vector_stores/__init__.py b/llama_stack/apis/vector_dbs/__init__.py similarity index 87% rename 
from llama_stack/apis/vector_stores/__init__.py rename to llama_stack/apis/vector_dbs/__init__.py index 8fc34058a..af34ba9d4 100644 --- a/llama_stack/apis/vector_stores/__init__.py +++ b/llama_stack/apis/vector_dbs/__init__.py @@ -4,4 +4,4 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -from .vector_stores import * +from .vector_dbs import * diff --git a/llama_stack/apis/vector_dbs/vector_dbs.py b/llama_stack/apis/vector_dbs/vector_dbs.py new file mode 100644 index 000000000..0368095cb --- /dev/null +++ b/llama_stack/apis/vector_dbs/vector_dbs.py @@ -0,0 +1,93 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from typing import Literal, Protocol, runtime_checkable + +from pydantic import BaseModel + +from llama_stack.apis.resource import Resource, ResourceType +from llama_stack.schema_utils import json_schema_type + + +@json_schema_type +class VectorDB(Resource): + """Vector database resource for storing and querying vector embeddings. + + :param type: Type of resource, always 'vector_db' for vector databases + :param embedding_model: Name of the embedding model to use for vector generation + :param embedding_dimension: Dimension of the embedding vectors + """ + + type: Literal[ResourceType.vector_db] = ResourceType.vector_db + + embedding_model: str + embedding_dimension: int + vector_db_name: str | None = None + + @property + def vector_db_id(self) -> str: + return self.identifier + + @property + def provider_vector_db_id(self) -> str | None: + return self.provider_resource_id + + +class VectorDBInput(BaseModel): + """Input parameters for creating or configuring a vector database. + + :param vector_db_id: Unique identifier for the vector database + :param embedding_model: Name of the embedding model to use for vector generation + :param embedding_dimension: Dimension of the embedding vectors + :param provider_vector_db_id: (Optional) Provider-specific identifier for the vector database + """ + + vector_db_id: str + embedding_model: str + embedding_dimension: int + provider_id: str | None = None + provider_vector_db_id: str | None = None + + +class ListVectorDBsResponse(BaseModel): + """Response from listing vector databases. + + :param data: List of vector databases + """ + + data: list[VectorDB] + + +@runtime_checkable +class VectorDBs(Protocol): + """Internal protocol for vector_dbs routing - no public API endpoints.""" + + async def list_vector_dbs(self) -> ListVectorDBsResponse: + """Internal method to list vector databases.""" + ... + + async def get_vector_db( + self, + vector_db_id: str, + ) -> VectorDB: + """Internal method to get a vector database by ID.""" + ... + + async def register_vector_db( + self, + vector_db_id: str, + embedding_model: str, + embedding_dimension: int | None = 384, + provider_id: str | None = None, + vector_db_name: str | None = None, + provider_vector_db_id: str | None = None, + ) -> VectorDB: + """Internal method to register a vector database.""" + ... + + async def unregister_vector_db(self, vector_db_id: str) -> None: + """Internal method to unregister a vector database.""" + ... 
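For orientation, the new internal VectorDBs routing protocol above backs the same vector_db flow that the notebook hunk earlier in this diff switches to. Below is a minimal client-side sketch of that flow, mirroring the notebook cell; it assumes a locally running stack at http://0.0.0.0:8321 with an Ollama embedding model and the faiss vector_io provider, and the identifiers (vector_db_id, document_id, source URL) are the illustrative values from the notebook, not part of the API change itself.

```python
from llama_stack_client import LlamaStackClient, RAGDocument

client = LlamaStackClient(base_url="http://0.0.0.0:8321")

# Pick an embedding model served by Ollama, as the notebook cell does.
embedding_model = next(
    m for m in client.models.list()
    if m.model_type == "embedding" and m.provider_id == "ollama"
)

# Register a vector DB; this is the kind of vector_db resource the
# VectorDBs protocol above manages on the server side.
client.vector_dbs.register(
    vector_db_id="my_demo_vector_db",
    embedding_model=embedding_model.identifier,
    embedding_dimension=embedding_model.metadata["embedding_dimension"],
    provider_id="faiss",
)

# Ingest a document into the registered vector DB via the RAG tool runtime.
client.tool_runtime.rag_tool.insert(
    documents=[
        RAGDocument(
            document_id="document_1",
            content="https://www.paulgraham.com/greatwork.html",
            mime_type="text/html",
            metadata={},
        )
    ],
    vector_db_id="my_demo_vector_db",
    chunk_size_in_tokens=50,
)
```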
diff --git a/llama_stack/apis/vector_io/vector_io.py b/llama_stack/apis/vector_io/vector_io.py index 6e855ab99..a309c47f9 100644 --- a/llama_stack/apis/vector_io/vector_io.py +++ b/llama_stack/apis/vector_io/vector_io.py @@ -15,9 +15,9 @@ from fastapi import Body from pydantic import BaseModel, Field from llama_stack.apis.inference import InterleavedContent -from llama_stack.apis.vector_stores import VectorStore +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.version import LLAMA_STACK_API_V1 -from llama_stack.core.telemetry.trace_protocol import trace_protocol +from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.providers.utils.vector_io.vector_utils import generate_chunk_id from llama_stack.schema_utils import json_schema_type, webmethod from llama_stack.strong_typing.schema import register_schema @@ -140,7 +140,6 @@ class VectorStoreFileCounts(BaseModel): total: int -# TODO: rename this as OpenAIVectorStore @json_schema_type class VectorStoreObject(BaseModel): """OpenAI Vector Store object. @@ -518,18 +517,17 @@ class OpenAICreateVectorStoreFileBatchRequestWithExtraBody(BaseModel, extra="all chunking_strategy: VectorStoreChunkingStrategy | None = None -class VectorStoreTable(Protocol): - def get_vector_store(self, vector_store_id: str) -> VectorStore | None: ... +class VectorDBStore(Protocol): + def get_vector_db(self, vector_db_id: str) -> VectorDB | None: ... @runtime_checkable @trace_protocol class VectorIO(Protocol): - vector_store_table: VectorStoreTable | None = None + vector_db_store: VectorDBStore | None = None # this will just block now until chunks are inserted, but it should # probably return a Job instance which can be polled for completion - # TODO: rename vector_db_id to vector_store_id once Stainless is working @webmethod(route="/vector-io/insert", method="POST", level=LLAMA_STACK_API_V1) async def insert_chunks( self, @@ -548,7 +546,6 @@ class VectorIO(Protocol): """ ... - # TODO: rename vector_db_id to vector_store_id once Stainless is working @webmethod(route="/vector-io/query", method="POST", level=LLAMA_STACK_API_V1) async def query_chunks( self, diff --git a/llama_stack/apis/vector_stores/vector_stores.py b/llama_stack/apis/vector_stores/vector_stores.py deleted file mode 100644 index 524624028..000000000 --- a/llama_stack/apis/vector_stores/vector_stores.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -from typing import Literal - -from pydantic import BaseModel - -from llama_stack.apis.resource import Resource, ResourceType - - -# Internal resource type for storing the vector store routing and other information -class VectorStore(Resource): - """Vector database resource for storing and querying vector embeddings. 
- - :param type: Type of resource, always 'vector_store' for vector stores - :param embedding_model: Name of the embedding model to use for vector generation - :param embedding_dimension: Dimension of the embedding vectors - """ - - type: Literal[ResourceType.vector_store] = ResourceType.vector_store - - embedding_model: str - embedding_dimension: int - vector_store_name: str | None = None - - @property - def vector_store_id(self) -> str: - return self.identifier - - @property - def provider_vector_store_id(self) -> str | None: - return self.provider_resource_id - - -class VectorStoreInput(BaseModel): - """Input parameters for creating or configuring a vector database. - - :param vector_store_id: Unique identifier for the vector store - :param embedding_model: Name of the embedding model to use for vector generation - :param embedding_dimension: Dimension of the embedding vectors - :param provider_vector_store_id: (Optional) Provider-specific identifier for the vector store - """ - - vector_store_id: str - embedding_model: str - embedding_dimension: int - provider_id: str | None = None - provider_vector_store_id: str | None = None diff --git a/llama_stack/cli/llama.py b/llama_stack/cli/llama.py index aa8893bc0..5ff15d8d7 100644 --- a/llama_stack/cli/llama.py +++ b/llama_stack/cli/llama.py @@ -6,8 +6,6 @@ import argparse -from llama_stack.log import setup_logging - from .stack import StackParser from .stack.utils import print_subcommand_description @@ -44,9 +42,6 @@ class LlamaCLIParser: def main(): - # Initialize logging from environment variables before any other operations - setup_logging() - parser = LlamaCLIParser() args = parser.parse_args() parser.run(args) diff --git a/llama_stack/cli/stack/_build.py b/llama_stack/cli/stack/_build.py new file mode 100644 index 000000000..2a30ff394 --- /dev/null +++ b/llama_stack/cli/stack/_build.py @@ -0,0 +1,519 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+ +import argparse +import importlib.resources +import json +import os +import shutil +import sys +import textwrap +from functools import lru_cache +from importlib.abc import Traversable +from pathlib import Path + +import yaml +from prompt_toolkit import prompt +from prompt_toolkit.completion import WordCompleter +from prompt_toolkit.validation import Validator +from termcolor import colored, cprint + +from llama_stack.cli.stack.utils import ImageType +from llama_stack.cli.table import print_table +from llama_stack.core.build import ( + SERVER_DEPENDENCIES, + build_image, + get_provider_dependencies, +) +from llama_stack.core.configure import parse_and_maybe_upgrade_config +from llama_stack.core.datatypes import ( + BuildConfig, + BuildProvider, + DistributionSpec, + Provider, + StackRunConfig, +) +from llama_stack.core.distribution import get_provider_registry +from llama_stack.core.external import load_external_apis +from llama_stack.core.resolver import InvalidProviderError +from llama_stack.core.stack import replace_env_vars +from llama_stack.core.storage.datatypes import ( + InferenceStoreReference, + KVStoreReference, + ServerStoresConfig, + SqliteKVStoreConfig, + SqliteSqlStoreConfig, + SqlStoreReference, + StorageConfig, +) +from llama_stack.core.utils.config_dirs import DISTRIBS_BASE_DIR, EXTERNAL_PROVIDERS_DIR +from llama_stack.core.utils.dynamic import instantiate_class_type +from llama_stack.core.utils.exec import formulate_run_args, run_command +from llama_stack.core.utils.image_types import LlamaStackImageType +from llama_stack.providers.datatypes import Api + +DISTRIBS_PATH = Path(__file__).parent.parent.parent / "distributions" + + +@lru_cache +def available_distros_specs() -> dict[str, BuildConfig]: + import yaml + + distro_specs = {} + for p in DISTRIBS_PATH.rglob("*build.yaml"): + distro_name = p.parent.name + with open(p) as f: + build_config = BuildConfig(**yaml.safe_load(f)) + distro_specs[distro_name] = build_config + return distro_specs + + +def run_stack_build_command(args: argparse.Namespace) -> None: + if args.list_distros: + return _run_distro_list_cmd() + + if args.image_type == ImageType.VENV.value: + current_venv = os.environ.get("VIRTUAL_ENV") + image_name = args.image_name or current_venv + else: + image_name = args.image_name + + if args.template: + cprint( + "The --template argument is deprecated. Please use --distro instead.", + color="red", + file=sys.stderr, + ) + distro_name = args.template + else: + distro_name = args.distribution + + if distro_name: + available_distros = available_distros_specs() + if distro_name not in available_distros: + cprint( + f"Could not find distribution {distro_name}. Please run `llama stack build --list-distros` to check out the available distributions", + color="red", + file=sys.stderr, + ) + sys.exit(1) + build_config = available_distros[distro_name] + if args.image_type: + build_config.image_type = args.image_type + else: + cprint( + f"Please specify a image-type ({' | '.join(e.value for e in ImageType)}) for {distro_name}", + color="red", + file=sys.stderr, + ) + sys.exit(1) + elif args.providers: + provider_list: dict[str, list[BuildProvider]] = dict() + for api_provider in args.providers.split(","): + if "=" not in api_provider: + cprint( + "Could not parse `--providers`. 
Please ensure the list is in the format api1=provider1,api2=provider2", + color="red", + file=sys.stderr, + ) + sys.exit(1) + api, provider_type = api_provider.split("=") + providers_for_api = get_provider_registry().get(Api(api), None) + if providers_for_api is None: + cprint( + f"{api} is not a valid API.", + color="red", + file=sys.stderr, + ) + sys.exit(1) + if provider_type in providers_for_api: + provider = BuildProvider( + provider_type=provider_type, + module=None, + ) + provider_list.setdefault(api, []).append(provider) + else: + cprint( + f"{provider_type} is not a valid provider for the {api} API.", + color="red", + file=sys.stderr, + ) + sys.exit(1) + distribution_spec = DistributionSpec( + providers=provider_list, + description=",".join(args.providers), + ) + if not args.image_type: + cprint( + f"Please specify a image-type (container | venv) for {args.template}", + color="red", + file=sys.stderr, + ) + sys.exit(1) + + build_config = BuildConfig(image_type=args.image_type, distribution_spec=distribution_spec) + elif not args.config and not distro_name: + name = prompt( + "> Enter a name for your Llama Stack (e.g. my-local-stack): ", + validator=Validator.from_callable( + lambda x: len(x) > 0, + error_message="Name cannot be empty, please enter a name", + ), + ) + + image_type = prompt( + "> Enter the image type you want your Llama Stack to be built as (use <TAB> to see options): ", + completer=WordCompleter([e.value for e in ImageType]), + complete_while_typing=True, + validator=Validator.from_callable( + lambda x: x in [e.value for e in ImageType], + error_message="Invalid image type. Use <TAB> to see options", + ), + ) + + image_name = f"llamastack-{name}" + + cprint( + textwrap.dedent( + """ + Llama Stack is composed of several APIs working together. Let's select + the provider types (implementations) you want to use for these APIs.
+ """, + ), + color="green", + file=sys.stderr, + ) + + cprint("Tip: use <TAB> to see options for the providers.\n", color="green", file=sys.stderr) + + providers: dict[str, list[BuildProvider]] = dict() + for api, providers_for_api in get_provider_registry().items(): + available_providers = [x for x in providers_for_api.keys() if x not in ("remote", "remote::sample")] + if not available_providers: + continue + api_provider = prompt( + f"> Enter provider for API {api.value}: ", + completer=WordCompleter(available_providers), + complete_while_typing=True, + validator=Validator.from_callable( + lambda x: x in available_providers, # noqa: B023 - see https://github.com/astral-sh/ruff/issues/7847 + error_message="Invalid provider, use <TAB> to see options", + ), + ) + + string_providers = api_provider.split(" ") + + for provider in string_providers: + providers.setdefault(api.value, []).append(BuildProvider(provider_type=provider)) + + description = prompt( + "\n > (Optional) Enter a short description for your Llama Stack: ", + default="", + ) + + distribution_spec = DistributionSpec( + providers=providers, + description=description, + ) + + build_config = BuildConfig(image_type=image_type, distribution_spec=distribution_spec) + else: + with open(args.config) as f: + try: + contents = yaml.safe_load(f) + contents = replace_env_vars(contents) + build_config = BuildConfig(**contents) + if args.image_type: + build_config.image_type = args.image_type + except Exception as e: + cprint( + f"Could not parse config file {args.config}: {e}", + color="red", + file=sys.stderr, + ) + sys.exit(1) + + if args.print_deps_only: + print(f"# Dependencies for {distro_name or args.config or image_name}") + normal_deps, special_deps, external_provider_dependencies = get_provider_dependencies(build_config) + normal_deps += SERVER_DEPENDENCIES + print(f"uv pip install {' '.join(normal_deps)}") + for special_dep in special_deps: + print(f"uv pip install {special_dep}") + for external_dep in external_provider_dependencies: + print(f"uv pip install {external_dep}") + return + + try: + run_config = _run_stack_build_command_from_build_config( + build_config, + image_name=image_name, + config_path=args.config, + distro_name=distro_name, + ) + + except (Exception, RuntimeError) as exc: + import traceback + + cprint( + f"Error building stack: {exc}", + color="red", + file=sys.stderr, + ) + cprint("Stack trace:", color="red", file=sys.stderr) + traceback.print_exc() + sys.exit(1) + + if run_config is None: + cprint( + "Run config path is empty", + color="red", + file=sys.stderr, + ) + sys.exit(1) + + if args.run: + config_dict = yaml.safe_load(run_config.read_text()) + config = parse_and_maybe_upgrade_config(config_dict) + if config.external_providers_dir and not config.external_providers_dir.exists(): + config.external_providers_dir.mkdir(exist_ok=True) + run_args = formulate_run_args(args.image_type, image_name or config.image_name) + run_args.extend([str(os.getenv("LLAMA_STACK_PORT", 8321)), "--config", str(run_config)]) + run_command(run_args) + + +def _generate_run_config( + build_config: BuildConfig, + build_dir: Path, + image_name: str, +) -> Path: + """ + Generate a run.yaml template file for user to edit from a build.yaml file + """ + apis = list(build_config.distribution_spec.providers.keys()) + distro_dir = DISTRIBS_BASE_DIR / image_name + storage = StorageConfig( + backends={ + "kv_default": SqliteKVStoreConfig( + db_path=f"${{env.SQLITE_STORE_DIR:={distro_dir}}}/kvstore.db", + ), + "sql_default": SqliteSqlStoreConfig( +
db_path=f"${{env.SQLITE_STORE_DIR:={distro_dir}}}/sql_store.db", + ), + }, + stores=ServerStoresConfig( + metadata=KVStoreReference( + backend="kv_default", + namespace="registry", + ), + inference=InferenceStoreReference( + backend="sql_default", + table_name="inference_store", + ), + conversations=SqlStoreReference( + backend="sql_default", + table_name="openai_conversations", + ), + ), + ) + + run_config = StackRunConfig( + container_image=(image_name if build_config.image_type == LlamaStackImageType.CONTAINER.value else None), + image_name=image_name, + apis=apis, + providers={}, + storage=storage, + external_providers_dir=build_config.external_providers_dir + if build_config.external_providers_dir + else EXTERNAL_PROVIDERS_DIR, + ) + # build providers dict + provider_registry = get_provider_registry(build_config) + for api in apis: + run_config.providers[api] = [] + providers = build_config.distribution_spec.providers[api] + + for provider in providers: + pid = provider.provider_type.split("::")[-1] + + p = provider_registry[Api(api)][provider.provider_type] + if p.deprecation_error: + raise InvalidProviderError(p.deprecation_error) + + try: + config_type = instantiate_class_type(provider_registry[Api(api)][provider.provider_type].config_class) + except (ModuleNotFoundError, ValueError) as exc: + # HACK ALERT: + # This code executes after building is done, the import cannot work since the + # package is either available in the venv or container - not available on the host. + # TODO: use a "is_external" flag in ProviderSpec to check if the provider is + # external + cprint( + f"Failed to import provider {provider.provider_type} for API {api} - assuming it's external, skipping: {exc}", + color="yellow", + file=sys.stderr, + ) + # Set config_type to None to avoid UnboundLocalError + config_type = None + + if config_type is not None and hasattr(config_type, "sample_run_config"): + config = config_type.sample_run_config(__distro_dir__=f"~/.llama/distributions/{image_name}") + else: + config = {} + + p_spec = Provider( + provider_id=pid, + provider_type=provider.provider_type, + config=config, + module=provider.module, + ) + run_config.providers[api].append(p_spec) + + run_config_file = build_dir / f"{image_name}-run.yaml" + + with open(run_config_file, "w") as f: + to_write = json.loads(run_config.model_dump_json()) + f.write(yaml.dump(to_write, sort_keys=False)) + + # Only print this message for non-container builds since it will be displayed before the + # container is built + # For non-container builds, the run.yaml is generated at the very end of the build process so it + # makes sense to display this message + if build_config.image_type != LlamaStackImageType.CONTAINER.value: + cprint(f"You can now run your stack with `llama stack run {run_config_file}`", color="green", file=sys.stderr) + return run_config_file + + +def _run_stack_build_command_from_build_config( + build_config: BuildConfig, + image_name: str | None = None, + distro_name: str | None = None, + config_path: str | None = None, +) -> Path | Traversable: + image_name = image_name or build_config.image_name + if build_config.image_type == LlamaStackImageType.CONTAINER.value: + if distro_name: + image_name = f"distribution-{distro_name}" + else: + if not image_name: + raise ValueError("Please specify an image name when building a container image without a template") + else: + if not image_name and os.environ.get("UV_SYSTEM_PYTHON"): + image_name = "__system__" + if not image_name: + raise ValueError("Please specify an image 
name when building a venv image") + + # At this point, image_name should be guaranteed to be a string + if image_name is None: + raise ValueError("image_name should not be None after validation") + + if distro_name: + build_dir = DISTRIBS_BASE_DIR / distro_name + build_file_path = build_dir / f"{distro_name}-build.yaml" + else: + if image_name is None: + raise ValueError("image_name cannot be None") + build_dir = DISTRIBS_BASE_DIR / image_name + build_file_path = build_dir / f"{image_name}-build.yaml" + + os.makedirs(build_dir, exist_ok=True) + run_config_file = None + # Generate the run.yaml so it can be included in the container image with the proper entrypoint + # Only do this if we're building a container image and we're not using a template + if build_config.image_type == LlamaStackImageType.CONTAINER.value and not distro_name and config_path: + cprint("Generating run.yaml file", color="yellow", file=sys.stderr) + run_config_file = _generate_run_config(build_config, build_dir, image_name) + + with open(build_file_path, "w") as f: + to_write = json.loads(build_config.model_dump_json(exclude_none=True)) + f.write(yaml.dump(to_write, sort_keys=False)) + + # We first install the external APIs so that the build process can use them and discover the + # providers dependencies + if build_config.external_apis_dir: + cprint("Installing external APIs", color="yellow", file=sys.stderr) + external_apis = load_external_apis(build_config) + if external_apis: + # install the external APIs + packages = [] + for _, api_spec in external_apis.items(): + if api_spec.pip_packages: + packages.extend(api_spec.pip_packages) + cprint( + f"Installing {api_spec.name} with pip packages {api_spec.pip_packages}", + color="yellow", + file=sys.stderr, + ) + return_code = run_command(["uv", "pip", "install", *packages]) + if return_code != 0: + packages_str = ", ".join(packages) + raise RuntimeError( + f"Failed to install external APIs packages: {packages_str} (return code: {return_code})" + ) + + return_code = build_image( + build_config, + image_name, + distro_or_config=distro_name or config_path or str(build_file_path), + run_config=run_config_file.as_posix() if run_config_file else None, + ) + if return_code != 0: + raise RuntimeError(f"Failed to build image {image_name}") + + if distro_name: + # copy run.yaml from distribution to build_dir instead of generating it again + distro_path = importlib.resources.files("llama_stack") / f"distributions/{distro_name}/run.yaml" + run_config_file = build_dir / f"{distro_name}-run.yaml" + + with importlib.resources.as_file(distro_path) as path: + shutil.copy(path, run_config_file) + + cprint("Build Successful!", color="green", file=sys.stderr) + cprint(f"You can find the newly-built distribution here: {run_config_file}", color="blue", file=sys.stderr) + if build_config.image_type == LlamaStackImageType.VENV: + cprint( + "You can run the new Llama Stack distro (after activating " + + colored(image_name, "cyan") + + ") via: " + + colored(f"llama stack run {run_config_file}", "blue"), + color="green", + file=sys.stderr, + ) + elif build_config.image_type == LlamaStackImageType.CONTAINER: + cprint( + "You can run the container with: " + + colored( + f"docker run -p 8321:8321 -v ~/.llama:/root/.llama localhost/{image_name} --port 8321", "blue" + ), + color="green", + file=sys.stderr, + ) + return distro_path + else: + return _generate_run_config(build_config, build_dir, image_name) + + +def _run_distro_list_cmd() -> None: + headers = [ + "Distribution Name", + # "Providers", + 
"Description", + ] + + rows = [] + for distro_name, spec in available_distros_specs().items(): + rows.append( + [ + distro_name, + # json.dumps(spec.distribution_spec.providers, indent=2), + spec.distribution_spec.description, + ] + ) + print_table( + rows, + headers, + separate_rows=True, + ) diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py new file mode 100644 index 000000000..cbe8ed881 --- /dev/null +++ b/llama_stack/cli/stack/build.py @@ -0,0 +1,106 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. +import argparse +import textwrap + +from llama_stack.cli.stack.utils import ImageType +from llama_stack.cli.subcommand import Subcommand +from llama_stack.log import get_logger + +logger = get_logger(__name__, category="cli") + + +class StackBuild(Subcommand): + def __init__(self, subparsers: argparse._SubParsersAction): + super().__init__() + self.parser = subparsers.add_parser( + "build", + prog="llama stack build", + description="[DEPRECATED] Build a Llama stack container. This command is deprecated and will be removed in a future release. Use `llama stack list-deps ' instead.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + self._add_arguments() + self.parser.set_defaults(func=self._run_stack_build_command) + + def _add_arguments(self): + self.parser.add_argument( + "--config", + type=str, + default=None, + help="Path to a config file to use for the build. You can find example configs in llama_stack.cores/**/build.yaml. If this argument is not provided, you will be prompted to enter information interactively", + ) + + self.parser.add_argument( + "--template", + type=str, + default=None, + help="""(deprecated) Name of the example template config to use for build. You may use `llama stack build --list-distros` to check out the available distributions""", + ) + self.parser.add_argument( + "--distro", + "--distribution", + dest="distribution", + type=str, + default=None, + help="""Name of the distribution to use for build. You may use `llama stack build --list-distros` to check out the available distributions""", + ) + + self.parser.add_argument( + "--list-distros", + "--list-distributions", + action="store_true", + dest="list_distros", + default=False, + help="Show the available distributions for building a Llama Stack distribution", + ) + + self.parser.add_argument( + "--image-type", + type=str, + help="Image Type to use for the build. If not specified, will use the image type from the template config.", + choices=[e.value for e in ImageType], + default=None, # no default so we can detect if a user specified --image-type and override image_type in the config + ) + + self.parser.add_argument( + "--image-name", + type=str, + help=textwrap.dedent( + f"""[for image-type={"|".join(e.value for e in ImageType)}] Name of the virtual environment to use for +the build. If not specified, currently active environment will be used if found. 
+ """ + ), + default=None, + ) + self.parser.add_argument( + "--print-deps-only", + default=False, + action="store_true", + help="Print the dependencies for the stack only, without building the stack", + ) + + self.parser.add_argument( + "--run", + action="store_true", + default=False, + help="Run the stack after building using the same image type, name, and other applicable arguments", + ) + self.parser.add_argument( + "--providers", + type=str, + default=None, + help="Build a config for a list of providers and only those providers. This list is formatted like: api1=provider1,api2=provider2. Where there can be multiple providers per API.", + ) + + def _run_stack_build_command(self, args: argparse.Namespace) -> None: + logger.warning( + "The 'llama stack build' command is deprecated and will be removed in a future release. Please use 'llama stack list-deps'" + ) + # always keep implementation completely silo-ed away from CLI so CLI + # can be fast to load and reduces dependencies + from ._build import run_stack_build_command + + return run_stack_build_command(args) diff --git a/llama_stack/cli/stack/run.py b/llama_stack/cli/stack/run.py index 728d06ca6..06dae7318 100644 --- a/llama_stack/cli/stack/run.py +++ b/llama_stack/cli/stack/run.py @@ -15,10 +15,10 @@ import yaml from llama_stack.cli.stack.utils import ImageType from llama_stack.cli.subcommand import Subcommand -from llama_stack.core.datatypes import StackRunConfig +from llama_stack.core.datatypes import LoggingConfig, StackRunConfig from llama_stack.core.stack import cast_image_name_to_string, replace_env_vars from llama_stack.core.utils.config_resolution import Mode, resolve_config_or_distro -from llama_stack.log import LoggingConfig, get_logger +from llama_stack.log import get_logger REPO_ROOT = Path(__file__).parent.parent.parent.parent diff --git a/llama_stack/cli/stack/stack.py b/llama_stack/cli/stack/stack.py index 351da972f..fd0a4edf5 100644 --- a/llama_stack/cli/stack/stack.py +++ b/llama_stack/cli/stack/stack.py @@ -11,6 +11,7 @@ from llama_stack.cli.stack.list_stacks import StackListBuilds from llama_stack.cli.stack.utils import print_subcommand_description from llama_stack.cli.subcommand import Subcommand +from .build import StackBuild from .list_apis import StackListApis from .list_deps import StackListDeps from .list_providers import StackListProviders @@ -40,6 +41,7 @@ class StackParser(Subcommand): # Add sub-commands StackListDeps.create(subparsers) + StackBuild.create(subparsers) StackListApis.create(subparsers) StackListProviders.create(subparsers) StackRun.create(subparsers) diff --git a/llama_stack/core/access_control/datatypes.py b/llama_stack/core/access_control/datatypes.py index 84beb8e15..c833ed51b 100644 --- a/llama_stack/core/access_control/datatypes.py +++ b/llama_stack/core/access_control/datatypes.py @@ -41,7 +41,7 @@ class AccessRule(BaseModel): A rule defines a list of action either to permit or to forbid. It may specify a principal or a resource that must match for the rule to take effect. The resource to match should be specified in the form of a type qualified identifier, e.g. - model::my-model or vector_store::some-db, or a wildcard for all resources of a type, + model::my-model or vector_db::some-db, or a wildcard for all resources of a type, e.g. model::*. If the principal or resource are not specified, they will match all requests. 
@@ -79,9 +79,9 @@ class AccessRule(BaseModel): description: any user has read access to any resource created by a member of their team - forbid: actions: [create, read, delete] - resource: vector_store::* + resource: vector_db::* unless: user with admin in roles - description: only user with admin role can use vector_store resources + description: only user with admin role can use vector_db resources """ diff --git a/llama_stack/core/build_container.sh b/llama_stack/core/build_container.sh new file mode 100755 index 000000000..03ed846d9 --- /dev/null +++ b/llama_stack/core/build_container.sh @@ -0,0 +1,410 @@ +#!/usr/bin/env bash + +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-} +LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-} + +TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-} +PYPI_VERSION=${PYPI_VERSION:-} +BUILD_PLATFORM=${BUILD_PLATFORM:-} +# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out +# Reference: https://github.com/astral-sh/uv/pull/1694 +UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500} + +# mounting is not supported by docker buildx, so we use COPY instead +USE_COPY_NOT_MOUNT=${USE_COPY_NOT_MOUNT:-} +# Path to the run.yaml file in the container +RUN_CONFIG_PATH=/app/run.yaml + +BUILD_CONTEXT_DIR=$(pwd) + +set -euo pipefail + +# Define color codes +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Usage function +usage() { + echo "Usage: $0 --image-name --container-base --normal-deps [--run-config ] [--external-provider-deps ] [--optional-deps ]" + echo "Example: $0 --image-name llama-stack-img --container-base python:3.12-slim --normal-deps 'numpy pandas' --run-config ./run.yaml --external-provider-deps 'foo' --optional-deps 'bar'" + exit 1 +} + +# Parse arguments +image_name="" +container_base="" +normal_deps="" +external_provider_deps="" +optional_deps="" +run_config="" +distro_or_config="" + +while [[ $# -gt 0 ]]; do + key="$1" + case "$key" in + --image-name) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --image-name requires a string value" >&2 + usage + fi + image_name="$2" + shift 2 + ;; + --container-base) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --container-base requires a string value" >&2 + usage + fi + container_base="$2" + shift 2 + ;; + --normal-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --normal-deps requires a string value" >&2 + usage + fi + normal_deps="$2" + shift 2 + ;; + --external-provider-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --external-provider-deps requires a string value" >&2 + usage + fi + external_provider_deps="$2" + shift 2 + ;; + --optional-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --optional-deps requires a string value" >&2 + usage + fi + optional_deps="$2" + shift 2 + ;; + --run-config) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --run-config requires a string value" >&2 + usage + fi + run_config="$2" + shift 2 + ;; + --distro-or-config) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --distro-or-config requires a string value" >&2 + usage + fi + distro_or_config="$2" + shift 2 + ;; + *) + echo "Unknown option: $1" >&2 + usage + ;; + esac +done + +# Check required arguments +if [[ -z "$image_name" || -z "$container_base" || -z "$normal_deps" ]]; then + echo "Error: --image-name, --container-base, and --normal-deps 
are required." >&2 + usage +fi + +CONTAINER_BINARY=${CONTAINER_BINARY:-docker} +CONTAINER_OPTS=${CONTAINER_OPTS:---progress=plain} +TEMP_DIR=$(mktemp -d) +SCRIPT_DIR=$(dirname "$(readlink -f "$0")") +source "$SCRIPT_DIR/common.sh" + +add_to_container() { + output_file="$TEMP_DIR/Containerfile" + if [ -t 0 ]; then + printf '%s\n' "$1" >>"$output_file" + else + cat >>"$output_file" + fi +} + +if ! is_command_available "$CONTAINER_BINARY"; then + printf "${RED}Error: ${CONTAINER_BINARY} command not found. Is ${CONTAINER_BINARY} installed and in your PATH?${NC}" >&2 + exit 1 +fi + +if [[ $container_base == *"registry.access.redhat.com/ubi9"* ]]; then + add_to_container << EOF +FROM $container_base +WORKDIR /app + +# We install the Python 3.12 dev headers and build tools so that any +# C-extension wheels (e.g. polyleven, faiss-cpu) can compile successfully. + +RUN dnf -y update && dnf install -y iputils git net-tools wget \ + vim-minimal python3.12 python3.12-pip python3.12-wheel \ + python3.12-setuptools python3.12-devel gcc gcc-c++ make && \ + ln -s /bin/pip3.12 /bin/pip && ln -s /bin/python3.12 /bin/python && dnf clean all + +ENV UV_SYSTEM_PYTHON=1 +RUN pip install uv +EOF +else + add_to_container << EOF +FROM $container_base +WORKDIR /app + +RUN apt-get update && apt-get install -y \ + iputils-ping net-tools iproute2 dnsutils telnet \ + curl wget telnet git\ + procps psmisc lsof \ + traceroute \ + bubblewrap \ + gcc g++ \ + && rm -rf /var/lib/apt/lists/* + +ENV UV_SYSTEM_PYTHON=1 +RUN pip install uv +EOF +fi + +# Add pip dependencies first since llama-stack is what will change most often +# so we can reuse layers. +if [ -n "$normal_deps" ]; then + read -ra pip_args <<< "$normal_deps" + quoted_deps=$(printf " %q" "${pip_args[@]}") + add_to_container << EOF +RUN uv pip install --no-cache $quoted_deps +EOF +fi + +if [ -n "$optional_deps" ]; then + IFS='#' read -ra parts <<<"$optional_deps" + for part in "${parts[@]}"; do + read -ra pip_args <<< "$part" + quoted_deps=$(printf " %q" "${pip_args[@]}") + add_to_container << EOF +RUN uv pip install --no-cache $quoted_deps +EOF + done +fi + +if [ -n "$external_provider_deps" ]; then + IFS='#' read -ra parts <<<"$external_provider_deps" + for part in "${parts[@]}"; do + read -ra pip_args <<< "$part" + quoted_deps=$(printf " %q" "${pip_args[@]}") + add_to_container << EOF +RUN uv pip install --no-cache $quoted_deps +EOF + add_to_container << EOF +RUN python3 - << PYTHON | uv pip install --no-cache -r - +import importlib +import sys +try: + package_name = '$part'.split('==')[0].split('>=')[0].split('<=')[0].split('!=')[0].split('<')[0].split('>')[0] + module = importlib.import_module(f'{package_name}.provider') + spec = module.get_provider_spec() + if hasattr(spec, 'pip_packages') and spec.pip_packages: + if isinstance(spec.pip_packages, (list, tuple)): + print('\n'.join(spec.pip_packages)) +except Exception as e: + print(f'Error getting provider spec for {package_name}: {e}', file=sys.stderr) +PYTHON +EOF + done +fi + +get_python_cmd() { + if is_command_available python; then + echo "python" + elif is_command_available python3; then + echo "python3" + else + echo "Error: Neither python nor python3 is installed. Please install Python to continue."
>&2 + exit 1 + fi +} + +if [ -n "$run_config" ]; then + # Copy the run config to the build context since it's an absolute path + cp "$run_config" "$BUILD_CONTEXT_DIR/run.yaml" + + # Parse the run.yaml configuration to identify external provider directories + # If external providers are specified, copy their directory to the container + # and update the configuration to reference the new container path + python_cmd=$(get_python_cmd) + external_providers_dir=$($python_cmd -c "import yaml; config = yaml.safe_load(open('$run_config')); print(config.get('external_providers_dir') or '')") + external_providers_dir=$(eval echo "$external_providers_dir") + if [ -n "$external_providers_dir" ]; then + if [ -d "$external_providers_dir" ]; then + echo "Copying external providers directory: $external_providers_dir" + cp -r "$external_providers_dir" "$BUILD_CONTEXT_DIR/providers.d" + add_to_container << EOF +COPY providers.d /.llama/providers.d +EOF + fi + + # Edit the run.yaml file to change the external_providers_dir to /.llama/providers.d + if [ "$(uname)" = "Darwin" ]; then + sed -i.bak -e 's|external_providers_dir:.*|external_providers_dir: /.llama/providers.d|' "$BUILD_CONTEXT_DIR/run.yaml" + rm -f "$BUILD_CONTEXT_DIR/run.yaml.bak" + else + sed -i 's|external_providers_dir:.*|external_providers_dir: /.llama/providers.d|' "$BUILD_CONTEXT_DIR/run.yaml" + fi + fi + + # Copy run config into docker image + add_to_container << EOF +COPY run.yaml $RUN_CONFIG_PATH +EOF +fi + +stack_mount="/app/llama-stack-source" +client_mount="/app/llama-stack-client-source" + +install_local_package() { + local dir="$1" + local mount_point="$2" + local name="$3" + + if [ ! -d "$dir" ]; then + echo "${RED}Warning: $name is set but directory does not exist: $dir${NC}" >&2 + exit 1 + fi + + if [ "$USE_COPY_NOT_MOUNT" = "true" ]; then + add_to_container << EOF +COPY $dir $mount_point +EOF + fi + add_to_container << EOF +RUN uv pip install --no-cache -e $mount_point +EOF +} + + +if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then + install_local_package "$LLAMA_STACK_CLIENT_DIR" "$client_mount" "LLAMA_STACK_CLIENT_DIR" +fi + +if [ -n "$LLAMA_STACK_DIR" ]; then + install_local_package "$LLAMA_STACK_DIR" "$stack_mount" "LLAMA_STACK_DIR" +else + if [ -n "$TEST_PYPI_VERSION" ]; then + # these packages are damaged in test-pypi, so install them first + add_to_container << EOF +RUN uv pip install --no-cache fastapi libcst +EOF + add_to_container << EOF +RUN uv pip install --no-cache --extra-index-url https://test.pypi.org/simple/ \ + --index-strategy unsafe-best-match \ + llama-stack==$TEST_PYPI_VERSION + +EOF + else + if [ -n "$PYPI_VERSION" ]; then + SPEC_VERSION="llama-stack==${PYPI_VERSION}" + else + SPEC_VERSION="llama-stack" + fi + add_to_container << EOF +RUN uv pip install --no-cache $SPEC_VERSION +EOF + fi +fi + +# remove uv after installation + add_to_container << EOF +RUN pip uninstall -y uv +EOF + +# If a run config is provided, we use the llama stack CLI +if [[ -n "$run_config" ]]; then + add_to_container << EOF +ENTRYPOINT ["llama", "stack", "run", "$RUN_CONFIG_PATH"] +EOF +elif [[ "$distro_or_config" != *.yaml ]]; then + add_to_container << EOF +ENTRYPOINT ["llama", "stack", "run", "$distro_or_config"] +EOF +fi + +# Add other require item commands genearic to all containers +add_to_container << EOF + +RUN mkdir -p /.llama /.cache && chmod -R g+rw /.llama /.cache && (chmod -R g+rw /app 2>/dev/null || true) +EOF + +printf "Containerfile created successfully in %s/Containerfile\n\n" "$TEMP_DIR" +cat "$TEMP_DIR"/Containerfile 
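As an illustrative aside to the run-config handling above: the inline python -c call that build_container.sh uses to pull external_providers_dir out of the run config is roughly equivalent to the standalone sketch below, where "run.yaml" stands in for the script's $run_config argument.

    # Rough standalone equivalent of the inline lookup in build_container.sh.
    # "run.yaml" is a placeholder path; the script substitutes $run_config.
    import yaml

    with open("run.yaml") as f:
        config = yaml.safe_load(f)
    print(config.get("external_providers_dir") or "")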
+printf "\n" + +# Start building the CLI arguments +CLI_ARGS=() + +# Read CONTAINER_OPTS and put it in an array +read -ra CLI_ARGS <<< "$CONTAINER_OPTS" + +if [ "$USE_COPY_NOT_MOUNT" != "true" ]; then + if [ -n "$LLAMA_STACK_DIR" ]; then + CLI_ARGS+=("-v" "$(readlink -f "$LLAMA_STACK_DIR"):$stack_mount") + fi + if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then + CLI_ARGS+=("-v" "$(readlink -f "$LLAMA_STACK_CLIENT_DIR"):$client_mount") + fi +fi + +if is_command_available selinuxenabled && selinuxenabled; then + # Disable SELinux labels -- we don't want to relabel the llama-stack source dir + CLI_ARGS+=("--security-opt" "label=disable") +fi + +# Set version tag based on PyPI version +if [ -n "$PYPI_VERSION" ]; then + version_tag="$PYPI_VERSION" +elif [ -n "$TEST_PYPI_VERSION" ]; then + version_tag="test-$TEST_PYPI_VERSION" +elif [[ -n "$LLAMA_STACK_DIR" || -n "$LLAMA_STACK_CLIENT_DIR" ]]; then + version_tag="dev" +else + URL="https://pypi.org/pypi/llama-stack/json" + version_tag=$(curl -s $URL | jq -r '.info.version') +fi + +# Add version tag to image name +image_tag="$image_name:$version_tag" + +# Detect platform architecture +ARCH=$(uname -m) +if [ -n "$BUILD_PLATFORM" ]; then + CLI_ARGS+=("--platform" "$BUILD_PLATFORM") +elif [ "$ARCH" = "arm64" ] || [ "$ARCH" = "aarch64" ]; then + CLI_ARGS+=("--platform" "linux/arm64") +elif [ "$ARCH" = "x86_64" ]; then + CLI_ARGS+=("--platform" "linux/amd64") +else + echo "Unsupported architecture: $ARCH" + exit 1 +fi + +echo "PWD: $(pwd)" +echo "Containerfile: $TEMP_DIR/Containerfile" +set -x + +$CONTAINER_BINARY build \ + "${CLI_ARGS[@]}" \ + -t "$image_tag" \ + -f "$TEMP_DIR/Containerfile" \ + "$BUILD_CONTEXT_DIR" + +# clean up tmp/configs +rm -rf "$BUILD_CONTEXT_DIR/run.yaml" "$TEMP_DIR" +set +x + +echo "Success!" diff --git a/llama_stack/core/build_venv.sh b/llama_stack/core/build_venv.sh new file mode 100755 index 000000000..04927d71e --- /dev/null +++ b/llama_stack/core/build_venv.sh @@ -0,0 +1,220 @@ +#!/bin/bash + +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
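As a side note on the image tagging logic in build_container.sh above: when no version is pinned, the script asks PyPI for the latest llama-stack version via curl and jq. A rough Python equivalent of that lookup, shown only to illustrate the JSON field being read, is:

    # Rough sketch of the PyPI lookup behind the version-tag fallback.
    import json
    import urllib.request

    with urllib.request.urlopen("https://pypi.org/pypi/llama-stack/json") as resp:
        info = json.load(resp)
    print(info["info"]["version"])  # the same field jq selects as .info.version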
+ +LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-} +LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-} +TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-} +# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out +# Reference: https://github.com/astral-sh/uv/pull/1694 +UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500} +UV_SYSTEM_PYTHON=${UV_SYSTEM_PYTHON:-} +VIRTUAL_ENV=${VIRTUAL_ENV:-} + +set -euo pipefail + +# Define color codes +RED='\033[0;31m' +NC='\033[0m' # No Color + +SCRIPT_DIR=$(dirname "$(readlink -f "$0")") +source "$SCRIPT_DIR/common.sh" + +# Usage function +usage() { + echo "Usage: $0 --env-name --normal-deps [--external-provider-deps ] [--optional-deps ]" + echo "Example: $0 --env-name mybuild --normal-deps 'numpy pandas scipy' --external-provider-deps 'foo' --optional-deps 'bar'" + exit 1 +} + +# Parse arguments +env_name="" +normal_deps="" +external_provider_deps="" +optional_deps="" + +while [[ $# -gt 0 ]]; do + key="$1" + case "$key" in + --env-name) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --env-name requires a string value" >&2 + usage + fi + env_name="$2" + shift 2 + ;; + --normal-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --normal-deps requires a string value" >&2 + usage + fi + normal_deps="$2" + shift 2 + ;; + --external-provider-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --external-provider-deps requires a string value" >&2 + usage + fi + external_provider_deps="$2" + shift 2 + ;; + --optional-deps) + if [[ -z "$2" || "$2" == --* ]]; then + echo "Error: --optional-deps requires a string value" >&2 + usage + fi + optional_deps="$2" + shift 2 + ;; + *) + echo "Unknown option: $1" >&2 + usage + ;; + esac +done + +# Check required arguments +if [[ -z "$env_name" || -z "$normal_deps" ]]; then + echo "Error: --env-name and --normal-deps are required." >&2 + usage +fi + +if [ -n "$LLAMA_STACK_DIR" ]; then + echo "Using llama-stack-dir=$LLAMA_STACK_DIR" +fi +if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then + echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR" +fi + +ENVNAME="" + +# pre-run checks to make sure we can proceed with the installation +pre_run_checks() { + local env_name="$1" + + if ! is_command_available uv; then + echo "uv is not installed, trying to install it." + if ! is_command_available pip; then + echo "pip is not installed, cannot automatically install 'uv'." + echo "Follow this link to install it:" + echo "https://docs.astral.sh/uv/getting-started/installation/" + exit 1 + else + pip install uv + fi + fi + + # checking if an environment with the same name already exists + if [ -d "$env_name" ]; then + echo "Environment '$env_name' already exists, re-using it." 
+ fi +} + +run() { + # Use only global variables set by flag parser + if [ -n "$UV_SYSTEM_PYTHON" ] || [ "$env_name" == "__system__" ]; then + echo "Installing dependencies in system Python environment" + export UV_SYSTEM_PYTHON=1 + elif [ "$VIRTUAL_ENV" == "$env_name" ]; then + echo "Virtual environment $env_name is already active" + else + echo "Using virtual environment $env_name" + uv venv "$env_name" + source "$env_name/bin/activate" + fi + + if [ -n "$TEST_PYPI_VERSION" ]; then + uv pip install fastapi libcst + uv pip install --extra-index-url https://test.pypi.org/simple/ \ + --index-strategy unsafe-best-match \ + llama-stack=="$TEST_PYPI_VERSION" \ + $normal_deps + if [ -n "$optional_deps" ]; then + IFS='#' read -ra parts <<<"$optional_deps" + for part in "${parts[@]}"; do + echo "$part" + uv pip install $part + done + fi + if [ -n "$external_provider_deps" ]; then + IFS='#' read -ra parts <<<"$external_provider_deps" + for part in "${parts[@]}"; do + echo "$part" + uv pip install "$part" + done + fi + else + if [ -n "$LLAMA_STACK_DIR" ]; then + # only warn if DIR does not start with "git+" + if [ ! -d "$LLAMA_STACK_DIR" ] && [[ "$LLAMA_STACK_DIR" != git+* ]]; then + printf "${RED}Warning: LLAMA_STACK_DIR is set but directory does not exist: %s${NC}\n" "$LLAMA_STACK_DIR" >&2 + exit 1 + fi + printf "Installing from LLAMA_STACK_DIR: %s\n" "$LLAMA_STACK_DIR" + # editable only if LLAMA_STACK_DIR does not start with "git+" + if [[ "$LLAMA_STACK_DIR" != git+* ]]; then + EDITABLE="-e" + else + EDITABLE="" + fi + uv pip install --no-cache-dir $EDITABLE "$LLAMA_STACK_DIR" + else + uv pip install --no-cache-dir llama-stack + fi + + if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then + # only warn if DIR does not start with "git+" + if [ ! -d "$LLAMA_STACK_CLIENT_DIR" ] && [[ "$LLAMA_STACK_CLIENT_DIR" != git+* ]]; then + printf "${RED}Warning: LLAMA_STACK_CLIENT_DIR is set but directory does not exist: %s${NC}\n" "$LLAMA_STACK_CLIENT_DIR" >&2 + exit 1 + fi + printf "Installing from LLAMA_STACK_CLIENT_DIR: %s\n" "$LLAMA_STACK_CLIENT_DIR" + # editable only if LLAMA_STACK_CLIENT_DIR does not start with "git+" + if [[ "$LLAMA_STACK_CLIENT_DIR" != git+* ]]; then + EDITABLE="-e" + else + EDITABLE="" + fi + uv pip install --no-cache-dir $EDITABLE "$LLAMA_STACK_CLIENT_DIR" + fi + + printf "Installing pip dependencies\n" + uv pip install $normal_deps + if [ -n "$optional_deps" ]; then + IFS='#' read -ra parts <<<"$optional_deps" + for part in "${parts[@]}"; do + echo "Installing special provider module: $part" + uv pip install $part + done + fi + if [ -n "$external_provider_deps" ]; then + IFS='#' read -ra parts <<<"$external_provider_deps" + for part in "${parts[@]}"; do + echo "Installing external provider module: $part" + uv pip install "$part" + echo "Getting provider spec for module: $part and installing dependencies" + package_name=$(echo "$part" | sed 's/[<>=!].*//') + python3 -c " +import importlib +import sys +try: + module = importlib.import_module(f'$package_name.provider') + spec = module.get_provider_spec() + if hasattr(spec, 'pip_packages') and spec.pip_packages: + print('\\n'.join(spec.pip_packages)) +except Exception as e: + print(f'Error getting provider spec for $package_name: {e}', file=sys.stderr) +" | uv pip install -r - + done + fi + fi +} + +pre_run_checks "$env_name" +run diff --git a/llama_stack/core/conversations/conversations.py b/llama_stack/core/conversations/conversations.py index 83a49e848..66880ca36 100644 --- a/llama_stack/core/conversations/conversations.py +++ 
b/llama_stack/core/conversations/conversations.py @@ -6,8 +6,9 @@ import secrets import time -from typing import Any, Literal +from typing import Any +from openai import NOT_GIVEN from pydantic import BaseModel, TypeAdapter from llama_stack.apis.conversations.conversations import ( @@ -15,7 +16,6 @@ from llama_stack.apis.conversations.conversations import ( ConversationDeletedResource, ConversationItem, ConversationItemDeletedResource, - ConversationItemInclude, ConversationItemList, Conversations, Metadata, @@ -247,14 +247,7 @@ class ConversationServiceImpl(Conversations): adapter: TypeAdapter[ConversationItem] = TypeAdapter(ConversationItem) return adapter.validate_python(record["item_data"]) - async def list_items( - self, - conversation_id: str, - after: str | None = None, - include: list[ConversationItemInclude] | None = None, - limit: int | None = None, - order: Literal["asc", "desc"] | None = None, - ) -> ConversationItemList: + async def list(self, conversation_id: str, after=NOT_GIVEN, include=NOT_GIVEN, limit=NOT_GIVEN, order=NOT_GIVEN): """List items in the conversation.""" if not conversation_id: raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}") @@ -265,12 +258,14 @@ class ConversationServiceImpl(Conversations): result = await self.sql_store.fetch_all(table="conversation_items", where={"conversation_id": conversation_id}) records = result.data - if order is not None and order == "asc": + if order != NOT_GIVEN and order == "asc": records.sort(key=lambda x: x["created_at"]) else: records.sort(key=lambda x: x["created_at"], reverse=True) - actual_limit = limit or 20 + actual_limit = 20 + if limit != NOT_GIVEN and isinstance(limit, int): + actual_limit = limit records = records[:actual_limit] items = [record["item_data"] for record in records] diff --git a/llama_stack/core/datatypes.py b/llama_stack/core/datatypes.py index d7175100e..e8cb36a02 100644 --- a/llama_stack/core/datatypes.py +++ b/llama_stack/core/datatypes.py @@ -23,15 +23,14 @@ from llama_stack.apis.scoring import Scoring from llama_stack.apis.scoring_functions import ScoringFn, ScoringFnInput from llama_stack.apis.shields import Shield, ShieldInput from llama_stack.apis.tools import ToolGroup, ToolGroupInput, ToolRuntime +from llama_stack.apis.vector_dbs import VectorDB, VectorDBInput from llama_stack.apis.vector_io import VectorIO -from llama_stack.apis.vector_stores import VectorStore, VectorStoreInput from llama_stack.core.access_control.datatypes import AccessRule from llama_stack.core.storage.datatypes import ( KVStoreReference, StorageBackendType, StorageConfig, ) -from llama_stack.log import LoggingConfig from llama_stack.providers.datatypes import Api, ProviderSpec LLAMA_STACK_BUILD_CONFIG_VERSION = 2 @@ -72,7 +71,7 @@ class ShieldWithOwner(Shield, ResourceWithOwner): pass -class VectorStoreWithOwner(VectorStore, ResourceWithOwner): +class VectorDBWithOwner(VectorDB, ResourceWithOwner): pass @@ -92,12 +91,12 @@ class ToolGroupWithOwner(ToolGroup, ResourceWithOwner): pass -RoutableObject = Model | Shield | VectorStore | Dataset | ScoringFn | Benchmark | ToolGroup +RoutableObject = Model | Shield | VectorDB | Dataset | ScoringFn | Benchmark | ToolGroup RoutableObjectWithProvider = Annotated[ ModelWithOwner | ShieldWithOwner - | VectorStoreWithOwner + | VectorDBWithOwner | DatasetWithOwner | ScoringFnWithOwner | BenchmarkWithOwner @@ -196,6 +195,14 @@ class TelemetryConfig(BaseModel): enabled: bool = Field(default=False, description="enable or disable 
telemetry") +class LoggingConfig(BaseModel): + category_levels: dict[str, str] = Field( + default_factory=dict, + description=""" + Dictionary of different logging configurations for different portions (ex: core, server) of llama stack""", + ) + + class OAuth2JWKSConfig(BaseModel): # The JWKS URI for collecting public keys uri: str @@ -367,15 +374,6 @@ class VectorStoresConfig(BaseModel): ) -class SafetyConfig(BaseModel): - """Configuration for default moderations model.""" - - default_shield_id: str | None = Field( - default=None, - description="ID of the shield to use for when `model` is not specified in the `moderations` API request.", - ) - - class QuotaPeriod(StrEnum): DAY = "day" @@ -429,7 +427,7 @@ class RegisteredResources(BaseModel): models: list[ModelInput] = Field(default_factory=list) shields: list[ShieldInput] = Field(default_factory=list) - vector_stores: list[VectorStoreInput] = Field(default_factory=list) + vector_dbs: list[VectorDBInput] = Field(default_factory=list) datasets: list[DatasetInput] = Field(default_factory=list) scoring_fns: list[ScoringFnInput] = Field(default_factory=list) benchmarks: list[BenchmarkInput] = Field(default_factory=list) @@ -534,11 +532,6 @@ can be instantiated multiple times (with different configs) if necessary. description="Configuration for vector stores, including default embedding model", ) - safety: SafetyConfig | None = Field( - default=None, - description="Configuration for default moderations model", - ) - @field_validator("external_providers_dir") @classmethod def validate_external_providers_dir(cls, v): diff --git a/llama_stack/core/distribution.py b/llama_stack/core/distribution.py index 9be5ffb49..59461f5d6 100644 --- a/llama_stack/core/distribution.py +++ b/llama_stack/core/distribution.py @@ -25,7 +25,7 @@ from llama_stack.providers.datatypes import ( logger = get_logger(name=__name__, category="core") -INTERNAL_APIS = {Api.inspect, Api.providers, Api.prompts, Api.conversations} +INTERNAL_APIS = {Api.inspect, Api.providers, Api.prompts, Api.conversations, Api.telemetry} def stack_apis() -> list[Api]: @@ -64,7 +64,7 @@ def builtin_automatically_routed_apis() -> list[AutoRoutedApiInfo]: router_api=Api.tool_runtime, ), AutoRoutedApiInfo( - routing_table_api=Api.vector_stores, + routing_table_api=Api.vector_dbs, router_api=Api.vector_io, ), ] diff --git a/llama_stack/core/library_client.py b/llama_stack/core/library_client.py index 6203b529e..1179075cd 100644 --- a/llama_stack/core/library_client.py +++ b/llama_stack/core/library_client.py @@ -32,7 +32,7 @@ from termcolor import cprint from llama_stack.core.build import print_pip_install_help from llama_stack.core.configure import parse_and_maybe_upgrade_config -from llama_stack.core.datatypes import BuildConfig, BuildProvider, DistributionSpec +from llama_stack.core.datatypes import Api, BuildConfig, BuildProvider, DistributionSpec from llama_stack.core.request_headers import ( PROVIDER_DATA_VAR, request_provider_data_context, @@ -44,12 +44,11 @@ from llama_stack.core.stack import ( get_stack_run_config_from_distro, replace_env_vars, ) -from llama_stack.core.telemetry import Telemetry -from llama_stack.core.telemetry.tracing import CURRENT_TRACE_CONTEXT, end_trace, setup_logger, start_trace from llama_stack.core.utils.config import redact_sensitive_fields from llama_stack.core.utils.context import preserve_contexts_async_generator from llama_stack.core.utils.exec import in_notebook -from llama_stack.log import get_logger, setup_logging +from llama_stack.log import get_logger +from 
llama_stack.providers.utils.telemetry.tracing import CURRENT_TRACE_CONTEXT, end_trace, setup_logger, start_trace from llama_stack.strong_typing.inspection import is_unwrapped_body_param logger = get_logger(name=__name__, category="core") @@ -201,9 +200,6 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient): skip_logger_removal: bool = False, ): super().__init__() - # Initialize logging from environment variables first - setup_logging() - # when using the library client, we should not log to console since many # of our logs are intended for server-side usage if sinks_from_env := os.environ.get("TELEMETRY_SINKS", None): @@ -282,7 +278,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient): else: prefix = "!" if in_notebook() else "" cprint( - f"Please run:\n\n{prefix}llama stack list-deps {self.config_path_or_distro_name} | xargs -L1 uv pip install\n\n", + f"Please run:\n\n{prefix}llama stack build --distro {self.config_path_or_distro_name} --image-type venv\n\n", "yellow", file=sys.stderr, ) @@ -294,8 +290,8 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient): raise _e assert self.impls is not None - if self.config.telemetry.enabled: - setup_logger(Telemetry()) + if Api.telemetry in self.impls: + setup_logger(self.impls[Api.telemetry]) if not os.environ.get("PYTEST_CURRENT_TEST"): console = Console() diff --git a/llama_stack/core/resolver.py b/llama_stack/core/resolver.py index 805d260fc..6e1843870 100644 --- a/llama_stack/core/resolver.py +++ b/llama_stack/core/resolver.py @@ -27,9 +27,10 @@ from llama_stack.apis.safety import Safety from llama_stack.apis.scoring import Scoring from llama_stack.apis.scoring_functions import ScoringFunctions from llama_stack.apis.shields import Shields +from llama_stack.apis.telemetry import Telemetry from llama_stack.apis.tools import ToolGroups, ToolRuntime +from llama_stack.apis.vector_dbs import VectorDBs from llama_stack.apis.vector_io import VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.apis.version import LLAMA_STACK_API_V1ALPHA from llama_stack.core.client import get_client_impl from llama_stack.core.datatypes import ( @@ -48,6 +49,7 @@ from llama_stack.providers.datatypes import ( Api, BenchmarksProtocolPrivate, DatasetsProtocolPrivate, + InlineProviderSpec, ModelsProtocolPrivate, ProviderSpec, RemoteProviderConfig, @@ -80,7 +82,7 @@ def api_protocol_map(external_apis: dict[Api, ExternalApiSpec] | None = None) -> Api.inspect: Inspect, Api.batches: Batches, Api.vector_io: VectorIO, - Api.vector_stores: VectorStore, + Api.vector_dbs: VectorDBs, Api.models: Models, Api.safety: Safety, Api.shields: Shields, @@ -96,6 +98,7 @@ def api_protocol_map(external_apis: dict[Api, ExternalApiSpec] | None = None) -> Api.files: Files, Api.prompts: Prompts, Api.conversations: Conversations, + Api.telemetry: Telemetry, } if external_apis: @@ -238,6 +241,24 @@ def validate_and_prepare_providers( key = api_str if api not in router_apis else f"inner-{api_str}" providers_with_specs[key] = specs + # TODO: remove this logic, telemetry should not have providers. + # if telemetry has been enabled in the config initialize our internal impl + # telemetry is not an external API so it SHOULD NOT be auto-routed. 
+ if run_config.telemetry.enabled: + specs = {} + p = InlineProviderSpec( + api=Api.telemetry, + provider_type="inline::meta-reference", + pip_packages=[], + optional_api_dependencies=[Api.datasetio], + module="llama_stack.providers.inline.telemetry.meta_reference", + config_class="llama_stack.providers.inline.telemetry.meta_reference.config.TelemetryConfig", + description="Meta's reference implementation of telemetry and observability using OpenTelemetry.", + ) + spec = ProviderWithSpec(spec=p, provider_type="inline::meta-reference", provider_id="meta-reference") + specs["meta-reference"] = spec + providers_with_specs["telemetry"] = specs + return providers_with_specs diff --git a/llama_stack/core/routers/__init__.py b/llama_stack/core/routers/__init__.py index 204cbb87f..df4df0463 100644 --- a/llama_stack/core/routers/__init__.py +++ b/llama_stack/core/routers/__init__.py @@ -29,7 +29,7 @@ async def get_routing_table_impl( from ..routing_tables.scoring_functions import ScoringFunctionsRoutingTable from ..routing_tables.shields import ShieldsRoutingTable from ..routing_tables.toolgroups import ToolGroupsRoutingTable - from ..routing_tables.vector_stores import VectorStoresRoutingTable + from ..routing_tables.vector_dbs import VectorDBsRoutingTable api_to_tables = { "models": ModelsRoutingTable, @@ -38,7 +38,7 @@ async def get_routing_table_impl( "scoring_functions": ScoringFunctionsRoutingTable, "benchmarks": BenchmarksRoutingTable, "tool_groups": ToolGroupsRoutingTable, - "vector_stores": VectorStoresRoutingTable, + "vector_dbs": VectorDBsRoutingTable, } if api.value not in api_to_tables: @@ -72,6 +72,14 @@ async def get_auto_router_impl( raise ValueError(f"API {api.value} not found in router map") api_to_dep_impl = {} + if run_config.telemetry.enabled: + api_to_deps = { + "inference": {"telemetry": Api.telemetry}, + } + for dep_name, dep_api in api_to_deps.get(api.value, {}).items(): + if dep_api in deps: + api_to_dep_impl[dep_name] = deps[dep_api] + # TODO: move pass configs to routers instead if api == Api.inference: inference_ref = run_config.storage.stores.inference @@ -84,12 +92,9 @@ async def get_auto_router_impl( ) await inference_store.initialize() api_to_dep_impl["store"] = inference_store - api_to_dep_impl["telemetry_enabled"] = run_config.telemetry.enabled elif api == Api.vector_io: api_to_dep_impl["vector_stores_config"] = run_config.vector_stores - elif api == Api.safety: - api_to_dep_impl["safety_config"] = run_config.safety impl = api_to_routers[api.value](routing_table, **api_to_dep_impl) await impl.initialize() diff --git a/llama_stack/core/routers/inference.py b/llama_stack/core/routers/inference.py index d532bc622..b20ad44ca 100644 --- a/llama_stack/core/routers/inference.py +++ b/llama_stack/core/routers/inference.py @@ -44,22 +44,17 @@ from llama_stack.apis.inference import ( OpenAIEmbeddingsResponse, OpenAIMessageParam, Order, - RerankResponse, StopReason, ToolPromptFormat, ) -from llama_stack.apis.inference.inference import ( - OpenAIChatCompletionContentPartImageParam, - OpenAIChatCompletionContentPartTextParam, -) from llama_stack.apis.models import Model, ModelType -from llama_stack.apis.telemetry import MetricEvent, MetricInResponse -from llama_stack.core.telemetry.tracing import enqueue_event, get_current_span +from llama_stack.apis.telemetry import MetricEvent, MetricInResponse, Telemetry from llama_stack.log import get_logger from llama_stack.models.llama.llama3.chat_format import ChatFormat from llama_stack.models.llama.llama3.tokenizer import Tokenizer 
from llama_stack.providers.datatypes import HealthResponse, HealthStatus, RoutingTable from llama_stack.providers.utils.inference.inference_store import InferenceStore +from llama_stack.providers.utils.telemetry.tracing import enqueue_event, get_current_span logger = get_logger(name=__name__, category="core::routers") @@ -70,14 +65,14 @@ class InferenceRouter(Inference): def __init__( self, routing_table: RoutingTable, + telemetry: Telemetry | None = None, store: InferenceStore | None = None, - telemetry_enabled: bool = False, ) -> None: logger.debug("Initializing InferenceRouter") self.routing_table = routing_table - self.telemetry_enabled = telemetry_enabled + self.telemetry = telemetry self.store = store - if self.telemetry_enabled: + if self.telemetry: self.tokenizer = Tokenizer.get_instance() self.formatter = ChatFormat(self.tokenizer) @@ -159,7 +154,7 @@ class InferenceRouter(Inference): model: Model, ) -> list[MetricInResponse]: metrics = self._construct_metrics(prompt_tokens, completion_tokens, total_tokens, model) - if self.telemetry_enabled: + if self.telemetry: for metric in metrics: enqueue_event(metric) return [MetricInResponse(metric=metric.metric, value=metric.value) for metric in metrics] @@ -187,23 +182,6 @@ class InferenceRouter(Inference): raise ModelTypeError(model_id, model.model_type, expected_model_type) return model - async def rerank( - self, - model: str, - query: str | OpenAIChatCompletionContentPartTextParam | OpenAIChatCompletionContentPartImageParam, - items: list[str | OpenAIChatCompletionContentPartTextParam | OpenAIChatCompletionContentPartImageParam], - max_num_results: int | None = None, - ) -> RerankResponse: - logger.debug(f"InferenceRouter.rerank: {model}") - model_obj = await self._get_model(model, ModelType.rerank) - provider = await self.routing_table.get_provider_impl(model_obj.identifier) - return await provider.rerank( - model=model_obj.identifier, - query=query, - items=items, - max_num_results=max_num_results, - ) - async def openai_completion( self, params: Annotated[OpenAICompletionRequestWithExtraBody, Body(...)], @@ -223,7 +201,7 @@ class InferenceRouter(Inference): # that we do not return an AsyncIterator, our tests expect a stream of chunks we cannot intercept currently. 
response = await provider.openai_completion(params) - if self.telemetry_enabled: + if self.telemetry: metrics = self._construct_metrics( prompt_tokens=response.usage.prompt_tokens, completion_tokens=response.usage.completion_tokens, @@ -285,7 +263,7 @@ class InferenceRouter(Inference): if self.store: asyncio.create_task(self.store.store_chat_completion(response, params.messages)) - if self.telemetry_enabled: + if self.telemetry: metrics = self._construct_metrics( prompt_tokens=response.usage.prompt_tokens, completion_tokens=response.usage.completion_tokens, @@ -393,7 +371,7 @@ class InferenceRouter(Inference): else: if hasattr(chunk, "delta"): completion_text += chunk.delta - if hasattr(chunk, "stop_reason") and chunk.stop_reason and self.telemetry_enabled: + if hasattr(chunk, "stop_reason") and chunk.stop_reason and self.telemetry: complete = True completion_tokens = await self._count_tokens(completion_text) # if we are done receiving tokens @@ -401,7 +379,7 @@ class InferenceRouter(Inference): total_tokens = (prompt_tokens or 0) + (completion_tokens or 0) # Create a separate span for streaming completion metrics - if self.telemetry_enabled: + if self.telemetry: # Log metrics in the new span context completion_metrics = self._construct_metrics( prompt_tokens=prompt_tokens, @@ -450,7 +428,7 @@ class InferenceRouter(Inference): total_tokens = (prompt_tokens or 0) + (completion_tokens or 0) # Create a separate span for completion metrics - if self.telemetry_enabled: + if self.telemetry: # Log metrics in the new span context completion_metrics = self._construct_metrics( prompt_tokens=prompt_tokens, @@ -548,7 +526,7 @@ class InferenceRouter(Inference): completion_text += "".join(choice_data["content_parts"]) # Add metrics to the chunk - if self.telemetry_enabled and hasattr(chunk, "usage") and chunk.usage: + if self.telemetry and hasattr(chunk, "usage") and chunk.usage: metrics = self._construct_metrics( prompt_tokens=chunk.usage.prompt_tokens, completion_tokens=chunk.usage.completion_tokens, diff --git a/llama_stack/core/routers/safety.py b/llama_stack/core/routers/safety.py index 79eac8b46..9ba3327f1 100644 --- a/llama_stack/core/routers/safety.py +++ b/llama_stack/core/routers/safety.py @@ -10,7 +10,6 @@ from llama_stack.apis.inference import Message from llama_stack.apis.safety import RunShieldResponse, Safety from llama_stack.apis.safety.safety import ModerationObject from llama_stack.apis.shields import Shield -from llama_stack.core.datatypes import SafetyConfig from llama_stack.log import get_logger from llama_stack.providers.datatypes import RoutingTable @@ -21,11 +20,9 @@ class SafetyRouter(Safety): def __init__( self, routing_table: RoutingTable, - safety_config: SafetyConfig | None = None, ) -> None: logger.debug("Initializing SafetyRouter") self.routing_table = routing_table - self.safety_config = safety_config async def initialize(self) -> None: logger.debug("SafetyRouter.initialize") @@ -63,47 +60,26 @@ class SafetyRouter(Safety): params=params, ) - async def run_moderation(self, input: str | list[str], model: str | None = None) -> ModerationObject: - list_shields_response = await self.routing_table.list_shields() - shields = list_shields_response.data + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: + async def get_shield_id(self, model: str) -> str: + """Get Shield id from model (provider_resource_id) of shield.""" + list_shields_response = await self.routing_table.list_shields() - selected_shield: Shield | None = None - provider_model: 
str | None = model + matches = [s.identifier for s in list_shields_response.data if model == s.provider_resource_id] - if model: - matches: list[Shield] = [s for s in shields if model == s.provider_resource_id] if not matches: - raise ValueError( - f"No shield associated with provider_resource id {model}: choose from {[s.provider_resource_id for s in shields]}" - ) + raise ValueError(f"No shield associated with provider_resource id {model}") if len(matches) > 1: - raise ValueError( - f"Multiple shields associated with provider_resource id {model}: matched shields {[s.identifier for s in matches]}" - ) - selected_shield = matches[0] - else: - default_shield_id = self.safety_config.default_shield_id if self.safety_config else None - if not default_shield_id: - raise ValueError( - "No moderation model specified and no default_shield_id configured in safety config: select model " - f"from {[s.provider_resource_id or s.identifier for s in shields]}" - ) + raise ValueError(f"Multiple shields associated with provider_resource id {model}") + return matches[0] - selected_shield = next((s for s in shields if s.identifier == default_shield_id), None) - if selected_shield is None: - raise ValueError( - f"Default moderation model not found. Choose from {[s.provider_resource_id or s.identifier for s in shields]}." - ) - - provider_model = selected_shield.provider_resource_id - - shield_id = selected_shield.identifier + shield_id = await get_shield_id(self, model) logger.debug(f"SafetyRouter.run_moderation: {shield_id}") provider = await self.routing_table.get_provider_impl(shield_id) response = await provider.run_moderation( input=input, - model=provider_model, + model=model, ) return response diff --git a/llama_stack/core/routers/tool_runtime.py b/llama_stack/core/routers/tool_runtime.py index be4c13905..ad82293e5 100644 --- a/llama_stack/core/routers/tool_runtime.py +++ b/llama_stack/core/routers/tool_runtime.py @@ -37,24 +37,24 @@ class ToolRuntimeRouter(ToolRuntime): async def query( self, content: InterleavedContent, - vector_store_ids: list[str], + vector_db_ids: list[str], query_config: RAGQueryConfig | None = None, ) -> RAGQueryResult: - logger.debug(f"ToolRuntimeRouter.RagToolImpl.query: {vector_store_ids}") + logger.debug(f"ToolRuntimeRouter.RagToolImpl.query: {vector_db_ids}") provider = await self.routing_table.get_provider_impl("knowledge_search") - return await provider.query(content, vector_store_ids, query_config) + return await provider.query(content, vector_db_ids, query_config) async def insert( self, documents: list[RAGDocument], - vector_store_id: str, + vector_db_id: str, chunk_size_in_tokens: int = 512, ) -> None: logger.debug( - f"ToolRuntimeRouter.RagToolImpl.insert: {vector_store_id}, {len(documents)} documents, chunk_size={chunk_size_in_tokens}" + f"ToolRuntimeRouter.RagToolImpl.insert: {vector_db_id}, {len(documents)} documents, chunk_size={chunk_size_in_tokens}" ) provider = await self.routing_table.get_provider_impl("insert_into_memory") - return await provider.insert(documents, vector_store_id, chunk_size_in_tokens) + return await provider.insert(documents, vector_db_id, chunk_size_in_tokens) def __init__( self, diff --git a/llama_stack/core/routers/vector_io.py b/llama_stack/core/routers/vector_io.py index 2b1701dc2..bfc5f7164 100644 --- a/llama_stack/core/routers/vector_io.py +++ b/llama_stack/core/routers/vector_io.py @@ -71,6 +71,25 @@ class VectorIORouter(VectorIO): raise ValueError(f"Embedding model '{embedding_model_id}' not found or not an embedding model") + async 
def register_vector_db( + self, + vector_db_id: str, + embedding_model: str, + embedding_dimension: int | None = 384, + provider_id: str | None = None, + vector_db_name: str | None = None, + provider_vector_db_id: str | None = None, + ) -> None: + logger.debug(f"VectorIORouter.register_vector_db: {vector_db_id}, {embedding_model}") + await self.routing_table.register_vector_db( + vector_db_id, + embedding_model, + embedding_dimension, + provider_id, + vector_db_name, + provider_vector_db_id, + ) + async def insert_chunks( self, vector_db_id: str, @@ -146,22 +165,22 @@ class VectorIORouter(VectorIO): else: provider_id = list(self.routing_table.impls_by_provider_id.keys())[0] - vector_store_id = f"vs_{uuid.uuid4()}" - registered_vector_store = await self.routing_table.register_vector_store( - vector_store_id=vector_store_id, + vector_db_id = f"vs_{uuid.uuid4()}" + registered_vector_db = await self.routing_table.register_vector_db( + vector_db_id=vector_db_id, embedding_model=embedding_model, embedding_dimension=embedding_dimension, provider_id=provider_id, - provider_vector_store_id=vector_store_id, - vector_store_name=params.name, + provider_vector_db_id=vector_db_id, + vector_db_name=params.name, ) - provider = await self.routing_table.get_provider_impl(registered_vector_store.identifier) + provider = await self.routing_table.get_provider_impl(registered_vector_db.identifier) - # Update model_extra with registered values so provider uses the already-registered vector_store + # Update model_extra with registered values so provider uses the already-registered vector_db if params.model_extra is None: params.model_extra = {} - params.model_extra["provider_vector_store_id"] = registered_vector_store.provider_resource_id - params.model_extra["provider_id"] = registered_vector_store.provider_id + params.model_extra["provider_vector_db_id"] = registered_vector_db.provider_resource_id + params.model_extra["provider_id"] = registered_vector_db.provider_id if embedding_model is not None: params.model_extra["embedding_model"] = embedding_model if embedding_dimension is not None: @@ -179,15 +198,15 @@ class VectorIORouter(VectorIO): logger.debug(f"VectorIORouter.openai_list_vector_stores: limit={limit}") # Route to default provider for now - could aggregate from all providers in the future # call retrieve on each vector dbs to get list of vector stores - vector_stores = await self.routing_table.get_all_with_type("vector_store") + vector_dbs = await self.routing_table.get_all_with_type("vector_db") all_stores = [] - for vector_store in vector_stores: + for vector_db in vector_dbs: try: - provider = await self.routing_table.get_provider_impl(vector_store.identifier) - vector_store = await provider.openai_retrieve_vector_store(vector_store.identifier) + provider = await self.routing_table.get_provider_impl(vector_db.identifier) + vector_store = await provider.openai_retrieve_vector_store(vector_db.identifier) all_stores.append(vector_store) except Exception as e: - logger.error(f"Error retrieving vector store {vector_store.identifier}: {e}") + logger.error(f"Error retrieving vector store {vector_db.identifier}: {e}") continue # Sort by created_at diff --git a/llama_stack/core/routing_tables/common.py b/llama_stack/core/routing_tables/common.py index d6faf93c5..087483bb6 100644 --- a/llama_stack/core/routing_tables/common.py +++ b/llama_stack/core/routing_tables/common.py @@ -41,7 +41,7 @@ async def register_object_with_provider(obj: RoutableObject, p: Any) -> Routable elif api == Api.safety: return await 
p.register_shield(obj) elif api == Api.vector_io: - return await p.register_vector_store(obj) + return await p.register_vector_db(obj) elif api == Api.datasetio: return await p.register_dataset(obj) elif api == Api.scoring: @@ -57,7 +57,7 @@ async def register_object_with_provider(obj: RoutableObject, p: Any) -> Routable async def unregister_object_from_provider(obj: RoutableObject, p: Any) -> None: api = get_impl_api(p) if api == Api.vector_io: - return await p.unregister_vector_store(obj.identifier) + return await p.unregister_vector_db(obj.identifier) elif api == Api.inference: return await p.unregister_model(obj.identifier) elif api == Api.safety: @@ -108,7 +108,7 @@ class CommonRoutingTableImpl(RoutingTable): elif api == Api.safety: p.shield_store = self elif api == Api.vector_io: - p.vector_store_store = self + p.vector_db_store = self elif api == Api.datasetio: p.dataset_store = self elif api == Api.scoring: @@ -134,15 +134,15 @@ class CommonRoutingTableImpl(RoutingTable): from .scoring_functions import ScoringFunctionsRoutingTable from .shields import ShieldsRoutingTable from .toolgroups import ToolGroupsRoutingTable - from .vector_stores import VectorStoresRoutingTable + from .vector_dbs import VectorDBsRoutingTable def apiname_object(): if isinstance(self, ModelsRoutingTable): return ("Inference", "model") elif isinstance(self, ShieldsRoutingTable): return ("Safety", "shield") - elif isinstance(self, VectorStoresRoutingTable): - return ("VectorIO", "vector_store") + elif isinstance(self, VectorDBsRoutingTable): + return ("VectorIO", "vector_db") elif isinstance(self, DatasetsRoutingTable): return ("DatasetIO", "dataset") elif isinstance(self, ScoringFunctionsRoutingTable): diff --git a/llama_stack/core/routing_tables/vector_stores.py b/llama_stack/core/routing_tables/vector_dbs.py similarity index 70% rename from llama_stack/core/routing_tables/vector_stores.py rename to llama_stack/core/routing_tables/vector_dbs.py index c6c80a01e..e87fb61c6 100644 --- a/llama_stack/core/routing_tables/vector_stores.py +++ b/llama_stack/core/routing_tables/vector_dbs.py @@ -6,12 +6,15 @@ from typing import Any +from pydantic import TypeAdapter + from llama_stack.apis.common.errors import ModelNotFoundError, ModelTypeError from llama_stack.apis.models import ModelType from llama_stack.apis.resource import ResourceType -# Removed VectorStores import to avoid exposing public API +# Removed VectorDBs import to avoid exposing public API from llama_stack.apis.vector_io.vector_io import ( + OpenAICreateVectorStoreRequestWithExtraBody, SearchRankingOptions, VectorStoreChunkingStrategy, VectorStoreDeleteResponse, @@ -23,7 +26,7 @@ from llama_stack.apis.vector_io.vector_io import ( VectorStoreSearchResponsePage, ) from llama_stack.core.datatypes import ( - VectorStoreWithOwner, + VectorDBWithOwner, ) from llama_stack.log import get_logger @@ -32,23 +35,23 @@ from .common import CommonRoutingTableImpl, lookup_model logger = get_logger(name=__name__, category="core::routing_tables") -class VectorStoresRoutingTable(CommonRoutingTableImpl): - """Internal routing table for vector_store operations. +class VectorDBsRoutingTable(CommonRoutingTableImpl): + """Internal routing table for vector_db operations. - Does not inherit from VectorStores to avoid exposing public API endpoints. + Does not inherit from VectorDBs to avoid exposing public API endpoints. Only provides internal routing functionality for VectorIORouter. 
""" # Internal methods only - no public API exposure - async def register_vector_store( + async def register_vector_db( self, - vector_store_id: str, + vector_db_id: str, embedding_model: str, embedding_dimension: int | None = 384, provider_id: str | None = None, - provider_vector_store_id: str | None = None, - vector_store_name: str | None = None, + provider_vector_db_id: str | None = None, + vector_db_name: str | None = None, ) -> Any: if provider_id is None: if len(self.impls_by_provider_id) > 0: @@ -64,24 +67,52 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): raise ModelNotFoundError(embedding_model) if model.model_type != ModelType.embedding: raise ModelTypeError(embedding_model, model.model_type, ModelType.embedding) + if "embedding_dimension" not in model.metadata: + raise ValueError(f"Model {embedding_model} does not have an embedding dimension") - vector_store = VectorStoreWithOwner( - identifier=vector_store_id, - type=ResourceType.vector_store.value, - provider_id=provider_id, - provider_resource_id=provider_vector_store_id, - embedding_model=embedding_model, - embedding_dimension=embedding_dimension, - vector_store_name=vector_store_name, + try: + provider = self.impls_by_provider_id[provider_id] + except KeyError: + available_providers = list(self.impls_by_provider_id.keys()) + raise ValueError( + f"Provider '{provider_id}' not found in routing table. Available providers: {available_providers}" + ) from None + logger.warning( + "VectorDB is being deprecated in future releases in favor of VectorStore. Please migrate your usage accordingly." ) - await self.register_object(vector_store) - return vector_store + request = OpenAICreateVectorStoreRequestWithExtraBody( + name=vector_db_name or vector_db_id, + embedding_model=embedding_model, + embedding_dimension=model.metadata["embedding_dimension"], + provider_id=provider_id, + provider_vector_db_id=provider_vector_db_id, + ) + vector_store = await provider.openai_create_vector_store(request) + + vector_store_id = vector_store.id + actual_provider_vector_db_id = provider_vector_db_id or vector_store_id + logger.warning( + f"Ignoring vector_db_id {vector_db_id} and using vector_store_id {vector_store_id} instead. 
The requested vector_db_id {vector_db_id} is preserved as the vector store's name." + ) + + vector_db_data = { + "identifier": vector_store_id, + "type": ResourceType.vector_db.value, + "provider_id": provider_id, + "provider_resource_id": actual_provider_vector_db_id, + "embedding_model": embedding_model, + "embedding_dimension": model.metadata["embedding_dimension"], + "vector_db_name": vector_store.name, + } + vector_db = TypeAdapter(VectorDBWithOwner).validate_python(vector_db_data) + await self.register_object(vector_db) + return vector_db async def openai_retrieve_vector_store( self, vector_store_id: str, ) -> VectorStoreObject: - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_retrieve_vector_store(vector_store_id) @@ -92,7 +123,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): expires_after: dict[str, Any] | None = None, metadata: dict[str, Any] | None = None, ) -> VectorStoreObject: - await self.assert_action_allowed("update", "vector_store", vector_store_id) + await self.assert_action_allowed("update", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_update_vector_store( vector_store_id=vector_store_id, @@ -105,18 +136,18 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): self, vector_store_id: str, ) -> VectorStoreDeleteResponse: - await self.assert_action_allowed("delete", "vector_store", vector_store_id) + await self.assert_action_allowed("delete", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) result = await provider.openai_delete_vector_store(vector_store_id) - await self.unregister_vector_store(vector_store_id) + await self.unregister_vector_db(vector_store_id) return result - async def unregister_vector_store(self, vector_store_id: str) -> None: + async def unregister_vector_db(self, vector_store_id: str) -> None: """Remove the vector store from the routing table registry.""" try: - vector_store_obj = await self.get_object_by_identifier("vector_store", vector_store_id) - if vector_store_obj: - await self.unregister_object(vector_store_obj) + vector_db_obj = await self.get_object_by_identifier("vector_db", vector_store_id) + if vector_db_obj: + await self.unregister_object(vector_db_obj) except Exception as e: # Log the error but don't fail the operation logger.warning(f"Failed to unregister vector store {vector_store_id} from routing table: {e}") @@ -131,7 +162,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): rewrite_query: bool | None = False, search_mode: str | None = "vector", ) -> VectorStoreSearchResponsePage: - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_search_vector_store( vector_store_id=vector_store_id, @@ -150,7 +181,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): attributes: dict[str, Any] | None = None, chunking_strategy: VectorStoreChunkingStrategy | None = None, ) -> VectorStoreFileObject: - await self.assert_action_allowed("update", "vector_store", vector_store_id) + await self.assert_action_allowed("update", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_attach_file_to_vector_store(
vector_store_id=vector_store_id, @@ -168,7 +199,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): before: str | None = None, filter: VectorStoreFileStatus | None = None, ) -> list[VectorStoreFileObject]: - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_list_files_in_vector_store( vector_store_id=vector_store_id, @@ -184,7 +215,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): vector_store_id: str, file_id: str, ) -> VectorStoreFileObject: - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_retrieve_vector_store_file( vector_store_id=vector_store_id, @@ -196,7 +227,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): vector_store_id: str, file_id: str, ) -> VectorStoreFileContentsResponse: - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_retrieve_vector_store_file_contents( vector_store_id=vector_store_id, @@ -209,7 +240,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): file_id: str, attributes: dict[str, Any], ) -> VectorStoreFileObject: - await self.assert_action_allowed("update", "vector_store", vector_store_id) + await self.assert_action_allowed("update", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_update_vector_store_file( vector_store_id=vector_store_id, @@ -222,7 +253,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): vector_store_id: str, file_id: str, ) -> VectorStoreFileDeleteResponse: - await self.assert_action_allowed("delete", "vector_store", vector_store_id) + await self.assert_action_allowed("delete", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_delete_vector_store_file( vector_store_id=vector_store_id, @@ -236,7 +267,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): attributes: dict[str, Any] | None = None, chunking_strategy: Any | None = None, ): - await self.assert_action_allowed("update", "vector_store", vector_store_id) + await self.assert_action_allowed("update", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_create_vector_store_file_batch( vector_store_id=vector_store_id, @@ -250,7 +281,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): batch_id: str, vector_store_id: str, ): - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_retrieve_vector_store_file_batch( batch_id=batch_id, @@ -267,7 +298,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): limit: int | None = 20, order: str | None = "desc", ): - await self.assert_action_allowed("read", "vector_store", vector_store_id) + await self.assert_action_allowed("read", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await 
provider.openai_list_files_in_vector_store_file_batch( batch_id=batch_id, @@ -284,7 +315,7 @@ class VectorStoresRoutingTable(CommonRoutingTableImpl): batch_id: str, vector_store_id: str, ): - await self.assert_action_allowed("update", "vector_store", vector_store_id) + await self.assert_action_allowed("update", "vector_db", vector_store_id) provider = await self.get_provider_impl(vector_store_id) return await provider.openai_cancel_vector_store_file_batch( batch_id=batch_id, diff --git a/llama_stack/core/server/server.py b/llama_stack/core/server/server.py index 80505c3f9..db7584b01 100644 --- a/llama_stack/core/server/server.py +++ b/llama_stack/core/server/server.py @@ -36,6 +36,7 @@ from llama_stack.apis.common.responses import PaginatedResponse from llama_stack.core.access_control.access_control import AccessDeniedError from llama_stack.core.datatypes import ( AuthenticationRequiredError, + LoggingConfig, StackRunConfig, process_cors_config, ) @@ -52,13 +53,19 @@ from llama_stack.core.stack import ( cast_image_name_to_string, replace_env_vars, ) -from llama_stack.core.telemetry import Telemetry -from llama_stack.core.telemetry.tracing import CURRENT_TRACE_CONTEXT, setup_logger from llama_stack.core.utils.config import redact_sensitive_fields from llama_stack.core.utils.config_resolution import Mode, resolve_config_or_distro from llama_stack.core.utils.context import preserve_contexts_async_generator -from llama_stack.log import LoggingConfig, get_logger, setup_logging +from llama_stack.log import get_logger from llama_stack.providers.datatypes import Api +from llama_stack.providers.inline.telemetry.meta_reference.config import TelemetryConfig +from llama_stack.providers.inline.telemetry.meta_reference.telemetry import ( + TelemetryAdapter, +) +from llama_stack.providers.utils.telemetry.tracing import ( + CURRENT_TRACE_CONTEXT, + setup_logger, +) from .auth import AuthenticationMiddleware from .quota import QuotaMiddleware @@ -167,9 +174,7 @@ class StackApp(FastAPI): @asynccontextmanager async def lifespan(app: StackApp): - server_version = parse_version("llama-stack") - - logger.info(f"Starting up Llama Stack server (version: {server_version})") + logger.info("Starting up") assert app.stack is not None app.stack.create_registry_refresh_task() yield @@ -369,9 +374,6 @@ def create_app() -> StackApp: Returns: Configured StackApp instance. 
""" - # Initialize logging from environment variables first - setup_logging() - config_file = os.getenv("LLAMA_STACK_CONFIG") if config_file is None: raise ValueError("LLAMA_STACK_CONFIG environment variable is required") @@ -444,7 +446,9 @@ def create_app() -> StackApp: app.add_middleware(CORSMiddleware, **cors_config.model_dump()) if config.telemetry.enabled: - setup_logger(Telemetry()) + setup_logger(impls[Api.telemetry]) + else: + setup_logger(TelemetryAdapter(TelemetryConfig(), {})) # Load external APIs if configured external_apis = load_external_apis(config) @@ -502,8 +506,7 @@ def create_app() -> StackApp: app.exception_handler(RequestValidationError)(global_exception_handler) app.exception_handler(Exception)(global_exception_handler) - if config.telemetry.enabled: - app.add_middleware(TracingMiddleware, impls=impls, external_apis=external_apis) + app.add_middleware(TracingMiddleware, impls=impls, external_apis=external_apis) return app diff --git a/llama_stack/core/server/tracing.py b/llama_stack/core/server/tracing.py index c4901d9b1..4c6df5b42 100644 --- a/llama_stack/core/server/tracing.py +++ b/llama_stack/core/server/tracing.py @@ -7,8 +7,8 @@ from aiohttp import hdrs from llama_stack.core.external import ExternalApiSpec from llama_stack.core.server.routes import find_matching_route, initialize_route_impls -from llama_stack.core.telemetry.tracing import end_trace, start_trace from llama_stack.log import get_logger +from llama_stack.providers.utils.telemetry.tracing import end_trace, start_trace logger = get_logger(name=__name__, category="core::server") diff --git a/llama_stack/core/stack.py b/llama_stack/core/stack.py index ebfd59a05..4cf1d072d 100644 --- a/llama_stack/core/stack.py +++ b/llama_stack/core/stack.py @@ -35,7 +35,7 @@ from llama_stack.apis.telemetry import Telemetry from llama_stack.apis.tools import RAGToolRuntime, ToolGroups, ToolRuntime from llama_stack.apis.vector_io import VectorIO from llama_stack.core.conversations.conversations import ConversationServiceConfig, ConversationServiceImpl -from llama_stack.core.datatypes import Provider, SafetyConfig, StackRunConfig, VectorStoresConfig +from llama_stack.core.datatypes import Provider, StackRunConfig, VectorStoresConfig from llama_stack.core.distribution import get_provider_registry from llama_stack.core.inspect import DistributionInspectConfig, DistributionInspectImpl from llama_stack.core.prompts.prompts import PromptServiceConfig, PromptServiceImpl @@ -175,30 +175,6 @@ async def validate_vector_stores_config(vector_stores_config: VectorStoresConfig logger.debug(f"Validated default embedding model: {default_model_id} (dimension: {embedding_dimension})") -async def validate_safety_config(safety_config: SafetyConfig | None, impls: dict[Api, Any]): - if safety_config is None or safety_config.default_shield_id is None: - return - - if Api.shields not in impls: - raise ValueError("Safety configuration requires the shields API to be enabled") - - if Api.safety not in impls: - raise ValueError("Safety configuration requires the safety API to be enabled") - - shields_impl = impls[Api.shields] - response = await shields_impl.list_shields() - shields_by_id = {shield.identifier: shield for shield in response.data} - - default_shield_id = safety_config.default_shield_id - # don't validate if there are no shields registered - if shields_by_id and default_shield_id not in shields_by_id: - available = sorted(shields_by_id) - raise ValueError( - f"Configured default_shield_id '{default_shield_id}' not found among 
registered shields." - f" Available shields: {available}" - ) - - class EnvVarError(Exception): def __init__(self, var_name: str, path: str = ""): self.var_name = var_name @@ -436,7 +412,6 @@ class Stack: await register_resources(self.run_config, impls) await refresh_registry_once(impls) await validate_vector_stores_config(self.run_config.vector_stores, impls) - await validate_safety_config(self.run_config.safety, impls) self.impls = impls def create_registry_refresh_task(self): diff --git a/llama_stack/core/telemetry/__init__.py b/llama_stack/core/telemetry/__init__.py deleted file mode 100644 index bab612c0d..000000000 --- a/llama_stack/core/telemetry/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -from .telemetry import Telemetry -from .trace_protocol import serialize_value, trace_protocol -from .tracing import ( - CURRENT_TRACE_CONTEXT, - ROOT_SPAN_MARKERS, - end_trace, - enqueue_event, - get_current_span, - setup_logger, - span, - start_trace, -) - -__all__ = [ - "Telemetry", - "trace_protocol", - "serialize_value", - "CURRENT_TRACE_CONTEXT", - "ROOT_SPAN_MARKERS", - "end_trace", - "enqueue_event", - "get_current_span", - "setup_logger", - "span", - "start_trace", -] diff --git a/llama_stack/core/ui/README.md b/llama_stack/core/ui/README.md index 37f1501c9..f1d85454b 100644 --- a/llama_stack/core/ui/README.md +++ b/llama_stack/core/ui/README.md @@ -9,7 +9,7 @@ 1. Start up Llama Stack API server. More details [here](https://llamastack.github.io/latest/getting_started/index.htmll). ``` -llama stack list-deps together | xargs -L1 uv pip install +llama stack build --distro together --image-type venv llama stack run together ``` diff --git a/llama_stack/core/ui/page/playground/tools.py b/llama_stack/core/ui/page/playground/tools.py index 16fd464ee..4ee9d2204 100644 --- a/llama_stack/core/ui/page/playground/tools.py +++ b/llama_stack/core/ui/page/playground/tools.py @@ -32,7 +32,7 @@ def tool_chat_page(): tool_groups_list = [tool_group.identifier for tool_group in tool_groups] mcp_tools_list = [tool for tool in tool_groups_list if tool.startswith("mcp::")] builtin_tools_list = [tool for tool in tool_groups_list if not tool.startswith("mcp::")] - selected_vector_stores = [] + selected_vector_dbs = [] def reset_agent(): st.session_state.clear() @@ -55,13 +55,13 @@ def tool_chat_page(): ) if "builtin::rag" in toolgroup_selection: - vector_stores = llama_stack_api.client.vector_stores.list() or [] - if not vector_stores: + vector_dbs = llama_stack_api.client.vector_dbs.list() or [] + if not vector_dbs: st.info("No vector databases available for selection.") - vector_stores = [vector_store.identifier for vector_store in vector_stores] - selected_vector_stores = st.multiselect( + vector_dbs = [vector_db.identifier for vector_db in vector_dbs] + selected_vector_dbs = st.multiselect( label="Select Document Collections to use in RAG queries", - options=vector_stores, + options=vector_dbs, on_change=reset_agent, ) @@ -119,7 +119,7 @@ def tool_chat_page(): tool_dict = dict( name="builtin::rag", args={ - "vector_store_ids": list(selected_vector_stores), + "vector_db_ids": list(selected_vector_dbs), }, ) toolgroup_selection[i] = tool_dict diff --git a/llama_stack/distributions/ci-tests/run.yaml b/llama_stack/distributions/ci-tests/run.yaml index ed880d4a0..ecf9eed3b 100644 --- 
a/llama_stack/distributions/ci-tests/run.yaml +++ b/llama_stack/distributions/ci-tests/run.yaml @@ -274,5 +274,3 @@ vector_stores: default_embedding_model: provider_id: sentence-transformers model_id: nomic-ai/nomic-embed-text-v1.5 -safety: - default_shield_id: llama-guard diff --git a/llama_stack/distributions/dell/doc_template.md b/llama_stack/distributions/dell/doc_template.md index 4e28673e8..852e78d0e 100644 --- a/llama_stack/distributions/dell/doc_template.md +++ b/llama_stack/distributions/dell/doc_template.md @@ -157,7 +157,7 @@ docker run \ Make sure you have done `pip install llama-stack` and have the Llama Stack CLI available. ```bash -llama stack list-deps {{ name }} | xargs -L1 pip install +llama stack build --distro {{ name }} --image-type conda INFERENCE_MODEL=$INFERENCE_MODEL \ DEH_URL=$DEH_URL \ CHROMA_URL=$CHROMA_URL \ diff --git a/llama_stack/distributions/starter-gpu/run.yaml b/llama_stack/distributions/starter-gpu/run.yaml index 33e8c9b59..92483c78e 100644 --- a/llama_stack/distributions/starter-gpu/run.yaml +++ b/llama_stack/distributions/starter-gpu/run.yaml @@ -277,5 +277,3 @@ vector_stores: default_embedding_model: provider_id: sentence-transformers model_id: nomic-ai/nomic-embed-text-v1.5 -safety: - default_shield_id: llama-guard diff --git a/llama_stack/distributions/starter/run.yaml b/llama_stack/distributions/starter/run.yaml index 4ca0914af..3b9d8f890 100644 --- a/llama_stack/distributions/starter/run.yaml +++ b/llama_stack/distributions/starter/run.yaml @@ -274,5 +274,3 @@ vector_stores: default_embedding_model: provider_id: sentence-transformers model_id: nomic-ai/nomic-embed-text-v1.5 -safety: - default_shield_id: llama-guard diff --git a/llama_stack/distributions/starter/starter.py b/llama_stack/distributions/starter/starter.py index 49b7a2463..c8c7101a6 100644 --- a/llama_stack/distributions/starter/starter.py +++ b/llama_stack/distributions/starter/starter.py @@ -12,7 +12,6 @@ from llama_stack.core.datatypes import ( Provider, ProviderSpec, QualifiedModel, - SafetyConfig, ShieldInput, ToolGroupInput, VectorStoresConfig, @@ -257,9 +256,6 @@ def get_distribution_template(name: str = "starter") -> DistributionTemplate: model_id="nomic-ai/nomic-embed-text-v1.5", ), ), - safety_config=SafetyConfig( - default_shield_id="llama-guard", - ), ), }, run_config_env_vars={ diff --git a/llama_stack/distributions/template.py b/llama_stack/distributions/template.py index f0c4c6b9e..64f21e626 100644 --- a/llama_stack/distributions/template.py +++ b/llama_stack/distributions/template.py @@ -24,7 +24,6 @@ from llama_stack.core.datatypes import ( DistributionSpec, ModelInput, Provider, - SafetyConfig, ShieldInput, TelemetryConfig, ToolGroupInput, @@ -189,7 +188,6 @@ class RunConfigSettings(BaseModel): default_datasets: list[DatasetInput] | None = None default_benchmarks: list[BenchmarkInput] | None = None vector_stores_config: VectorStoresConfig | None = None - safety_config: SafetyConfig | None = None telemetry: TelemetryConfig = Field(default_factory=lambda: TelemetryConfig(enabled=True)) storage_backends: dict[str, Any] | None = None storage_stores: dict[str, Any] | None = None @@ -292,9 +290,6 @@ class RunConfigSettings(BaseModel): if self.vector_stores_config: config["vector_stores"] = self.vector_stores_config.model_dump(exclude_none=True) - if self.safety_config: - config["safety"] = self.safety_config.model_dump(exclude_none=True) - return config diff --git a/llama_stack/log.py b/llama_stack/log.py index c11c2c06f..ff54b2f7c 100644 --- a/llama_stack/log.py +++ 
b/llama_stack/log.py @@ -9,23 +9,15 @@ import os import re from logging.config import dictConfig # allow-direct-logging -from pydantic import BaseModel, Field from rich.console import Console from rich.errors import MarkupError from rich.logging import RichHandler +from llama_stack.core.datatypes import LoggingConfig + # Default log level DEFAULT_LOG_LEVEL = logging.INFO - -class LoggingConfig(BaseModel): - category_levels: dict[str, str] = Field( - default_factory=dict, - description=""" -Dictionary of different logging configurations for different portions (ex: core, server) of llama stack""", - ) - - # Predefined categories CATEGORIES = [ "core", @@ -145,8 +137,7 @@ class CustomRichHandler(RichHandler): # Set a reasonable default width for console output, especially when redirected to files console_width = int(os.environ.get("LLAMA_STACK_LOG_WIDTH", "120")) # Don't force terminal codes to avoid ANSI escape codes in log files - # Ensure logs go to stderr, not stdout - kwargs["console"] = Console(width=console_width, stderr=True) + kwargs["console"] = Console(width=console_width) super().__init__(*args, **kwargs) def emit(self, record): @@ -175,30 +166,14 @@ class CustomFileHandler(logging.FileHandler): super().emit(record) -def setup_logging(category_levels: dict[str, int] | None = None, log_file: str | None = None) -> None: +def setup_logging(category_levels: dict[str, int], log_file: str | None) -> None: """ Configure logging based on the provided category log levels and an optional log file. - If category_levels or log_file are not provided, they will be read from environment variables. Parameters: - category_levels (Dict[str, int] | None): A dictionary mapping categories to their log levels. - If None, reads from LLAMA_STACK_LOGGING environment variable and uses defaults. - log_file (str | None): Path to a log file to additionally pipe the logs into. - If None, reads from LLAMA_STACK_LOG_FILE environment variable. + category_levels (Dict[str, int]): A dictionary mapping categories to their log levels. 
+ log_file (str): Path to a log file to additionally pipe the logs into """ - global _category_levels - # Read from environment variables if not explicitly provided - if category_levels is None: - category_levels = dict.fromkeys(CATEGORIES, DEFAULT_LOG_LEVEL) - env_config = os.environ.get("LLAMA_STACK_LOGGING", "") - if env_config: - category_levels.update(parse_environment_config(env_config)) - - # Update the module-level _category_levels so that already-created loggers pick up the new levels - _category_levels.update(category_levels) - - if log_file is None: - log_file = os.environ.get("LLAMA_STACK_LOG_FILE") log_format = "%(asctime)s %(name)s:%(lineno)d %(category)s: %(message)s" class CategoryFilter(logging.Filter): @@ -249,30 +224,12 @@ def setup_logging(category_levels: dict[str, int] | None = None, log_file: str | } }, "loggers": { - **{ - category: { - "handlers": list(handlers.keys()), # Apply all handlers - "level": category_levels.get(category, DEFAULT_LOG_LEVEL), - "propagate": False, # Disable propagation to root logger - } - for category in CATEGORIES - }, - # Explicitly configure uvicorn loggers to preserve their INFO level - "uvicorn": { - "handlers": list(handlers.keys()), - "level": logging.INFO, - "propagate": False, - }, - "uvicorn.error": { - "handlers": list(handlers.keys()), - "level": logging.INFO, - "propagate": False, - }, - "uvicorn.access": { - "handlers": list(handlers.keys()), - "level": logging.INFO, - "propagate": False, - }, + category: { + "handlers": list(handlers.keys()), # Apply all handlers + "level": category_levels.get(category, DEFAULT_LOG_LEVEL), + "propagate": False, # Disable propagation to root logger + } + for category in CATEGORIES }, "root": { "handlers": list(handlers.keys()), @@ -281,18 +238,10 @@ def setup_logging(category_levels: dict[str, int] | None = None, log_file: str | } dictConfig(logging_config) - # Update log levels for all loggers that were created before setup_logging was called - for name, logger in logging.root.manager.loggerDict.items(): + # Ensure third-party libraries follow the root log level + for _, logger in logging.root.manager.loggerDict.items(): if isinstance(logger, logging.Logger): - # Skip infrastructure loggers (uvicorn, fastapi) to preserve their configured levels - if name.startswith(("uvicorn", "fastapi")): - continue - # Update llama_stack loggers if root level was explicitly set (e.g., via all=CRITICAL) - if name.startswith("llama_stack") and "root" in category_levels: - logger.setLevel(root_level) - # Update third-party library loggers - elif not name.startswith("llama_stack"): - logger.setLevel(root_level) + logger.setLevel(root_level) def get_logger( @@ -329,3 +278,12 @@ def get_logger( log_level = _category_levels.get("root", DEFAULT_LOG_LEVEL) logger.setLevel(log_level) return logging.LoggerAdapter(logger, {"category": category}) + + +env_config = os.environ.get("LLAMA_STACK_LOGGING", "") +if env_config: + _category_levels.update(parse_environment_config(env_config)) + +log_file = os.environ.get("LLAMA_STACK_LOG_FILE") + +setup_logging(_category_levels, log_file) diff --git a/llama_stack/providers/datatypes.py b/llama_stack/providers/datatypes.py index 9be3edb8e..c8ff9cecb 100644 --- a/llama_stack/providers/datatypes.py +++ b/llama_stack/providers/datatypes.py @@ -17,7 +17,7 @@ from llama_stack.apis.models import Model from llama_stack.apis.scoring_functions import ScoringFn from llama_stack.apis.shields import Shield from llama_stack.apis.tools import ToolGroup -from llama_stack.apis.vector_stores 
import VectorStore +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.schema_utils import json_schema_type @@ -68,10 +68,10 @@ class ShieldsProtocolPrivate(Protocol): async def unregister_shield(self, identifier: str) -> None: ... -class VectorStoresProtocolPrivate(Protocol): - async def register_vector_store(self, vector_store: VectorStore) -> None: ... +class VectorDBsProtocolPrivate(Protocol): + async def register_vector_db(self, vector_db: VectorDB) -> None: ... - async def unregister_vector_store(self, vector_store_id: str) -> None: ... + async def unregister_vector_db(self, vector_db_id: str) -> None: ... class DatasetsProtocolPrivate(Protocol): diff --git a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py index 9fd3f7d0e..96f271669 100644 --- a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py +++ b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py @@ -67,7 +67,6 @@ from llama_stack.apis.safety import Safety from llama_stack.apis.tools import ToolGroups, ToolInvocationResult, ToolRuntime from llama_stack.apis.vector_io import VectorIO from llama_stack.core.datatypes import AccessRule -from llama_stack.core.telemetry import tracing from llama_stack.log import get_logger from llama_stack.models.llama.datatypes import ( BuiltinTool, @@ -79,6 +78,7 @@ from llama_stack.providers.utils.inference.openai_compat import ( convert_tooldef_to_openai_tool, ) from llama_stack.providers.utils.kvstore import KVStore +from llama_stack.providers.utils.telemetry import tracing from .persistence import AgentPersistence from .safety import SafetyException, ShieldRunnerMixin diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py index 9f5fa06a7..2360dafd9 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py @@ -131,7 +131,7 @@ class OpenAIResponsesImpl: tool_context.recover_tools_from_previous_response(previous_response) elif conversation is not None: - conversation_items = await self.conversations_api.list_items(conversation, order="asc") + conversation_items = await self.conversations_api.list(conversation, order="asc") # Use stored messages as source of truth (like previous_response.messages) stored_messages = await self.responses_store.get_conversation_messages(conversation) @@ -372,13 +372,14 @@ class OpenAIResponsesImpl: final_response = stream_chunk.response elif stream_chunk.type == "response.failed": failed_response = stream_chunk.response + yield stream_chunk if stream_chunk.type == "response.output_item.done": item = stream_chunk.item output_items.append(item) - # Store and sync before yielding terminal events - # This ensures the storage/syncing happens even if the consumer breaks after receiving the event + # Store and sync immediately after yielding terminal events + # This ensures the storage/syncing happens even if the consumer breaks early if ( stream_chunk.type in {"response.completed", "response.incomplete"} and final_response @@ -399,8 +400,6 @@ class OpenAIResponsesImpl: await self._sync_response_to_conversation(conversation, input, output_items) await self.responses_store.store_conversation_messages(conversation, messages_to_store) - yield stream_chunk - async def delete_openai_response(self, 
response_id: str) -> OpenAIDeleteResponseObject: return await self.responses_store.delete_response_object(response_id) diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py index f0bafff21..e80ffcdd1 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py @@ -65,9 +65,9 @@ from llama_stack.apis.inference import ( OpenAIChoice, OpenAIMessageParam, ) -from llama_stack.core.telemetry import tracing from llama_stack.log import get_logger from llama_stack.providers.utils.inference.prompt_adapter import interleaved_content_as_str +from llama_stack.providers.utils.telemetry import tracing from .types import ChatCompletionContext, ChatCompletionResult from .utils import ( diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/tool_executor.py b/llama_stack/providers/inline/agents/meta_reference/responses/tool_executor.py index 8e0dc9ecb..659dc599e 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/tool_executor.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/tool_executor.py @@ -37,8 +37,8 @@ from llama_stack.apis.inference import ( ) from llama_stack.apis.tools import ToolGroups, ToolInvocationResult, ToolRuntime from llama_stack.apis.vector_io import VectorIO -from llama_stack.core.telemetry import tracing from llama_stack.log import get_logger +from llama_stack.providers.utils.telemetry import tracing from .types import ChatCompletionContext, ToolExecutionResult diff --git a/llama_stack/providers/inline/agents/meta_reference/safety.py b/llama_stack/providers/inline/agents/meta_reference/safety.py index 9baf5a14d..8f3ecf5c9 100644 --- a/llama_stack/providers/inline/agents/meta_reference/safety.py +++ b/llama_stack/providers/inline/agents/meta_reference/safety.py @@ -8,8 +8,8 @@ import asyncio from llama_stack.apis.inference import Message from llama_stack.apis.safety import Safety, SafetyViolation, ViolationLevel -from llama_stack.core.telemetry import tracing from llama_stack.log import get_logger +from llama_stack.providers.utils.telemetry import tracing log = get_logger(name=__name__, category="agents::meta_reference") diff --git a/llama_stack/providers/inline/safety/code_scanner/code_scanner.py b/llama_stack/providers/inline/safety/code_scanner/code_scanner.py index 7da9ea0d7..e1cd8c5e4 100644 --- a/llama_stack/providers/inline/safety/code_scanner/code_scanner.py +++ b/llama_stack/providers/inline/safety/code_scanner/code_scanner.py @@ -101,10 +101,7 @@ class MetaReferenceCodeScannerSafetyImpl(Safety): metadata=metadata, ) - async def run_moderation(self, input: str | list[str], model: str | None = None) -> ModerationObject: - if model is None: - raise ValueError("Code scanner moderation requires a model identifier.") - + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: inputs = input if isinstance(input, list) else [input] results = [] diff --git a/llama_stack/providers/inline/safety/llama_guard/llama_guard.py b/llama_stack/providers/inline/safety/llama_guard/llama_guard.py index 6f6346e82..47c6ccbed 100644 --- a/llama_stack/providers/inline/safety/llama_guard/llama_guard.py +++ b/llama_stack/providers/inline/safety/llama_guard/llama_guard.py @@ -200,10 +200,7 @@ class LlamaGuardSafetyImpl(Safety, ShieldsProtocolPrivate): return await impl.run(messages) - async def 
run_moderation(self, input: str | list[str], model: str | None = None) -> ModerationObject: - if model is None: - raise ValueError("Llama Guard moderation requires a model identifier.") - + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: if isinstance(input, list): messages = input.copy() else: diff --git a/llama_stack/providers/inline/safety/prompt_guard/prompt_guard.py b/llama_stack/providers/inline/safety/prompt_guard/prompt_guard.py index 2015e1150..8ca96300f 100644 --- a/llama_stack/providers/inline/safety/prompt_guard/prompt_guard.py +++ b/llama_stack/providers/inline/safety/prompt_guard/prompt_guard.py @@ -63,7 +63,7 @@ class PromptGuardSafetyImpl(Safety, ShieldsProtocolPrivate): return await self.shield.run(messages) - async def run_moderation(self, input: str | list[str], model: str | None = None) -> ModerationObject: + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: raise NotImplementedError("run_moderation is not implemented for Prompt Guard") diff --git a/llama_stack/providers/inline/telemetry/__init__.py b/llama_stack/providers/inline/telemetry/__init__.py new file mode 100644 index 000000000..756f351d8 --- /dev/null +++ b/llama_stack/providers/inline/telemetry/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. diff --git a/llama_stack/providers/inline/telemetry/meta_reference/__init__.py b/llama_stack/providers/inline/telemetry/meta_reference/__init__.py new file mode 100644 index 000000000..21743b653 --- /dev/null +++ b/llama_stack/providers/inline/telemetry/meta_reference/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from typing import Any + +from llama_stack.core.datatypes import Api + +from .config import TelemetryConfig, TelemetrySink + +__all__ = ["TelemetryConfig", "TelemetrySink"] + + +async def get_provider_impl(config: TelemetryConfig, deps: dict[Api, Any]): + from .telemetry import TelemetryAdapter + + impl = TelemetryAdapter(config, deps) + await impl.initialize() + return impl diff --git a/llama_stack/providers/inline/telemetry/meta_reference/config.py b/llama_stack/providers/inline/telemetry/meta_reference/config.py new file mode 100644 index 000000000..088dd8439 --- /dev/null +++ b/llama_stack/providers/inline/telemetry/meta_reference/config.py @@ -0,0 +1,47 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from enum import StrEnum +from typing import Any + +from pydantic import BaseModel, Field, field_validator + + +class TelemetrySink(StrEnum): + OTEL_TRACE = "otel_trace" + OTEL_METRIC = "otel_metric" + CONSOLE = "console" + + +class TelemetryConfig(BaseModel): + otel_exporter_otlp_endpoint: str | None = Field( + default=None, + description="The OpenTelemetry collector endpoint URL (base URL for traces, metrics, and logs). 
If not set, the SDK will use OTEL_EXPORTER_OTLP_ENDPOINT environment variable.", + ) + service_name: str = Field( + # service name is always the same, use zero-width space to avoid clutter + default="\u200b", + description="The service name to use for telemetry", + ) + sinks: list[TelemetrySink] = Field( + default_factory=list, + description="List of telemetry sinks to enable (possible values: otel_trace, otel_metric, console)", + ) + + @field_validator("sinks", mode="before") + @classmethod + def validate_sinks(cls, v): + if isinstance(v, str): + return [TelemetrySink(sink.strip()) for sink in v.split(",")] + return v or [] + + @classmethod + def sample_run_config(cls, __distro_dir__: str) -> dict[str, Any]: + return { + "service_name": "${env.OTEL_SERVICE_NAME:=\u200b}", + "sinks": "${env.TELEMETRY_SINKS:=}", + "otel_exporter_otlp_endpoint": "${env.OTEL_EXPORTER_OTLP_ENDPOINT:=}", + } diff --git a/llama_stack/providers/inline/telemetry/meta_reference/console_span_processor.py b/llama_stack/providers/inline/telemetry/meta_reference/console_span_processor.py new file mode 100644 index 000000000..78e49af94 --- /dev/null +++ b/llama_stack/providers/inline/telemetry/meta_reference/console_span_processor.py @@ -0,0 +1,75 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +import json +from datetime import UTC, datetime + +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SpanProcessor +from opentelemetry.trace.status import StatusCode + +from llama_stack.log import get_logger + +logger = get_logger(name="console_span_processor", category="telemetry") + + +class ConsoleSpanProcessor(SpanProcessor): + def __init__(self, print_attributes: bool = False): + self.print_attributes = print_attributes + + def on_start(self, span: ReadableSpan, parent_context=None) -> None: + if span.attributes and span.attributes.get("__autotraced__"): + return + + timestamp = datetime.fromtimestamp(span.start_time / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3] + logger.info(f"[dim]{timestamp}[/dim] [bold magenta][START][/bold magenta] [dim]{span.name}[/dim]") + + def on_end(self, span: ReadableSpan) -> None: + timestamp = datetime.fromtimestamp(span.end_time / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3] + span_context = f"[dim]{timestamp}[/dim] [bold magenta][END][/bold magenta] [dim]{span.name}[/dim]" + if span.status.status_code == StatusCode.ERROR: + span_context += " [bold red][ERROR][/bold red]" + elif span.status.status_code != StatusCode.UNSET: + span_context += f" [{span.status.status_code}]" + duration_ms = (span.end_time - span.start_time) / 1e6 + span_context += f" ({duration_ms:.2f}ms)" + logger.info(span_context) + + if self.print_attributes and span.attributes: + for key, value in span.attributes.items(): + if key.startswith("__"): + continue + str_value = str(value) + if len(str_value) > 1000: + str_value = str_value[:997] + "..." 
+ logger.info(f" [dim]{key}[/dim]: {str_value}") + + for event in span.events: + event_time = datetime.fromtimestamp(event.timestamp / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3] + severity = event.attributes.get("severity", "info") + message = event.attributes.get("message", event.name) + if isinstance(message, dict) or isinstance(message, list): + message = json.dumps(message, indent=2) + severity_color = { + "error": "red", + "warn": "yellow", + "info": "white", + "debug": "dim", + }.get(severity, "white") + logger.info(f" {event_time} [bold {severity_color}][{severity.upper()}][/bold {severity_color}] {message}") + if event.attributes: + for key, value in event.attributes.items(): + if key.startswith("__") or key in ["message", "severity"]: + continue + logger.info(f"[dim]{key}[/dim]: {value}") + + def shutdown(self) -> None: + """Shutdown the processor.""" + pass + + def force_flush(self, timeout_millis: float | None = None) -> bool: + """Force flush any pending spans.""" + return True diff --git a/llama_stack/core/telemetry/telemetry.py b/llama_stack/providers/inline/telemetry/meta_reference/telemetry.py similarity index 96% rename from llama_stack/core/telemetry/telemetry.py rename to llama_stack/providers/inline/telemetry/meta_reference/telemetry.py index f0cec08ec..2a225476b 100644 --- a/llama_stack/core/telemetry/telemetry.py +++ b/llama_stack/providers/inline/telemetry/meta_reference/telemetry.py @@ -24,13 +24,14 @@ from llama_stack.apis.telemetry import ( SpanStartPayload, SpanStatus, StructuredLogEvent, + Telemetry, UnstructuredLogEvent, ) -from llama_stack.apis.telemetry import ( - Telemetry as TelemetryBase, -) -from llama_stack.core.telemetry.tracing import ROOT_SPAN_MARKERS +from llama_stack.core.datatypes import Api from llama_stack.log import get_logger +from llama_stack.providers.utils.telemetry.tracing import ROOT_SPAN_MARKERS + +from .config import TelemetryConfig _GLOBAL_STORAGE: dict[str, dict[str | int, Any]] = { "active_spans": {}, @@ -49,8 +50,9 @@ def is_tracing_enabled(tracer): return span.is_recording() -class Telemetry(TelemetryBase): - def __init__(self) -> None: +class TelemetryAdapter(Telemetry): + def __init__(self, _config: TelemetryConfig, deps: dict[Api, Any]) -> None: + self.datasetio_api = deps.get(Api.datasetio) self.meter = None global _TRACER_PROVIDER @@ -77,10 +79,8 @@ class Telemetry(TelemetryBase): metric_reader = PeriodicExportingMetricReader(OTLPMetricExporter()) metric_provider = MeterProvider(metric_readers=[metric_reader]) metrics.set_meter_provider(metric_provider) - self.is_otel_endpoint_set = True else: logger.warning("OTEL_EXPORTER_OTLP_ENDPOINT is not set, skipping telemetry") - self.is_otel_endpoint_set = False self.meter = metrics.get_meter(__name__) self._lock = _global_lock @@ -89,8 +89,7 @@ class Telemetry(TelemetryBase): pass async def shutdown(self) -> None: - if self.is_otel_endpoint_set: - trace.get_tracer_provider().force_flush() + trace.get_tracer_provider().force_flush() async def log_event(self, event: Event, ttl_seconds: int = 604800) -> None: if isinstance(event, UnstructuredLogEvent): diff --git a/llama_stack/providers/inline/vector_io/faiss/faiss.py b/llama_stack/providers/inline/vector_io/faiss/faiss.py index 5e33d4ca3..f13eb3e96 100644 --- a/llama_stack/providers/inline/vector_io/faiss/faiss.py +++ b/llama_stack/providers/inline/vector_io/faiss/faiss.py @@ -17,21 +17,21 @@ from numpy.typing import NDArray from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from 
llama_stack.apis.inference import Inference, InterleavedContent +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import HealthResponse, HealthStatus, VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import HealthResponse, HealthStatus, VectorDBsProtocolPrivate from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin -from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorStoreWithIndex +from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorDBWithIndex from .config import FaissVectorIOConfig logger = get_logger(name=__name__, category="vector_io") VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:{VERSION}::" FAISS_INDEX_PREFIX = f"faiss_index:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:{VERSION}::" @@ -176,28 +176,28 @@ class FaissIndex(EmbeddingIndex): ) -class FaissVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class FaissVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): def __init__(self, config: FaissVectorIOConfig, inference_api: Inference, files_api: Files | None) -> None: super().__init__(files_api=files_api, kvstore=None) self.config = config self.inference_api = inference_api - self.cache: dict[str, VectorStoreWithIndex] = {} + self.cache: dict[str, VectorDBWithIndex] = {} async def initialize(self) -> None: self.kvstore = await kvstore_impl(self.config.persistence) # Load existing banks from kvstore start_key = VECTOR_DBS_PREFIX end_key = f"{VECTOR_DBS_PREFIX}\xff" - stored_vector_stores = await self.kvstore.values_in_range(start_key, end_key) + stored_vector_dbs = await self.kvstore.values_in_range(start_key, end_key) - for vector_store_data in stored_vector_stores: - vector_store = VectorStore.model_validate_json(vector_store_data) - index = VectorStoreWithIndex( - vector_store, - await FaissIndex.create(vector_store.embedding_dimension, self.kvstore, vector_store.identifier), + for vector_db_data in stored_vector_dbs: + vector_db = VectorDB.model_validate_json(vector_db_data) + index = VectorDBWithIndex( + vector_db, + await FaissIndex.create(vector_db.embedding_dimension, self.kvstore, vector_db.identifier), self.inference_api, ) - self.cache[vector_store.identifier] = index + self.cache[vector_db.identifier] = index # Load existing OpenAI vector stores into the in-memory cache await self.initialize_openai_vector_stores() @@ -222,31 +222,32 @@ class FaissVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoco except Exception as e: return HealthResponse(status=HealthStatus.ERROR, message=f"Health check failed: {str(e)}") - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: assert self.kvstore is not None - key = f"{VECTOR_DBS_PREFIX}{vector_store.identifier}" - await self.kvstore.set(key=key, value=vector_store.model_dump_json()) + key = f"{VECTOR_DBS_PREFIX}{vector_db.identifier}" + await 
self.kvstore.set(key=key, value=vector_db.model_dump_json()) # Store in cache - self.cache[vector_store.identifier] = VectorStoreWithIndex( - vector_store=vector_store, - index=await FaissIndex.create(vector_store.embedding_dimension, self.kvstore, vector_store.identifier), + self.cache[vector_db.identifier] = VectorDBWithIndex( + vector_db=vector_db, + index=await FaissIndex.create(vector_db.embedding_dimension, self.kvstore, vector_db.identifier), inference_api=self.inference_api, ) - async def list_vector_stores(self) -> list[VectorStore]: - return [i.vector_store for i in self.cache.values()] + async def list_vector_dbs(self) -> list[VectorDB]: + return [i.vector_db for i in self.cache.values()] - async def unregister_vector_store(self, vector_store_id: str) -> None: + async def unregister_vector_db(self, vector_db_id: str) -> None: assert self.kvstore is not None - if vector_store_id not in self.cache: + if vector_db_id not in self.cache: + logger.warning(f"Vector DB {vector_db_id} not found") return - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] - await self.kvstore.delete(f"{VECTOR_DBS_PREFIX}{vector_store_id}") + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] + await self.kvstore.delete(f"{VECTOR_DBS_PREFIX}{vector_db_id}") async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: index = self.cache.get(vector_db_id) diff --git a/llama_stack/providers/inline/vector_io/sqlite_vec/sqlite_vec.py b/llama_stack/providers/inline/vector_io/sqlite_vec/sqlite_vec.py index 37294f173..cfe23bde5 100644 --- a/llama_stack/providers/inline/vector_io/sqlite_vec/sqlite_vec.py +++ b/llama_stack/providers/inline/vector_io/sqlite_vec/sqlite_vec.py @@ -17,10 +17,10 @@ from numpy.typing import NDArray from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin @@ -28,7 +28,7 @@ from llama_stack.providers.utils.memory.vector_store import ( RERANKER_TYPE_RRF, ChunkForDeletion, EmbeddingIndex, - VectorStoreWithIndex, + VectorDBWithIndex, ) from llama_stack.providers.utils.vector_io.vector_utils import WeightedInMemoryAggregator @@ -41,7 +41,7 @@ HYBRID_SEARCH = "hybrid" SEARCH_MODES = {VECTOR_SEARCH, KEYWORD_SEARCH, HYBRID_SEARCH} VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:sqlite_vec:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:sqlite_vec:{VERSION}::" VECTOR_INDEX_PREFIX = f"vector_index:sqlite_vec:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:sqlite_vec:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:sqlite_vec:{VERSION}::" @@ -374,32 +374,32 @@ class SQLiteVecIndex(EmbeddingIndex): await asyncio.to_thread(_delete_chunks) -class SQLiteVecVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class 
SQLiteVecVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): """ A VectorIO implementation using SQLite + sqlite_vec. - This class handles vector database registration (with metadata stored in a table named `vector_stores`) - and creates a cache of VectorStoreWithIndex instances (each wrapping a SQLiteVecIndex). + This class handles vector database registration (with metadata stored in a table named `vector_dbs`) + and creates a cache of VectorDBWithIndex instances (each wrapping a SQLiteVecIndex). """ def __init__(self, config, inference_api: Inference, files_api: Files | None) -> None: super().__init__(files_api=files_api, kvstore=None) self.config = config self.inference_api = inference_api - self.cache: dict[str, VectorStoreWithIndex] = {} - self.vector_store_table = None + self.cache: dict[str, VectorDBWithIndex] = {} + self.vector_db_store = None async def initialize(self) -> None: self.kvstore = await kvstore_impl(self.config.persistence) start_key = VECTOR_DBS_PREFIX end_key = f"{VECTOR_DBS_PREFIX}\xff" - stored_vector_stores = await self.kvstore.values_in_range(start_key, end_key) - for db_json in stored_vector_stores: - vector_store = VectorStore.model_validate_json(db_json) + stored_vector_dbs = await self.kvstore.values_in_range(start_key, end_key) + for db_json in stored_vector_dbs: + vector_db = VectorDB.model_validate_json(db_json) index = await SQLiteVecIndex.create( - vector_store.embedding_dimension, self.config.db_path, vector_store.identifier + vector_db.embedding_dimension, self.config.db_path, vector_db.identifier ) - self.cache[vector_store.identifier] = VectorStoreWithIndex(vector_store, index, self.inference_api) + self.cache[vector_db.identifier] = VectorDBWithIndex(vector_db, index, self.inference_api) # Load existing OpenAI vector stores into the in-memory cache await self.initialize_openai_vector_stores() @@ -408,64 +408,63 @@ class SQLiteVecVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresPro # Clean up mixin resources (file batch tasks) await super().shutdown() - async def list_vector_stores(self) -> list[VectorStore]: - return [v.vector_store for v in self.cache.values()] + async def list_vector_dbs(self) -> list[VectorDB]: + return [v.vector_db for v in self.cache.values()] - async def register_vector_store(self, vector_store: VectorStore) -> None: - index = await SQLiteVecIndex.create( - vector_store.embedding_dimension, self.config.db_path, vector_store.identifier - ) - self.cache[vector_store.identifier] = VectorStoreWithIndex(vector_store, index, self.inference_api) + async def register_vector_db(self, vector_db: VectorDB) -> None: + index = await SQLiteVecIndex.create(vector_db.embedding_dimension, self.config.db_path, vector_db.identifier) + self.cache[vector_db.identifier] = VectorDBWithIndex(vector_db, index, self.inference_api) - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex | None: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex | None: + if vector_db_id in self.cache: + return self.cache[vector_db_id] - if self.vector_store_table is None: - raise VectorStoreNotFoundError(vector_store_id) + if self.vector_db_store is None: + raise VectorStoreNotFoundError(vector_db_id) - vector_store = self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise VectorStoreNotFoundError(vector_store_id) + vector_db = 
self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise VectorStoreNotFoundError(vector_db_id) - index = VectorStoreWithIndex( - vector_store=vector_store, + index = VectorDBWithIndex( + vector_db=vector_db, index=SQLiteVecIndex( - dimension=vector_store.embedding_dimension, + dimension=vector_db.embedding_dimension, db_path=self.config.db_path, - bank_id=vector_store.identifier, + bank_id=vector_db.identifier, kvstore=self.kvstore, ), inference_api=self.inference_api, ) - self.cache[vector_store_id] = index + self.cache[vector_db_id] = index return index - async def unregister_vector_store(self, vector_store_id: str) -> None: - if vector_store_id not in self.cache: + async def unregister_vector_db(self, vector_db_id: str) -> None: + if vector_db_id not in self.cache: + logger.warning(f"Vector DB {vector_db_id} not found") return - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) - # The VectorStoreWithIndex helper is expected to compute embeddings via the inference_api + # The VectorDBWithIndex helper is expected to compute embeddings via the inference_api # and then call our index's add_chunks. await index.insert_chunks(chunks) async def query_chunks( self, vector_db_id: str, query: Any, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) return await index.query_chunks(query, params) async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: """Delete chunks from a sqlite_vec index.""" - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise VectorStoreNotFoundError(store_id) diff --git a/llama_stack/providers/remote/datasetio/nvidia/README.md b/llama_stack/providers/remote/datasetio/nvidia/README.md index da57d5550..74e0895f4 100644 --- a/llama_stack/providers/remote/datasetio/nvidia/README.md +++ b/llama_stack/providers/remote/datasetio/nvidia/README.md @@ -20,7 +20,7 @@ This provider enables dataset management using NVIDIA's NeMo Customizer service. Build the NVIDIA environment: ```bash -uv run llama stack list-deps nvidia | xargs -L1 uv pip install +llama stack build --distro nvidia --image-type venv ``` ### Basic Usage using the LlamaStack Python Client diff --git a/llama_stack/providers/remote/inference/nvidia/NVIDIA.md b/llama_stack/providers/remote/inference/nvidia/NVIDIA.md index f1a828413..692b9125b 100644 --- a/llama_stack/providers/remote/inference/nvidia/NVIDIA.md +++ b/llama_stack/providers/remote/inference/nvidia/NVIDIA.md @@ -18,7 +18,7 @@ This provider enables running inference using NVIDIA NIM. 
Build the NVIDIA environment: ```bash -uv run llama stack list-deps nvidia | xargs -L1 uv pip install +llama stack build --distro nvidia --image-type venv ``` ### Basic Usage using the LlamaStack Python Client diff --git a/llama_stack/providers/remote/inference/nvidia/__init__.py b/llama_stack/providers/remote/inference/nvidia/__init__.py index b4926f33e..1869cb748 100644 --- a/llama_stack/providers/remote/inference/nvidia/__init__.py +++ b/llama_stack/providers/remote/inference/nvidia/__init__.py @@ -10,7 +10,7 @@ from .config import NVIDIAConfig async def get_adapter_impl(config: NVIDIAConfig, _deps) -> Inference: - # import dynamically so `llama stack list-deps` does not fail due to missing dependencies + # import dynamically so `llama stack build` does not fail due to missing dependencies from .nvidia import NVIDIAInferenceAdapter if not isinstance(config, NVIDIAConfig): diff --git a/llama_stack/providers/remote/inference/watsonx/watsonx.py b/llama_stack/providers/remote/inference/watsonx/watsonx.py index b31f1f5e8..2c051719b 100644 --- a/llama_stack/providers/remote/inference/watsonx/watsonx.py +++ b/llama_stack/providers/remote/inference/watsonx/watsonx.py @@ -22,11 +22,11 @@ from llama_stack.apis.inference.inference import ( ) from llama_stack.apis.models import Model from llama_stack.apis.models.models import ModelType -from llama_stack.core.telemetry.tracing import get_current_span from llama_stack.log import get_logger from llama_stack.providers.remote.inference.watsonx.config import WatsonXConfig from llama_stack.providers.utils.inference.litellm_openai_mixin import LiteLLMOpenAIMixin from llama_stack.providers.utils.inference.openai_compat import prepare_openai_completion_params +from llama_stack.providers.utils.telemetry.tracing import get_current_span logger = get_logger(name=__name__, category="providers::remote::watsonx") diff --git a/llama_stack/providers/remote/post_training/nvidia/README.md b/llama_stack/providers/remote/post_training/nvidia/README.md index 789514b1e..9b088a615 100644 --- a/llama_stack/providers/remote/post_training/nvidia/README.md +++ b/llama_stack/providers/remote/post_training/nvidia/README.md @@ -22,7 +22,7 @@ This provider enables fine-tuning of LLMs using NVIDIA's NeMo Customizer service Build the NVIDIA environment: ```bash -uv run llama stack list-deps nvidia | xargs -L1 uv pip install +llama stack build --distro nvidia --image-type venv ``` ### Basic Usage using the LlamaStack Python Client diff --git a/llama_stack/providers/remote/safety/nvidia/README.md b/llama_stack/providers/remote/safety/nvidia/README.md index e589afe84..784ab464f 100644 --- a/llama_stack/providers/remote/safety/nvidia/README.md +++ b/llama_stack/providers/remote/safety/nvidia/README.md @@ -19,7 +19,7 @@ This provider enables safety checks and guardrails for LLM interactions using NV Build the NVIDIA environment: ```bash -uv run llama stack list-deps nvidia | xargs -L1 uv pip install +llama stack build --distro nvidia --image-type venv ``` ### Basic Usage using the LlamaStack Python Client diff --git a/llama_stack/providers/remote/safety/nvidia/nvidia.py b/llama_stack/providers/remote/safety/nvidia/nvidia.py index 236f16207..c0df8f095 100644 --- a/llama_stack/providers/remote/safety/nvidia/nvidia.py +++ b/llama_stack/providers/remote/safety/nvidia/nvidia.py @@ -66,7 +66,7 @@ class NVIDIASafetyAdapter(Safety, ShieldsProtocolPrivate): self.shield = NeMoGuardrails(self.config, shield.shield_id) return await self.shield.run(messages) - async def run_moderation(self, input: str 
| list[str], model: str | None = None) -> ModerationObject: + async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: raise NotImplementedError("NVIDIA safety provider currently does not implement run_moderation") diff --git a/llama_stack/providers/remote/vector_io/chroma/chroma.py b/llama_stack/providers/remote/vector_io/chroma/chroma.py index 2663ad43e..0aa728c32 100644 --- a/llama_stack/providers/remote/vector_io/chroma/chroma.py +++ b/llama_stack/providers/remote/vector_io/chroma/chroma.py @@ -13,15 +13,15 @@ from numpy.typing import NDArray from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference, InterleavedContent +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.inline.vector_io.chroma import ChromaVectorIOConfig as InlineChromaVectorIOConfig from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin -from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorStoreWithIndex +from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorDBWithIndex from .config import ChromaVectorIOConfig as RemoteChromaVectorIOConfig @@ -30,7 +30,7 @@ log = get_logger(name=__name__, category="vector_io::chroma") ChromaClientType = chromadb.api.AsyncClientAPI | chromadb.api.ClientAPI VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:chroma:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:chroma:{VERSION}::" VECTOR_INDEX_PREFIX = f"vector_index:chroma:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:chroma:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:chroma:{VERSION}::" @@ -114,7 +114,7 @@ class ChromaIndex(EmbeddingIndex): raise NotImplementedError("Hybrid search is not supported in Chroma") -class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): def __init__( self, config: RemoteChromaVectorIOConfig | InlineChromaVectorIOConfig, @@ -127,11 +127,11 @@ class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc self.inference_api = inference_api self.client = None self.cache = {} - self.vector_store_table = None + self.vector_db_store = None async def initialize(self) -> None: self.kvstore = await kvstore_impl(self.config.persistence) - self.vector_store_table = self.kvstore + self.vector_db_store = self.kvstore if isinstance(self.config, RemoteChromaVectorIOConfig): log.info(f"Connecting to Chroma server at: {self.config.url}") @@ -151,26 +151,26 @@ class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc # Clean up mixin resources (file batch tasks) await super().shutdown() - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: collection = await maybe_await( self.client.get_or_create_collection( - name=vector_store.identifier, metadata={"vector_store": 
vector_store.model_dump_json()} + name=vector_db.identifier, metadata={"vector_db": vector_db.model_dump_json()} ) ) - self.cache[vector_store.identifier] = VectorStoreWithIndex( - vector_store, ChromaIndex(self.client, collection), self.inference_api + self.cache[vector_db.identifier] = VectorDBWithIndex( + vector_db, ChromaIndex(self.client, collection), self.inference_api ) - async def unregister_vector_store(self, vector_store_id: str) -> None: - if vector_store_id not in self.cache: - log.warning(f"Vector DB {vector_store_id} not found") + async def unregister_vector_db(self, vector_db_id: str) -> None: + if vector_db_id not in self.cache: + log.warning(f"Vector DB {vector_db_id} not found") return - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if index is None: raise ValueError(f"Vector DB {vector_db_id} not found in Chroma") @@ -179,30 +179,30 @@ class ChromaVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc async def query_chunks( self, vector_db_id: str, query: InterleavedContent, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if index is None: raise ValueError(f"Vector DB {vector_db_id} not found in Chroma") return await index.query_chunks(query, params) - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex: + if vector_db_id in self.cache: + return self.cache[vector_db_id] - vector_store = await self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise ValueError(f"Vector DB {vector_store_id} not found in Llama Stack") - collection = await maybe_await(self.client.get_collection(vector_store_id)) + vector_db = await self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise ValueError(f"Vector DB {vector_db_id} not found in Llama Stack") + collection = await maybe_await(self.client.get_collection(vector_db_id)) if not collection: - raise ValueError(f"Vector DB {vector_store_id} not found in Chroma") - index = VectorStoreWithIndex(vector_store, ChromaIndex(self.client, collection), self.inference_api) - self.cache[vector_store_id] = index + raise ValueError(f"Vector DB {vector_db_id} not found in Chroma") + index = VectorDBWithIndex(vector_db, ChromaIndex(self.client, collection), self.inference_api) + self.cache[vector_db_id] = index return index async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: """Delete chunks from a Chroma vector store.""" - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise ValueError(f"Vector DB {store_id} not found") diff --git a/llama_stack/providers/remote/vector_io/milvus/milvus.py b/llama_stack/providers/remote/vector_io/milvus/milvus.py index cccf13816..d7c34163d 100644 --- a/llama_stack/providers/remote/vector_io/milvus/milvus.py +++ 
b/llama_stack/providers/remote/vector_io/milvus/milvus.py @@ -14,10 +14,10 @@ from pymilvus import AnnSearchRequest, DataType, Function, FunctionType, MilvusC from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference, InterleavedContent +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.inline.vector_io.milvus import MilvusVectorIOConfig as InlineMilvusVectorIOConfig from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore @@ -26,7 +26,7 @@ from llama_stack.providers.utils.memory.vector_store import ( RERANKER_TYPE_WEIGHTED, ChunkForDeletion, EmbeddingIndex, - VectorStoreWithIndex, + VectorDBWithIndex, ) from llama_stack.providers.utils.vector_io.vector_utils import sanitize_collection_name @@ -35,7 +35,7 @@ from .config import MilvusVectorIOConfig as RemoteMilvusVectorIOConfig logger = get_logger(name=__name__, category="vector_io::milvus") VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:milvus:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:milvus:{VERSION}::" VECTOR_INDEX_PREFIX = f"vector_index:milvus:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:milvus:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:milvus:{VERSION}::" @@ -261,7 +261,7 @@ class MilvusIndex(EmbeddingIndex): raise -class MilvusVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class MilvusVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): def __init__( self, config: RemoteMilvusVectorIOConfig | InlineMilvusVectorIOConfig, @@ -273,28 +273,28 @@ class MilvusVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc self.cache = {} self.client = None self.inference_api = inference_api - self.vector_store_table = None + self.vector_db_store = None self.metadata_collection_name = "openai_vector_stores_metadata" async def initialize(self) -> None: self.kvstore = await kvstore_impl(self.config.persistence) start_key = VECTOR_DBS_PREFIX end_key = f"{VECTOR_DBS_PREFIX}\xff" - stored_vector_stores = await self.kvstore.values_in_range(start_key, end_key) + stored_vector_dbs = await self.kvstore.values_in_range(start_key, end_key) - for vector_store_data in stored_vector_stores: - vector_store = VectorStore.model_validate_json(vector_store_data) - index = VectorStoreWithIndex( - vector_store, + for vector_db_data in stored_vector_dbs: + vector_db = VectorDB.model_validate_json(vector_db_data) + index = VectorDBWithIndex( + vector_db, index=MilvusIndex( client=self.client, - collection_name=vector_store.identifier, + collection_name=vector_db.identifier, consistency_level=self.config.consistency_level, kvstore=self.kvstore, ), inference_api=self.inference_api, ) - self.cache[vector_store.identifier] = index + self.cache[vector_db.identifier] = index if isinstance(self.config, RemoteMilvusVectorIOConfig): logger.info(f"Connecting to Milvus server at {self.config.uri}") self.client = MilvusClient(**self.config.model_dump(exclude_none=True)) @@ -311,45 +311,45 @@ class MilvusVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, 
VectorStoresProtoc # Clean up mixin resources (file batch tasks) await super().shutdown() - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: if isinstance(self.config, RemoteMilvusVectorIOConfig): consistency_level = self.config.consistency_level else: consistency_level = "Strong" - index = VectorStoreWithIndex( - vector_store=vector_store, - index=MilvusIndex(self.client, vector_store.identifier, consistency_level=consistency_level), + index = VectorDBWithIndex( + vector_db=vector_db, + index=MilvusIndex(self.client, vector_db.identifier, consistency_level=consistency_level), inference_api=self.inference_api, ) - self.cache[vector_store.identifier] = index + self.cache[vector_db.identifier] = index - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex | None: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex | None: + if vector_db_id in self.cache: + return self.cache[vector_db_id] - if self.vector_store_table is None: - raise VectorStoreNotFoundError(vector_store_id) + if self.vector_db_store is None: + raise VectorStoreNotFoundError(vector_db_id) - vector_store = await self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise VectorStoreNotFoundError(vector_store_id) + vector_db = await self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise VectorStoreNotFoundError(vector_db_id) - index = VectorStoreWithIndex( - vector_store=vector_store, - index=MilvusIndex(client=self.client, collection_name=vector_store.identifier, kvstore=self.kvstore), + index = VectorDBWithIndex( + vector_db=vector_db, + index=MilvusIndex(client=self.client, collection_name=vector_db.identifier, kvstore=self.kvstore), inference_api=self.inference_api, ) - self.cache[vector_store_id] = index + self.cache[vector_db_id] = index return index - async def unregister_vector_store(self, vector_store_id: str) -> None: - if vector_store_id in self.cache: - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + async def unregister_vector_db(self, vector_db_id: str) -> None: + if vector_db_id in self.cache: + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) @@ -358,14 +358,14 @@ class MilvusVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc async def query_chunks( self, vector_db_id: str, query: InterleavedContent, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) return await index.query_chunks(query, params) async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: """Delete a chunk from a milvus vector store.""" - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise VectorStoreNotFoundError(store_id) diff --git 
a/llama_stack/providers/remote/vector_io/pgvector/pgvector.py b/llama_stack/providers/remote/vector_io/pgvector/pgvector.py index f28bd3cd9..703a47843 100644 --- a/llama_stack/providers/remote/vector_io/pgvector/pgvector.py +++ b/llama_stack/providers/remote/vector_io/pgvector/pgvector.py @@ -16,15 +16,15 @@ from pydantic import BaseModel, TypeAdapter from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference, InterleavedContent +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.utils.inference.prompt_adapter import interleaved_content_as_str from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin -from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorStoreWithIndex +from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorDBWithIndex from llama_stack.providers.utils.vector_io.vector_utils import WeightedInMemoryAggregator, sanitize_collection_name from .config import PGVectorVectorIOConfig @@ -32,7 +32,7 @@ from .config import PGVectorVectorIOConfig log = get_logger(name=__name__, category="vector_io::pgvector") VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:pgvector:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:pgvector:{VERSION}::" VECTOR_INDEX_PREFIX = f"vector_index:pgvector:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:pgvector:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:pgvector:{VERSION}::" @@ -79,13 +79,13 @@ class PGVectorIndex(EmbeddingIndex): def __init__( self, - vector_store: VectorStore, + vector_db: VectorDB, dimension: int, conn: psycopg2.extensions.connection, kvstore: KVStore | None = None, distance_metric: str = "COSINE", ): - self.vector_store = vector_store + self.vector_db = vector_db self.dimension = dimension self.conn = conn self.kvstore = kvstore @@ -97,9 +97,9 @@ class PGVectorIndex(EmbeddingIndex): try: with self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur: # Sanitize the table name by replacing hyphens with underscores - # SQL doesn't allow hyphens in table names, and vector_store.identifier may contain hyphens + # SQL doesn't allow hyphens in table names, and vector_db.identifier may contain hyphens # when created with patterns like "test-vector-db-{uuid4()}" - sanitized_identifier = sanitize_collection_name(self.vector_store.identifier) + sanitized_identifier = sanitize_collection_name(self.vector_db.identifier) self.table_name = f"vs_{sanitized_identifier}" cur.execute( @@ -122,8 +122,8 @@ class PGVectorIndex(EmbeddingIndex): """ ) except Exception as e: - log.exception(f"Error creating PGVectorIndex for vector_store: {self.vector_store.identifier}") - raise RuntimeError(f"Error creating PGVectorIndex for vector_store: {self.vector_store.identifier}") from e + log.exception(f"Error creating PGVectorIndex for vector_db: {self.vector_db.identifier}") + raise RuntimeError(f"Error creating PGVectorIndex for 
vector_db: {self.vector_db.identifier}") from e async def add_chunks(self, chunks: list[Chunk], embeddings: NDArray): assert len(chunks) == len(embeddings), ( @@ -323,7 +323,7 @@ class PGVectorIndex(EmbeddingIndex): ) -class PGVectorVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class PGVectorVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): def __init__( self, config: PGVectorVectorIOConfig, inference_api: Inference, files_api: Files | None = None ) -> None: @@ -332,7 +332,7 @@ class PGVectorVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProt self.inference_api = inference_api self.conn = None self.cache = {} - self.vector_store_table = None + self.vector_db_store = None self.metadata_collection_name = "openai_vector_stores_metadata" async def initialize(self) -> None: @@ -375,59 +375,59 @@ class PGVectorVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProt # Clean up mixin resources (file batch tasks) await super().shutdown() - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: # Persist vector DB metadata in the KV store assert self.kvstore is not None # Upsert model metadata in Postgres - upsert_models(self.conn, [(vector_store.identifier, vector_store)]) + upsert_models(self.conn, [(vector_db.identifier, vector_db)]) # Create and cache the PGVector index table for the vector DB pgvector_index = PGVectorIndex( - vector_store=vector_store, dimension=vector_store.embedding_dimension, conn=self.conn, kvstore=self.kvstore + vector_db=vector_db, dimension=vector_db.embedding_dimension, conn=self.conn, kvstore=self.kvstore ) await pgvector_index.initialize() - index = VectorStoreWithIndex(vector_store, index=pgvector_index, inference_api=self.inference_api) - self.cache[vector_store.identifier] = index + index = VectorDBWithIndex(vector_db, index=pgvector_index, inference_api=self.inference_api) + self.cache[vector_db.identifier] = index - async def unregister_vector_store(self, vector_store_id: str) -> None: + async def unregister_vector_db(self, vector_db_id: str) -> None: # Remove provider index and cache - if vector_store_id in self.cache: - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + if vector_db_id in self.cache: + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] # Delete vector DB metadata from KV store assert self.kvstore is not None - await self.kvstore.delete(key=f"{VECTOR_DBS_PREFIX}{vector_store_id}") + await self.kvstore.delete(key=f"{VECTOR_DBS_PREFIX}{vector_db_id}") async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) await index.insert_chunks(chunks) async def query_chunks( self, vector_db_id: str, query: InterleavedContent, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) return await index.query_chunks(query, params) - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex: + if vector_db_id in 
self.cache: + return self.cache[vector_db_id] - if self.vector_store_table is None: - raise VectorStoreNotFoundError(vector_store_id) + if self.vector_db_store is None: + raise VectorStoreNotFoundError(vector_db_id) - vector_store = await self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise VectorStoreNotFoundError(vector_store_id) + vector_db = await self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise VectorStoreNotFoundError(vector_db_id) - index = PGVectorIndex(vector_store, vector_store.embedding_dimension, self.conn) + index = PGVectorIndex(vector_db, vector_db.embedding_dimension, self.conn) await index.initialize() - self.cache[vector_store_id] = VectorStoreWithIndex(vector_store, index, self.inference_api) - return self.cache[vector_store_id] + self.cache[vector_db_id] = VectorDBWithIndex(vector_db, index, self.inference_api) + return self.cache[vector_db_id] async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: """Delete a chunk from a PostgreSQL vector store.""" - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise VectorStoreNotFoundError(store_id) diff --git a/llama_stack/providers/remote/vector_io/qdrant/qdrant.py b/llama_stack/providers/remote/vector_io/qdrant/qdrant.py index 93d0894a6..6838d69e9 100644 --- a/llama_stack/providers/remote/vector_io/qdrant/qdrant.py +++ b/llama_stack/providers/remote/vector_io/qdrant/qdrant.py @@ -16,6 +16,7 @@ from qdrant_client.models import PointStruct from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference, InterleavedContent +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import ( Chunk, QueryChunksResponse, @@ -23,13 +24,12 @@ from llama_stack.apis.vector_io import ( VectorStoreChunkingStrategy, VectorStoreFileObject, ) -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.inline.vector_io.qdrant import QdrantVectorIOConfig as InlineQdrantVectorIOConfig from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin -from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorStoreWithIndex +from llama_stack.providers.utils.memory.vector_store import ChunkForDeletion, EmbeddingIndex, VectorDBWithIndex from .config import QdrantVectorIOConfig as RemoteQdrantVectorIOConfig @@ -38,7 +38,7 @@ CHUNK_ID_KEY = "_chunk_id" # KV store prefixes for vector databases VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:qdrant:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:qdrant:{VERSION}::" def convert_id(_id: str) -> str: @@ -145,7 +145,7 @@ class QdrantIndex(EmbeddingIndex): await self.client.delete_collection(collection_name=self.collection_name) -class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtocolPrivate): +class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolPrivate): def __init__( self, config: RemoteQdrantVectorIOConfig | InlineQdrantVectorIOConfig, @@ -157,7 +157,7 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, 
VectorIO, VectorStoresProtoc self.client: AsyncQdrantClient = None self.cache = {} self.inference_api = inference_api - self.vector_store_table = None + self.vector_db_store = None self._qdrant_lock = asyncio.Lock() async def initialize(self) -> None: @@ -167,14 +167,12 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc start_key = VECTOR_DBS_PREFIX end_key = f"{VECTOR_DBS_PREFIX}\xff" - stored_vector_stores = await self.kvstore.values_in_range(start_key, end_key) + stored_vector_dbs = await self.kvstore.values_in_range(start_key, end_key) - for vector_store_data in stored_vector_stores: - vector_store = VectorStore.model_validate_json(vector_store_data) - index = VectorStoreWithIndex( - vector_store, QdrantIndex(self.client, vector_store.identifier), self.inference_api - ) - self.cache[vector_store.identifier] = index + for vector_db_data in stored_vector_dbs: + vector_db = VectorDB.model_validate_json(vector_db_data) + index = VectorDBWithIndex(vector_db, QdrantIndex(self.client, vector_db.identifier), self.inference_api) + self.cache[vector_db.identifier] = index self.openai_vector_stores = await self._load_openai_vector_stores() async def shutdown(self) -> None: @@ -182,48 +180,46 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc # Clean up mixin resources (file batch tasks) await super().shutdown() - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: assert self.kvstore is not None - key = f"{VECTOR_DBS_PREFIX}{vector_store.identifier}" - await self.kvstore.set(key=key, value=vector_store.model_dump_json()) + key = f"{VECTOR_DBS_PREFIX}{vector_db.identifier}" + await self.kvstore.set(key=key, value=vector_db.model_dump_json()) - index = VectorStoreWithIndex( - vector_store=vector_store, - index=QdrantIndex(self.client, vector_store.identifier), - inference_api=self.inference_api, + index = VectorDBWithIndex( + vector_db=vector_db, index=QdrantIndex(self.client, vector_db.identifier), inference_api=self.inference_api ) - self.cache[vector_store.identifier] = index + self.cache[vector_db.identifier] = index - async def unregister_vector_store(self, vector_store_id: str) -> None: - if vector_store_id in self.cache: - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + async def unregister_vector_db(self, vector_db_id: str) -> None: + if vector_db_id in self.cache: + await self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] assert self.kvstore is not None - await self.kvstore.delete(f"{VECTOR_DBS_PREFIX}{vector_store_id}") + await self.kvstore.delete(f"{VECTOR_DBS_PREFIX}{vector_db_id}") - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex | None: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex | None: + if vector_db_id in self.cache: + return self.cache[vector_db_id] - if self.vector_store_table is None: - raise ValueError(f"Vector DB not found {vector_store_id}") + if self.vector_db_store is None: + raise ValueError(f"Vector DB not found {vector_db_id}") - vector_store = await self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise VectorStoreNotFoundError(vector_store_id) + vector_db = await self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise 
VectorStoreNotFoundError(vector_db_id) - index = VectorStoreWithIndex( - vector_store=vector_store, - index=QdrantIndex(client=self.client, collection_name=vector_store.identifier), + index = VectorDBWithIndex( + vector_db=vector_db, + index=QdrantIndex(client=self.client, collection_name=vector_db.identifier), inference_api=self.inference_api, ) - self.cache[vector_store_id] = index + self.cache[vector_db_id] = index return index async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) @@ -232,7 +228,7 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc async def query_chunks( self, vector_db_id: str, query: InterleavedContent, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) @@ -253,7 +249,7 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorStoresProtoc async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: """Delete chunks from a Qdrant vector store.""" - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise ValueError(f"Vector DB {store_id} not found") diff --git a/llama_stack/providers/remote/vector_io/weaviate/weaviate.py b/llama_stack/providers/remote/vector_io/weaviate/weaviate.py index 66922aa3f..8e7eb7267 100644 --- a/llama_stack/providers/remote/vector_io/weaviate/weaviate.py +++ b/llama_stack/providers/remote/vector_io/weaviate/weaviate.py @@ -16,11 +16,11 @@ from llama_stack.apis.common.content_types import InterleavedContent from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files from llama_stack.apis.inference import Inference +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO -from llama_stack.apis.vector_stores import VectorStore from llama_stack.core.request_headers import NeedsRequestProviderData from llama_stack.log import get_logger -from llama_stack.providers.datatypes import VectorStoresProtocolPrivate +from llama_stack.providers.datatypes import VectorDBsProtocolPrivate from llama_stack.providers.utils.kvstore import kvstore_impl from llama_stack.providers.utils.kvstore.api import KVStore from llama_stack.providers.utils.memory.openai_vector_store_mixin import OpenAIVectorStoreMixin @@ -28,7 +28,7 @@ from llama_stack.providers.utils.memory.vector_store import ( RERANKER_TYPE_RRF, ChunkForDeletion, EmbeddingIndex, - VectorStoreWithIndex, + VectorDBWithIndex, ) from llama_stack.providers.utils.vector_io.vector_utils import sanitize_collection_name @@ -37,7 +37,7 @@ from .config import WeaviateVectorIOConfig log = get_logger(name=__name__, category="vector_io::weaviate") VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:weaviate:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:weaviate:{VERSION}::" VECTOR_INDEX_PREFIX = f"vector_index:weaviate:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:weaviate:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = 
f"openai_vector_stores_files:weaviate:{VERSION}::" @@ -257,14 +257,14 @@ class WeaviateIndex(EmbeddingIndex): return QueryChunksResponse(chunks=chunks, scores=scores) -class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProviderData, VectorStoresProtocolPrivate): +class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProviderData, VectorDBsProtocolPrivate): def __init__(self, config: WeaviateVectorIOConfig, inference_api: Inference, files_api: Files | None) -> None: super().__init__(files_api=files_api, kvstore=None) self.config = config self.inference_api = inference_api self.client_cache = {} self.cache = {} - self.vector_store_table = None + self.vector_db_store = None self.metadata_collection_name = "openai_vector_stores_metadata" def _get_client(self) -> weaviate.WeaviateClient: @@ -300,11 +300,11 @@ class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProv end_key = f"{VECTOR_DBS_PREFIX}\xff" stored = await self.kvstore.values_in_range(start_key, end_key) for raw in stored: - vector_store = VectorStore.model_validate_json(raw) + vector_db = VectorDB.model_validate_json(raw) client = self._get_client() - idx = WeaviateIndex(client=client, collection_name=vector_store.identifier, kvstore=self.kvstore) - self.cache[vector_store.identifier] = VectorStoreWithIndex( - vector_store=vector_store, index=idx, inference_api=self.inference_api + idx = WeaviateIndex(client=client, collection_name=vector_db.identifier, kvstore=self.kvstore) + self.cache[vector_db.identifier] = VectorDBWithIndex( + vector_db=vector_db, index=idx, inference_api=self.inference_api ) # Load OpenAI vector stores metadata into cache @@ -316,9 +316,9 @@ class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProv # Clean up mixin resources (file batch tasks) await super().shutdown() - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: client = self._get_client() - sanitized_collection_name = sanitize_collection_name(vector_store.identifier, weaviate_format=True) + sanitized_collection_name = sanitize_collection_name(vector_db.identifier, weaviate_format=True) # Create collection if it doesn't exist if not client.collections.exists(sanitized_collection_name): client.collections.create( @@ -329,45 +329,45 @@ class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProv ], ) - self.cache[vector_store.identifier] = VectorStoreWithIndex( - vector_store, WeaviateIndex(client=client, collection_name=sanitized_collection_name), self.inference_api + self.cache[vector_db.identifier] = VectorDBWithIndex( + vector_db, WeaviateIndex(client=client, collection_name=sanitized_collection_name), self.inference_api ) - async def unregister_vector_store(self, vector_store_id: str) -> None: + async def unregister_vector_db(self, vector_db_id: str) -> None: client = self._get_client() - sanitized_collection_name = sanitize_collection_name(vector_store_id, weaviate_format=True) - if vector_store_id not in self.cache or client.collections.exists(sanitized_collection_name) is False: + sanitized_collection_name = sanitize_collection_name(vector_db_id, weaviate_format=True) + if vector_db_id not in self.cache or client.collections.exists(sanitized_collection_name) is False: return client.collections.delete(sanitized_collection_name) - await self.cache[vector_store_id].index.delete() - del self.cache[vector_store_id] + await 
self.cache[vector_db_id].index.delete() + del self.cache[vector_db_id] - async def _get_and_cache_vector_store_index(self, vector_store_id: str) -> VectorStoreWithIndex | None: - if vector_store_id in self.cache: - return self.cache[vector_store_id] + async def _get_and_cache_vector_db_index(self, vector_db_id: str) -> VectorDBWithIndex | None: + if vector_db_id in self.cache: + return self.cache[vector_db_id] - if self.vector_store_table is None: - raise VectorStoreNotFoundError(vector_store_id) + if self.vector_db_store is None: + raise VectorStoreNotFoundError(vector_db_id) - vector_store = await self.vector_store_table.get_vector_store(vector_store_id) - if not vector_store: - raise VectorStoreNotFoundError(vector_store_id) + vector_db = await self.vector_db_store.get_vector_db(vector_db_id) + if not vector_db: + raise VectorStoreNotFoundError(vector_db_id) client = self._get_client() - sanitized_collection_name = sanitize_collection_name(vector_store.identifier, weaviate_format=True) + sanitized_collection_name = sanitize_collection_name(vector_db.identifier, weaviate_format=True) if not client.collections.exists(sanitized_collection_name): raise ValueError(f"Collection with name `{sanitized_collection_name}` not found") - index = VectorStoreWithIndex( - vector_store=vector_store, - index=WeaviateIndex(client=client, collection_name=vector_store.identifier), + index = VectorDBWithIndex( + vector_db=vector_db, + index=WeaviateIndex(client=client, collection_name=vector_db.identifier), inference_api=self.inference_api, ) - self.cache[vector_store_id] = index + self.cache[vector_db_id] = index return index async def insert_chunks(self, vector_db_id: str, chunks: list[Chunk], ttl_seconds: int | None = None) -> None: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) @@ -376,14 +376,14 @@ class WeaviateVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, NeedsRequestProv async def query_chunks( self, vector_db_id: str, query: InterleavedContent, params: dict[str, Any] | None = None ) -> QueryChunksResponse: - index = await self._get_and_cache_vector_store_index(vector_db_id) + index = await self._get_and_cache_vector_db_index(vector_db_id) if not index: raise VectorStoreNotFoundError(vector_db_id) return await index.query_chunks(query, params) async def delete_chunks(self, store_id: str, chunks_for_deletion: list[ChunkForDeletion]) -> None: - index = await self._get_and_cache_vector_store_index(store_id) + index = await self._get_and_cache_vector_db_index(store_id) if not index: raise ValueError(f"Vector DB {store_id} not found") diff --git a/llama_stack/providers/utils/inference/embedding_mixin.py b/llama_stack/providers/utils/inference/embedding_mixin.py index c959b9c19..67ce8b532 100644 --- a/llama_stack/providers/utils/inference/embedding_mixin.py +++ b/llama_stack/providers/utils/inference/embedding_mixin.py @@ -6,12 +6,9 @@ import asyncio import base64 -import platform import struct from typing import TYPE_CHECKING -import torch - from llama_stack.log import get_logger if TYPE_CHECKING: @@ -27,8 +24,6 @@ from llama_stack.apis.inference import ( EMBEDDING_MODELS = {} -DARWIN = "Darwin" - log = get_logger(name=__name__, category="providers::utils") @@ -88,13 +83,6 @@ class SentenceTransformerEmbeddingMixin: def _load_model(): from sentence_transformers import SentenceTransformer - platform_name = platform.system() - if platform_name == DARWIN: - # 
PyTorch's OpenMP kernels can segfault on macOS when spawned from background - # threads with the default parallel settings, so force a single-threaded CPU run. - log.debug(f"Constraining torch threads on {platform_name} to a single worker") - torch.set_num_threads(1) - return SentenceTransformer(model, trust_remote_code=True) loaded_model = await asyncio.to_thread(_load_model) diff --git a/llama_stack/providers/utils/inference/litellm_openai_mixin.py b/llama_stack/providers/utils/inference/litellm_openai_mixin.py index 3eef1f272..42b89f897 100644 --- a/llama_stack/providers/utils/inference/litellm_openai_mixin.py +++ b/llama_stack/providers/utils/inference/litellm_openai_mixin.py @@ -256,7 +256,7 @@ class LiteLLMOpenAIMixin( params: OpenAIChatCompletionRequestWithExtraBody, ) -> OpenAIChatCompletion | AsyncIterator[OpenAIChatCompletionChunk]: # Add usage tracking for streaming when telemetry is active - from llama_stack.core.telemetry.tracing import get_current_span + from llama_stack.providers.utils.telemetry.tracing import get_current_span stream_options = params.stream_options if params.stream and get_current_span() is not None: diff --git a/llama_stack/providers/utils/inference/openai_mixin.py b/llama_stack/providers/utils/inference/openai_mixin.py index bbd3d2e10..a9ccc8091 100644 --- a/llama_stack/providers/utils/inference/openai_mixin.py +++ b/llama_stack/providers/utils/inference/openai_mixin.py @@ -48,7 +48,6 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel): - overwrite_completion_id: If True, overwrites the 'id' field in OpenAI responses - download_images: If True, downloads images and converts to base64 for providers that require it - embedding_model_metadata: A dictionary mapping model IDs to their embedding metadata - - construct_model_from_identifier: Method to construct a Model instance corresponding to the given identifier - provider_data_api_key_field: Optional field name in provider data to look for API key - list_provider_model_ids: Method to list available models from the provider - get_extra_client_params: Method to provide extra parameters to the AsyncOpenAI client @@ -122,30 +121,6 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel): """ return {} - def construct_model_from_identifier(self, identifier: str) -> Model: - """ - Construct a Model instance corresponding to the given identifier - - Child classes can override this to customize model typing/metadata. - - :param identifier: The provider's model identifier - :return: A Model instance - """ - if metadata := self.embedding_model_metadata.get(identifier): - return Model( - provider_id=self.__provider_id__, # type: ignore[attr-defined] - provider_resource_id=identifier, - identifier=identifier, - model_type=ModelType.embedding, - metadata=metadata, - ) - return Model( - provider_id=self.__provider_id__, # type: ignore[attr-defined] - provider_resource_id=identifier, - identifier=identifier, - model_type=ModelType.llm, - ) - async def list_provider_model_ids(self) -> Iterable[str]: """ List available models from the provider. 
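The next hunk inlines the removed `construct_model_from_identifier` helper directly into `list_models`. As a reading aid for this rename-heavy patch, here is a minimal sketch of that inlined decision, assuming the `Model`/`ModelType` imports used elsewhere in this diff; the standalone helper name is hypothetical and not part of the patch:

```python
# Sketch only: providers that advertise embedding metadata for an id get an
# embedding entry; every other id falls back to a plain LLM entry.
from llama_stack.apis.models import Model
from llama_stack.apis.models.models import ModelType


def model_entry_for(provider_id: str, identifier: str, embedding_model_metadata: dict) -> Model:
    # Hypothetical helper mirroring the logic inlined into list_models in the hunk below.
    if metadata := embedding_model_metadata.get(identifier):
        return Model(
            provider_id=provider_id,
            provider_resource_id=identifier,
            identifier=identifier,
            model_type=ModelType.embedding,
            metadata=metadata,
        )
    return Model(
        provider_id=provider_id,
        provider_resource_id=identifier,
        identifier=identifier,
        model_type=ModelType.llm,
    )
```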
@@ -441,7 +416,21 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel): if self.allowed_models and provider_model_id not in self.allowed_models: logger.info(f"Skipping model {provider_model_id} as it is not in the allowed models list") continue - model = self.construct_model_from_identifier(provider_model_id) + if metadata := self.embedding_model_metadata.get(provider_model_id): + model = Model( + provider_id=self.__provider_id__, # type: ignore[attr-defined] + provider_resource_id=provider_model_id, + identifier=provider_model_id, + model_type=ModelType.embedding, + metadata=metadata, + ) + else: + model = Model( + provider_id=self.__provider_id__, # type: ignore[attr-defined] + provider_resource_id=provider_model_id, + identifier=provider_model_id, + model_type=ModelType.llm, + ) self._model_cache[provider_model_id] = model return list(self._model_cache.values()) diff --git a/llama_stack/providers/utils/memory/openai_vector_store_mixin.py b/llama_stack/providers/utils/memory/openai_vector_store_mixin.py index 8f9fb9fb4..7806d98c1 100644 --- a/llama_stack/providers/utils/memory/openai_vector_store_mixin.py +++ b/llama_stack/providers/utils/memory/openai_vector_store_mixin.py @@ -17,6 +17,7 @@ from pydantic import TypeAdapter from llama_stack.apis.common.errors import VectorStoreNotFoundError from llama_stack.apis.files import Files, OpenAIFileObject +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import ( Chunk, OpenAICreateVectorStoreFileBatchRequestWithExtraBody, @@ -42,7 +43,6 @@ from llama_stack.apis.vector_io import ( VectorStoreSearchResponse, VectorStoreSearchResponsePage, ) -from llama_stack.apis.vector_stores import VectorStore from llama_stack.core.id_generation import generate_object_id from llama_stack.log import get_logger from llama_stack.providers.utils.kvstore.api import KVStore @@ -63,7 +63,7 @@ MAX_CONCURRENT_FILES_PER_BATCH = 3 # Maximum concurrent file processing within FILE_BATCH_CHUNK_SIZE = 10 # Process files in chunks of this size VERSION = "v3" -VECTOR_DBS_PREFIX = f"vector_stores:{VERSION}::" +VECTOR_DBS_PREFIX = f"vector_dbs:{VERSION}::" OPENAI_VECTOR_STORES_PREFIX = f"openai_vector_stores:{VERSION}::" OPENAI_VECTOR_STORES_FILES_PREFIX = f"openai_vector_stores_files:{VERSION}::" OPENAI_VECTOR_STORES_FILES_CONTENTS_PREFIX = f"openai_vector_stores_files_contents:{VERSION}::" @@ -321,12 +321,12 @@ class OpenAIVectorStoreMixin(ABC): pass @abstractmethod - async def register_vector_store(self, vector_store: VectorStore) -> None: + async def register_vector_db(self, vector_db: VectorDB) -> None: """Register a vector database (provider-specific implementation).""" pass @abstractmethod - async def unregister_vector_store(self, vector_store_id: str) -> None: + async def unregister_vector_db(self, vector_db_id: str) -> None: """Unregister a vector database (provider-specific implementation).""" pass @@ -358,7 +358,7 @@ class OpenAIVectorStoreMixin(ABC): extra_body = params.model_extra or {} metadata = params.metadata or {} - provider_vector_store_id = extra_body.get("provider_vector_store_id") + provider_vector_db_id = extra_body.get("provider_vector_db_id") # Use embedding info from metadata if available, otherwise from extra_body if metadata.get("embedding_model"): @@ -370,6 +370,16 @@ class OpenAIVectorStoreMixin(ABC): logger.debug( f"Using embedding config from metadata (takes precedence over extra_body): model='{embedding_model}', dimension={embedding_dimension}" ) + + # Check for conflicts with extra_body + if 
extra_body.get("embedding_model") and extra_body["embedding_model"] != embedding_model: + raise ValueError( + f"Embedding model inconsistent between metadata ('{embedding_model}') and extra_body ('{extra_body['embedding_model']}')" + ) + if extra_body.get("embedding_dimension") and extra_body["embedding_dimension"] != embedding_dimension: + raise ValueError( + f"Embedding dimension inconsistent between metadata ({embedding_dimension}) and extra_body ({extra_body['embedding_dimension']})" + ) else: embedding_model = extra_body.get("embedding_model") embedding_dimension = extra_body.get("embedding_dimension", EMBEDDING_DIMENSION) @@ -379,8 +389,8 @@ class OpenAIVectorStoreMixin(ABC): # use provider_id set by router; fallback to provider's own ID when used directly via --stack-config provider_id = extra_body.get("provider_id") or getattr(self, "__provider_id__", None) - # Derive the canonical vector_store_id (allow override, else generate) - vector_store_id = provider_vector_store_id or generate_object_id("vector_store", lambda: f"vs_{uuid.uuid4()}") + # Derive the canonical vector_db_id (allow override, else generate) + vector_db_id = provider_vector_db_id or generate_object_id("vector_store", lambda: f"vs_{uuid.uuid4()}") if embedding_model is None: raise ValueError("embedding_model is required") @@ -388,20 +398,19 @@ class OpenAIVectorStoreMixin(ABC): if embedding_dimension is None: raise ValueError("Embedding dimension is required") - # Register the VectorStore backing this vector store + # Register the VectorDB backing this vector store if provider_id is None: raise ValueError("Provider ID is required but was not provided") - # call to the provider to create any index, etc. - vector_store = VectorStore( - identifier=vector_store_id, + vector_db = VectorDB( + identifier=vector_db_id, embedding_dimension=embedding_dimension, embedding_model=embedding_model, provider_id=provider_id, - provider_resource_id=vector_store_id, - vector_store_name=params.name, + provider_resource_id=vector_db_id, + vector_db_name=params.name, ) - await self.register_vector_store(vector_store) + await self.register_vector_db(vector_db) # Create OpenAI vector store metadata status = "completed" @@ -415,7 +424,7 @@ class OpenAIVectorStoreMixin(ABC): total=0, ) store_info: dict[str, Any] = { - "id": vector_store_id, + "id": vector_db_id, "object": "vector_store", "created_at": created_at, "name": params.name, @@ -432,23 +441,23 @@ class OpenAIVectorStoreMixin(ABC): # Add provider information to metadata if provided if provider_id: metadata["provider_id"] = provider_id - if provider_vector_store_id: - metadata["provider_vector_store_id"] = provider_vector_store_id + if provider_vector_db_id: + metadata["provider_vector_db_id"] = provider_vector_db_id store_info["metadata"] = metadata # Save to persistent storage (provider-specific) - await self._save_openai_vector_store(vector_store_id, store_info) + await self._save_openai_vector_store(vector_db_id, store_info) # Store in memory cache - self.openai_vector_stores[vector_store_id] = store_info + self.openai_vector_stores[vector_db_id] = store_info # Now that our vector store is created, attach any files that were provided file_ids = params.file_ids or [] - tasks = [self.openai_attach_file_to_vector_store(vector_store_id, file_id) for file_id in file_ids] + tasks = [self.openai_attach_file_to_vector_store(vector_db_id, file_id) for file_id in file_ids] await asyncio.gather(*tasks) # Get the updated store info and return it - store_info = 
self.openai_vector_stores[vector_store_id] + store_info = self.openai_vector_stores[vector_db_id] return VectorStoreObject.model_validate(store_info) async def openai_list_vector_stores( @@ -558,7 +567,7 @@ class OpenAIVectorStoreMixin(ABC): # Also delete the underlying vector DB try: - await self.unregister_vector_store(vector_store_id) + await self.unregister_vector_db(vector_store_id) except Exception as e: logger.warning(f"Failed to delete underlying vector DB {vector_store_id}: {e}") diff --git a/llama_stack/providers/utils/memory/vector_store.py b/llama_stack/providers/utils/memory/vector_store.py index 6c8746e92..0375ecaaa 100644 --- a/llama_stack/providers/utils/memory/vector_store.py +++ b/llama_stack/providers/utils/memory/vector_store.py @@ -23,8 +23,8 @@ from llama_stack.apis.common.content_types import ( ) from llama_stack.apis.inference import OpenAIEmbeddingsRequestWithExtraBody from llama_stack.apis.tools import RAGDocument +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, ChunkMetadata, QueryChunksResponse -from llama_stack.apis.vector_stores import VectorStore from llama_stack.log import get_logger from llama_stack.models.llama.llama3.tokenizer import Tokenizer from llama_stack.providers.datatypes import Api @@ -187,7 +187,7 @@ def make_overlapped_chunks( updated_timestamp=int(time.time()), chunk_window=chunk_window, chunk_tokenizer=default_tokenizer, - chunk_embedding_model=None, # This will be set in `VectorStoreWithIndex.insert_chunks` + chunk_embedding_model=None, # This will be set in `VectorDBWithIndex.insert_chunks` content_token_count=len(toks), metadata_token_count=len(metadata_tokens), ) @@ -255,8 +255,8 @@ class EmbeddingIndex(ABC): @dataclass -class VectorStoreWithIndex: - vector_store: VectorStore +class VectorDBWithIndex: + vector_db: VectorDB index: EmbeddingIndex inference_api: Api.inference @@ -269,14 +269,14 @@ class VectorStoreWithIndex: if c.embedding is None: chunks_to_embed.append(c) if c.chunk_metadata: - c.chunk_metadata.chunk_embedding_model = self.vector_store.embedding_model - c.chunk_metadata.chunk_embedding_dimension = self.vector_store.embedding_dimension + c.chunk_metadata.chunk_embedding_model = self.vector_db.embedding_model + c.chunk_metadata.chunk_embedding_dimension = self.vector_db.embedding_dimension else: - _validate_embedding(c.embedding, i, self.vector_store.embedding_dimension) + _validate_embedding(c.embedding, i, self.vector_db.embedding_dimension) if chunks_to_embed: params = OpenAIEmbeddingsRequestWithExtraBody( - model=self.vector_store.embedding_model, + model=self.vector_db.embedding_model, input=[c.content for c in chunks_to_embed], ) resp = await self.inference_api.openai_embeddings(params) @@ -319,7 +319,7 @@ class VectorStoreWithIndex: return await self.index.query_keyword(query_string, k, score_threshold) params = OpenAIEmbeddingsRequestWithExtraBody( - model=self.vector_store.embedding_model, + model=self.vector_db.embedding_model, input=[query_string], ) embeddings_response = await self.inference_api.openai_embeddings(params) diff --git a/llama_stack/providers/utils/telemetry/__init__.py b/llama_stack/providers/utils/telemetry/__init__.py new file mode 100644 index 000000000..756f351d8 --- /dev/null +++ b/llama_stack/providers/utils/telemetry/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
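The remaining hunks move the tracing utilities from `llama_stack.core.telemetry` back under `llama_stack.providers.utils.telemetry`. A minimal sketch of what a call site looks like after the move, modeled on the watsonx and LiteLLM mixin hunks earlier in this patch; only the `get_current_span` import comes from the patch, and the wrapper function is an illustrative assumption:

```python
# Sketch only: import tracing helpers from the providers.utils package after the move.
from llama_stack.providers.utils.telemetry.tracing import get_current_span


def should_track_streaming_usage(stream: bool) -> bool:
    # Mirrors the LiteLLM mixin change: usage is only requested for streaming
    # completions when a telemetry span is currently active.
    return bool(stream) and get_current_span() is not None
```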
diff --git a/llama_stack/core/telemetry/trace_protocol.py b/llama_stack/providers/utils/telemetry/trace_protocol.py similarity index 78% rename from llama_stack/core/telemetry/trace_protocol.py rename to llama_stack/providers/utils/telemetry/trace_protocol.py index 807b8e2a9..e9320b7a8 100644 --- a/llama_stack/core/telemetry/trace_protocol.py +++ b/llama_stack/providers/utils/telemetry/trace_protocol.py @@ -9,29 +9,27 @@ import inspect import json from collections.abc import AsyncGenerator, Callable from functools import wraps -from typing import Any, cast +from typing import Any from pydantic import BaseModel from llama_stack.models.llama.datatypes import Primitive -type JSONValue = Primitive | list["JSONValue"] | dict[str, "JSONValue"] - -def serialize_value(value: Any) -> str: +def serialize_value(value: Any) -> Primitive: return str(_prepare_for_json(value)) -def _prepare_for_json(value: Any) -> JSONValue: +def _prepare_for_json(value: Any) -> str: """Serialize a single value into JSON-compatible format.""" if value is None: return "" elif isinstance(value, str | int | float | bool): return value elif hasattr(value, "_name_"): - return cast(str, value._name_) + return value._name_ elif isinstance(value, BaseModel): - return cast(JSONValue, json.loads(value.model_dump_json())) + return json.loads(value.model_dump_json()) elif isinstance(value, list | tuple | set): return [_prepare_for_json(item) for item in value] elif isinstance(value, dict): @@ -39,35 +37,35 @@ def _prepare_for_json(value: Any) -> JSONValue: else: try: json.dumps(value) - return cast(JSONValue, value) + return value except Exception: return str(value) -def trace_protocol[T: type[Any]](cls: T) -> T: +def trace_protocol[T](cls: type[T]) -> type[T]: """ A class decorator that automatically traces all methods in a protocol/base class and its inheriting classes. 
""" - def trace_method(method: Callable[..., Any]) -> Callable[..., Any]: + def trace_method(method: Callable) -> Callable: is_async = asyncio.iscoroutinefunction(method) is_async_gen = inspect.isasyncgenfunction(method) - def create_span_context(self: Any, *args: Any, **kwargs: Any) -> tuple[str, str, dict[str, Primitive]]: + def create_span_context(self: Any, *args: Any, **kwargs: Any) -> tuple: class_name = self.__class__.__name__ method_name = method.__name__ span_type = "async_generator" if is_async_gen else "async" if is_async else "sync" sig = inspect.signature(method) param_names = list(sig.parameters.keys())[1:] # Skip 'self' - combined_args: dict[str, str] = {} + combined_args = {} for i, arg in enumerate(args): param_name = param_names[i] if i < len(param_names) else f"position_{i + 1}" combined_args[param_name] = serialize_value(arg) for k, v in kwargs.items(): combined_args[str(k)] = serialize_value(v) - span_attributes: dict[str, Primitive] = { + span_attributes = { "__autotraced__": True, "__class__": class_name, "__method__": method_name, @@ -78,8 +76,8 @@ def trace_protocol[T: type[Any]](cls: T) -> T: return class_name, method_name, span_attributes @wraps(method) - async def async_gen_wrapper(self: Any, *args: Any, **kwargs: Any) -> AsyncGenerator[Any, None]: - from llama_stack.core.telemetry import tracing + async def async_gen_wrapper(self: Any, *args: Any, **kwargs: Any) -> AsyncGenerator: + from llama_stack.providers.utils.telemetry import tracing class_name, method_name, span_attributes = create_span_context(self, *args, **kwargs) @@ -94,7 +92,7 @@ def trace_protocol[T: type[Any]](cls: T) -> T: @wraps(method) async def async_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: - from llama_stack.core.telemetry import tracing + from llama_stack.providers.utils.telemetry import tracing class_name, method_name, span_attributes = create_span_context(self, *args, **kwargs) @@ -109,7 +107,7 @@ def trace_protocol[T: type[Any]](cls: T) -> T: @wraps(method) def sync_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: - from llama_stack.core.telemetry import tracing + from llama_stack.providers.utils.telemetry import tracing class_name, method_name, span_attributes = create_span_context(self, *args, **kwargs) @@ -129,17 +127,16 @@ def trace_protocol[T: type[Any]](cls: T) -> T: else: return sync_wrapper - original_init_subclass = cast(Callable[..., Any] | None, getattr(cls, "__init_subclass__", None)) + original_init_subclass = getattr(cls, "__init_subclass__", None) - def __init_subclass__(cls_child: type[Any], **kwargs: Any) -> None: # noqa: N807 + def __init_subclass__(cls_child, **kwargs): # noqa: N807 if original_init_subclass: - cast(Callable[..., None], original_init_subclass)(**kwargs) + original_init_subclass(**kwargs) for name, method in vars(cls_child).items(): if inspect.isfunction(method) and not name.startswith("_"): setattr(cls_child, name, trace_method(method)) # noqa: B010 - cls_any = cast(Any, cls) - cls_any.__init_subclass__ = classmethod(__init_subclass__) + cls.__init_subclass__ = classmethod(__init_subclass__) return cls diff --git a/llama_stack/core/telemetry/tracing.py b/llama_stack/providers/utils/telemetry/tracing.py similarity index 87% rename from llama_stack/core/telemetry/tracing.py rename to llama_stack/providers/utils/telemetry/tracing.py index 7742ea0f4..62cceb13e 100644 --- a/llama_stack/core/telemetry/tracing.py +++ b/llama_stack/providers/utils/telemetry/tracing.py @@ -15,7 +15,7 @@ import time from collections.abc import Callable from 
datetime import UTC, datetime from functools import wraps -from typing import Any, Self +from typing import Any from llama_stack.apis.telemetry import ( Event, @@ -28,8 +28,8 @@ from llama_stack.apis.telemetry import ( Telemetry, UnstructuredLogEvent, ) -from llama_stack.core.telemetry.trace_protocol import serialize_value from llama_stack.log import get_logger +from llama_stack.providers.utils.telemetry.trace_protocol import serialize_value logger = get_logger(__name__, category="core") @@ -89,6 +89,9 @@ def generate_trace_id() -> str: return trace_id_to_str(trace_id) +CURRENT_TRACE_CONTEXT = contextvars.ContextVar("trace_context", default=None) +BACKGROUND_LOGGER = None + LOG_QUEUE_FULL_LOG_INTERVAL_SECONDS = 60.0 @@ -101,7 +104,7 @@ class BackgroundLogger: self._last_queue_full_log_time: float = 0.0 self._dropped_since_last_notice: int = 0 - def log_event(self, event: Event) -> None: + def log_event(self, event): try: self.log_queue.put_nowait(event) except queue.Full: @@ -134,13 +137,10 @@ class BackgroundLogger: finally: self.log_queue.task_done() - def __del__(self) -> None: + def __del__(self): self.log_queue.join() -BACKGROUND_LOGGER: BackgroundLogger | None = None - - def enqueue_event(event: Event) -> None: """Enqueue a telemetry event to the background logger if available. @@ -155,12 +155,13 @@ def enqueue_event(event: Event) -> None: class TraceContext: + spans: list[Span] = [] + def __init__(self, logger: BackgroundLogger, trace_id: str): self.logger = logger self.trace_id = trace_id - self.spans: list[Span] = [] - def push_span(self, name: str, attributes: dict[str, Any] | None = None) -> Span: + def push_span(self, name: str, attributes: dict[str, Any] = None) -> Span: current_span = self.get_current_span() span = Span( span_id=generate_span_id(), @@ -187,7 +188,7 @@ class TraceContext: self.spans.append(span) return span - def pop_span(self, status: SpanStatus = SpanStatus.OK) -> None: + def pop_span(self, status: SpanStatus = SpanStatus.OK): span = self.spans.pop() if span is not None: self.logger.log_event( @@ -202,15 +203,10 @@ class TraceContext: ) ) - def get_current_span(self) -> Span | None: + def get_current_span(self): return self.spans[-1] if self.spans else None -CURRENT_TRACE_CONTEXT: contextvars.ContextVar[TraceContext | None] = contextvars.ContextVar( - "trace_context", default=None -) - - def setup_logger(api: Telemetry, level: int = logging.INFO): global BACKGROUND_LOGGER @@ -221,12 +217,12 @@ def setup_logger(api: Telemetry, level: int = logging.INFO): root_logger.addHandler(TelemetryHandler()) -async def start_trace(name: str, attributes: dict[str, Any] | None = None) -> TraceContext | None: +async def start_trace(name: str, attributes: dict[str, Any] = None) -> TraceContext: global CURRENT_TRACE_CONTEXT, BACKGROUND_LOGGER if BACKGROUND_LOGGER is None: logger.debug("No Telemetry implementation set. 
Skipping trace initialization...") - return None + return trace_id = generate_trace_id() context = TraceContext(BACKGROUND_LOGGER, trace_id) @@ -273,7 +269,7 @@ def severity(levelname: str) -> LogSeverity: # TODO: ideally, the actual emitting should be done inside a separate daemon # process completely isolated from the server class TelemetryHandler(logging.Handler): - def emit(self, record: logging.LogRecord) -> None: + def emit(self, record: logging.LogRecord): # horrendous hack to avoid logging from asyncio and getting into an infinite loop if record.module in ("asyncio", "selector_events"): return @@ -297,17 +293,17 @@ class TelemetryHandler(logging.Handler): ) ) - def close(self) -> None: + def close(self): pass class SpanContextManager: - def __init__(self, name: str, attributes: dict[str, Any] | None = None): + def __init__(self, name: str, attributes: dict[str, Any] = None): self.name = name self.attributes = attributes - self.span: Span | None = None + self.span = None - def __enter__(self) -> Self: + def __enter__(self): global CURRENT_TRACE_CONTEXT context = CURRENT_TRACE_CONTEXT.get() if not context: @@ -317,7 +313,7 @@ class SpanContextManager: self.span = context.push_span(self.name, self.attributes) return self - def __exit__(self, exc_type, exc_value, traceback) -> None: + def __exit__(self, exc_type, exc_value, traceback): global CURRENT_TRACE_CONTEXT context = CURRENT_TRACE_CONTEXT.get() if not context: @@ -326,13 +322,13 @@ class SpanContextManager: context.pop_span() - def set_attribute(self, key: str, value: Any) -> None: + def set_attribute(self, key: str, value: Any): if self.span: if self.span.attributes is None: self.span.attributes = {} self.span.attributes[key] = serialize_value(value) - async def __aenter__(self) -> Self: + async def __aenter__(self): global CURRENT_TRACE_CONTEXT context = CURRENT_TRACE_CONTEXT.get() if not context: @@ -342,7 +338,7 @@ class SpanContextManager: self.span = context.push_span(self.name, self.attributes) return self - async def __aexit__(self, exc_type, exc_value, traceback) -> None: + async def __aexit__(self, exc_type, exc_value, traceback): global CURRENT_TRACE_CONTEXT context = CURRENT_TRACE_CONTEXT.get() if not context: @@ -351,19 +347,19 @@ class SpanContextManager: context.pop_span() - def __call__(self, func: Callable[..., Any]) -> Callable[..., Any]: + def __call__(self, func: Callable): @wraps(func) - def sync_wrapper(*args: Any, **kwargs: Any) -> Any: + def sync_wrapper(*args, **kwargs): with self: return func(*args, **kwargs) @wraps(func) - async def async_wrapper(*args: Any, **kwargs: Any) -> Any: + async def async_wrapper(*args, **kwargs): async with self: return await func(*args, **kwargs) @wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> Any: + def wrapper(*args, **kwargs): if asyncio.iscoroutinefunction(func): return async_wrapper(*args, **kwargs) else: @@ -372,7 +368,7 @@ class SpanContextManager: return wrapper -def span(name: str, attributes: dict[str, Any] | None = None) -> SpanContextManager: +def span(name: str, attributes: dict[str, Any] = None): return SpanContextManager(name, attributes) diff --git a/llama_stack/ui/package-lock.json b/llama_stack/ui/package-lock.json index 8e93fc5ab..c138de535 100644 --- a/llama_stack/ui/package-lock.json +++ b/llama_stack/ui/package-lock.json @@ -18,7 +18,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "framer-motion": "^12.23.24", - "llama-stack-client": "^0.3.0", + "llama-stack-client": "^0.2.23", "lucide-react": "^0.545.0", "next": "15.5.4", 
"next-auth": "^4.24.11", @@ -75,6 +75,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@asamuzakjp/css-color": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz", @@ -2058,9 +2072,9 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", "dev": true, "license": "MIT" }, @@ -3185,54 +3199,61 @@ } }, "node_modules/@tailwindcss/node": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.14.tgz", - "integrity": "sha512-hpz+8vFk3Ic2xssIA3e01R6jkmsAhvkQdXlEbRTk6S10xDAtiQiM3FyvZVGsucefq764euO/b8WUW9ysLdThHw==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.6.tgz", + "integrity": "sha512-ed6zQbgmKsjsVvodAS1q1Ld2BolEuxJOSyyNc+vhkjdmfNUDCmQnlXBfQkHrlzNmslxHsQU/bFmzcEbv4xXsLg==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/remapping": "^2.3.4", - "enhanced-resolve": "^5.18.3", - "jiti": "^2.6.0", - "lightningcss": "1.30.1", - "magic-string": "^0.30.19", + "@ampproject/remapping": "^2.3.0", + "enhanced-resolve": "^5.18.1", + "jiti": "^2.4.2", + "lightningcss": "1.29.2", + "magic-string": "^0.30.17", "source-map-js": "^1.2.1", - "tailwindcss": "4.1.14" + "tailwindcss": "4.1.6" } }, + "node_modules/@tailwindcss/node/node_modules/tailwindcss": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.6.tgz", + "integrity": "sha512-j0cGLTreM6u4OWzBeLBpycK0WIh8w7kSwcUsQZoGLHZ7xDTdM69lN64AgoIEEwFi0tnhs4wSykUa5YWxAzgFYg==", + "dev": true, + "license": "MIT" + }, "node_modules/@tailwindcss/oxide": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.14.tgz", - "integrity": "sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.6.tgz", + "integrity": "sha512-0bpEBQiGx+227fW4G0fLQ8vuvyy5rsB1YIYNapTq3aRsJ9taF3f5cCaovDjN5pUGKKzcpMrZst/mhNaKAPOHOA==", "dev": true, "hasInstallScript": true, "license": "MIT", "dependencies": { "detect-libc": "^2.0.4", - "tar": "^7.5.1" + "tar": "^7.4.3" }, "engines": { "node": ">= 10" }, "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.14", - "@tailwindcss/oxide-darwin-arm64": "4.1.14", - "@tailwindcss/oxide-darwin-x64": "4.1.14", - "@tailwindcss/oxide-freebsd-x64": "4.1.14", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.14", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.14", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.14", - 
"@tailwindcss/oxide-linux-x64-gnu": "4.1.14", - "@tailwindcss/oxide-linux-x64-musl": "4.1.14", - "@tailwindcss/oxide-wasm32-wasi": "4.1.14", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.14", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.14" + "@tailwindcss/oxide-android-arm64": "4.1.6", + "@tailwindcss/oxide-darwin-arm64": "4.1.6", + "@tailwindcss/oxide-darwin-x64": "4.1.6", + "@tailwindcss/oxide-freebsd-x64": "4.1.6", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.6", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.6", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.6", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.6", + "@tailwindcss/oxide-linux-x64-musl": "4.1.6", + "@tailwindcss/oxide-wasm32-wasi": "4.1.6", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.6", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.6" } }, "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.14.tgz", - "integrity": "sha512-a94ifZrGwMvbdeAxWoSuGcIl6/DOP5cdxagid7xJv6bwFp3oebp7y2ImYsnZBMTwjn5Ev5xESvS3FFYUGgPODQ==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.6.tgz", + "integrity": "sha512-VHwwPiwXtdIvOvqT/0/FLH/pizTVu78FOnI9jQo64kSAikFSZT7K4pjyzoDpSMaveJTGyAKvDjuhxJxKfmvjiQ==", "cpu": [ "arm64" ], @@ -3247,9 +3268,9 @@ } }, "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.14.tgz", - "integrity": "sha512-HkFP/CqfSh09xCnrPJA7jud7hij5ahKyWomrC3oiO2U9i0UjP17o9pJbxUN0IJ471GTQQmzwhp0DEcpbp4MZTA==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.6.tgz", + "integrity": "sha512-weINOCcqv1HVBIGptNrk7c6lWgSFFiQMcCpKM4tnVi5x8OY2v1FrV76jwLukfT6pL1hyajc06tyVmZFYXoxvhQ==", "cpu": [ "arm64" ], @@ -3264,9 +3285,9 @@ } }, "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.14.tgz", - "integrity": "sha512-eVNaWmCgdLf5iv6Qd3s7JI5SEFBFRtfm6W0mphJYXgvnDEAZ5sZzqmI06bK6xo0IErDHdTA5/t7d4eTfWbWOFw==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.6.tgz", + "integrity": "sha512-3FzekhHG0ww1zQjQ1lPoq0wPrAIVXAbUkWdWM8u5BnYFZgb9ja5ejBqyTgjpo5mfy0hFOoMnMuVDI+7CXhXZaQ==", "cpu": [ "x64" ], @@ -3281,9 +3302,9 @@ } }, "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.14.tgz", - "integrity": "sha512-QWLoRXNikEuqtNb0dhQN6wsSVVjX6dmUFzuuiL09ZeXju25dsei2uIPl71y2Ic6QbNBsB4scwBoFnlBfabHkEw==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.6.tgz", + "integrity": "sha512-4m5F5lpkBZhVQJq53oe5XgJ+aFYWdrgkMwViHjRsES3KEu2m1udR21B1I77RUqie0ZYNscFzY1v9aDssMBZ/1w==", "cpu": [ "x64" ], @@ -3298,9 +3319,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.14.tgz", - "integrity": "sha512-VB4gjQni9+F0VCASU+L8zSIyjrLLsy03sjcR3bM0V2g4SNamo0FakZFKyUQ96ZVwGK4CaJsc9zd/obQy74o0Fw==", + "version": "4.1.6", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.6.tgz", + "integrity": "sha512-qU0rHnA9P/ZoaDKouU1oGPxPWzDKtIfX7eOGi5jOWJKdxieUJdVV+CxWZOpDWlYTd4N3sFQvcnVLJWJ1cLP5TA==", "cpu": [ "arm" ], @@ -3315,9 +3336,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.14.tgz", - "integrity": "sha512-qaEy0dIZ6d9vyLnmeg24yzA8XuEAD9WjpM5nIM1sUgQ/Zv7cVkharPDQcmm/t/TvXoKo/0knI3me3AGfdx6w1w==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.6.tgz", + "integrity": "sha512-jXy3TSTrbfgyd3UxPQeXC3wm8DAgmigzar99Km9Sf6L2OFfn/k+u3VqmpgHQw5QNfCpPe43em6Q7V76Wx7ogIQ==", "cpu": [ "arm64" ], @@ -3332,9 +3353,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.14.tgz", - "integrity": "sha512-ISZjT44s59O8xKsPEIesiIydMG/sCXoMBCqsphDm/WcbnuWLxxb+GcvSIIA5NjUw6F8Tex7s5/LM2yDy8RqYBQ==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.6.tgz", + "integrity": "sha512-8kjivE5xW0qAQ9HX9reVFmZj3t+VmljDLVRJpVBEoTR+3bKMnvC7iLcoSGNIUJGOZy1mLVq7x/gerVg0T+IsYw==", "cpu": [ "arm64" ], @@ -3349,9 +3370,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.14.tgz", - "integrity": "sha512-02c6JhLPJj10L2caH4U0zF8Hji4dOeahmuMl23stk0MU1wfd1OraE7rOloidSF8W5JTHkFdVo/O7uRUJJnUAJg==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.6.tgz", + "integrity": "sha512-A4spQhwnWVpjWDLXnOW9PSinO2PTKJQNRmL/aIl2U/O+RARls8doDfs6R41+DAXK0ccacvRyDpR46aVQJJCoCg==", "cpu": [ "x64" ], @@ -3366,9 +3387,9 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.14.tgz", - "integrity": "sha512-TNGeLiN1XS66kQhxHG/7wMeQDOoL0S33x9BgmydbrWAb9Qw0KYdd8o1ifx4HOGDWhVmJ+Ul+JQ7lyknQFilO3Q==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.6.tgz", + "integrity": "sha512-YRee+6ZqdzgiQAHVSLfl3RYmqeeaWVCk796MhXhLQu2kJu2COHBkqlqsqKYx3p8Hmk5pGCQd2jTAoMWWFeyG2A==", "cpu": [ "x64" ], @@ -3383,9 +3404,9 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.14.tgz", - "integrity": "sha512-uZYAsaW/jS/IYkd6EWPJKW/NlPNSkWkBlaeVBi/WsFQNP05/bzkebUL8FH1pdsqx4f2fH/bWFcUABOM9nfiJkQ==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.6.tgz", + "integrity": "sha512-qAp4ooTYrBQ5pk5jgg54/U1rCJ/9FLYOkkQ/nTE+bVMseMfB6O7J8zb19YTpWuu4UdfRf5zzOrNKfl6T64MNrQ==", "bundleDependencies": [ "@napi-rs/wasm-runtime", "@emnapi/core", @@ -3401,81 +3422,21 @@ "license": "MIT", "optional": true, "dependencies": { - "@emnapi/core": "^1.5.0", - "@emnapi/runtime": "^1.5.0", - "@emnapi/wasi-threads": "^1.1.0", - "@napi-rs/wasm-runtime": "^1.0.5", - "@tybys/wasm-util": "^0.10.1", - "tslib": "^2.4.0" + "@emnapi/core": "^1.4.3", + 
"@emnapi/runtime": "^1.4.3", + "@emnapi/wasi-threads": "^1.0.2", + "@napi-rs/wasm-runtime": "^0.2.9", + "@tybys/wasm-util": "^0.9.0", + "tslib": "^2.8.0" }, "engines": { "node": ">=14.0.0" } }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { - "version": "1.5.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/wasi-threads": "1.1.0", - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { - "version": "1.5.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { - "version": "1.1.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { - "version": "1.0.5", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/core": "^1.5.0", - "@emnapi/runtime": "^1.5.0", - "@tybys/wasm-util": "^0.10.1" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { - "version": "0.10.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { - "version": "2.8.1", - "dev": true, - "inBundle": true, - "license": "0BSD", - "optional": true - }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.14.tgz", - "integrity": "sha512-Az0RnnkcvRqsuoLH2Z4n3JfAef0wElgzHD5Aky/e+0tBUxUhIeIqFBTMNQvmMRSP15fWwmvjBxZ3Q8RhsDnxAA==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.6.tgz", + "integrity": "sha512-nqpDWk0Xr8ELO/nfRUDjk1pc9wDJ3ObeDdNMHLaymc4PJBWj11gdPCWZFKSK2AVKjJQC7J2EfmSmf47GN7OuLg==", "cpu": [ "arm64" ], @@ -3490,9 +3451,9 @@ } }, "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.14.tgz", - "integrity": "sha512-ttblVGHgf68kEE4om1n/n44I0yGPkCPbLsqzjvybhpwa6mKKtgFfAzy6btc3HRmuW7nHe0OOrSeNP9sQmmH9XA==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.6.tgz", + "integrity": "sha512-5k9xF33xkfKpo9wCvYcegQ21VwIBU1/qEbYlVukfEIyQbEA47uK8AAwS7NVjNE3vHzcmxMYwd0l6L4pPjjm1rQ==", "cpu": [ "x64" ], @@ -3507,19 +3468,26 @@ } }, "node_modules/@tailwindcss/postcss": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.14.tgz", - "integrity": "sha512-BdMjIxy7HUNThK87C7BC8I1rE8BVUsfNQSI5siQ4JK3iIa3w0XyVvVL9SXLWO//CtYTcp1v7zci0fYwJOjB+Zg==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.6.tgz", + "integrity": "sha512-ELq+gDMBuRXPJlpE3PEen+1MhnHAQQrh2zF0dI1NXOlEWfr2qWf2CQdr5jl9yANv8RErQaQ2l6nIFO9OSCVq/g==", "dev": true, "license": "MIT", "dependencies": { "@alloc/quick-lru": "^5.2.0", - "@tailwindcss/node": "4.1.14", - "@tailwindcss/oxide": "4.1.14", + "@tailwindcss/node": "4.1.6", + "@tailwindcss/oxide": "4.1.6", "postcss": "^8.4.41", - "tailwindcss": "4.1.14" 
+ "tailwindcss": "4.1.6" } }, + "node_modules/@tailwindcss/postcss/node_modules/tailwindcss": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.6.tgz", + "integrity": "sha512-j0cGLTreM6u4OWzBeLBpycK0WIh8w7kSwcUsQZoGLHZ7xDTdM69lN64AgoIEEwFi0tnhs4wSykUa5YWxAzgFYg==", + "dev": true, + "license": "MIT" + }, "node_modules/@testing-library/dom": { "version": "10.4.1", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz", @@ -3633,6 +3601,17 @@ "dev": true, "license": "MIT" }, + "node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.9.0.tgz", + "integrity": "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/aria-query": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", @@ -3833,12 +3812,12 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "24.8.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.8.1.tgz", - "integrity": "sha512-alv65KGRadQVfVcG69MuB4IzdYVpRwMG/mq8KWOaoOdyY617P5ivaDiMCGOFDWD2sAn5Q0mR3mRtUOgm99hL9Q==", + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.0.tgz", + "integrity": "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==", "license": "MIT", "dependencies": { - "undici-types": "~7.14.0" + "undici-types": "~7.10.0" } }, "node_modules/@types/node-fetch": { @@ -5871,9 +5850,9 @@ "license": "MIT" }, "node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "version": "5.18.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz", + "integrity": "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==", "dev": true, "license": "MIT", "dependencies": { @@ -9149,9 +9128,9 @@ } }, "node_modules/jiti": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", - "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", "dev": true, "license": "MIT", "bin": { @@ -9389,9 +9368,9 @@ } }, "node_modules/lightningcss": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", - "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.29.2.tgz", + "integrity": "sha512-6b6gd/RUXKaw5keVdSEtqFVdzWnU5jMxTUjA2bVcMNPLwSQ08Sv/UodBVtETLCn7k4S1Ibxwh7k68IwLZPgKaA==", "dev": true, "license": "MPL-2.0", "dependencies": { @@ -9405,22 +9384,22 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "lightningcss-darwin-arm64": "1.30.1", - "lightningcss-darwin-x64": "1.30.1", - "lightningcss-freebsd-x64": "1.30.1", - 
"lightningcss-linux-arm-gnueabihf": "1.30.1", - "lightningcss-linux-arm64-gnu": "1.30.1", - "lightningcss-linux-arm64-musl": "1.30.1", - "lightningcss-linux-x64-gnu": "1.30.1", - "lightningcss-linux-x64-musl": "1.30.1", - "lightningcss-win32-arm64-msvc": "1.30.1", - "lightningcss-win32-x64-msvc": "1.30.1" + "lightningcss-darwin-arm64": "1.29.2", + "lightningcss-darwin-x64": "1.29.2", + "lightningcss-freebsd-x64": "1.29.2", + "lightningcss-linux-arm-gnueabihf": "1.29.2", + "lightningcss-linux-arm64-gnu": "1.29.2", + "lightningcss-linux-arm64-musl": "1.29.2", + "lightningcss-linux-x64-gnu": "1.29.2", + "lightningcss-linux-x64-musl": "1.29.2", + "lightningcss-win32-arm64-msvc": "1.29.2", + "lightningcss-win32-x64-msvc": "1.29.2" } }, "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", - "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.29.2.tgz", + "integrity": "sha512-cK/eMabSViKn/PG8U/a7aCorpeKLMlK0bQeNHmdb7qUnBkNPnL+oV5DjJUo0kqWsJUapZsM4jCfYItbqBDvlcA==", "cpu": [ "arm64" ], @@ -9439,9 +9418,9 @@ } }, "node_modules/lightningcss-darwin-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", - "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.29.2.tgz", + "integrity": "sha512-j5qYxamyQw4kDXX5hnnCKMf3mLlHvG44f24Qyi2965/Ycz829MYqjrVg2H8BidybHBp9kom4D7DR5VqCKDXS0w==", "cpu": [ "x64" ], @@ -9460,9 +9439,9 @@ } }, "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", - "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.29.2.tgz", + "integrity": "sha512-wDk7M2tM78Ii8ek9YjnY8MjV5f5JN2qNVO+/0BAGZRvXKtQrBC4/cn4ssQIpKIPP44YXw6gFdpUF+Ps+RGsCwg==", "cpu": [ "x64" ], @@ -9481,9 +9460,9 @@ } }, "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", - "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.29.2.tgz", + "integrity": "sha512-IRUrOrAF2Z+KExdExe3Rz7NSTuuJ2HvCGlMKoquK5pjvo2JY4Rybr+NrKnq0U0hZnx5AnGsuFHjGnNT14w26sg==", "cpu": [ "arm" ], @@ -9502,9 +9481,9 @@ } }, "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", - "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.29.2.tgz", + "integrity": 
"sha512-KKCpOlmhdjvUTX/mBuaKemp0oeDIBBLFiU5Fnqxh1/DZ4JPZi4evEH7TKoSBFOSOV3J7iEmmBaw/8dpiUvRKlQ==", "cpu": [ "arm64" ], @@ -9523,9 +9502,9 @@ } }, "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", - "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.29.2.tgz", + "integrity": "sha512-Q64eM1bPlOOUgxFmoPUefqzY1yV3ctFPE6d/Vt7WzLW4rKTv7MyYNky+FWxRpLkNASTnKQUaiMJ87zNODIrrKQ==", "cpu": [ "arm64" ], @@ -9544,9 +9523,9 @@ } }, "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", - "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.29.2.tgz", + "integrity": "sha512-0v6idDCPG6epLXtBH/RPkHvYx74CVziHo6TMYga8O2EiQApnUPZsbR9nFNrg2cgBzk1AYqEd95TlrsL7nYABQg==", "cpu": [ "x64" ], @@ -9565,9 +9544,9 @@ } }, "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", - "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.29.2.tgz", + "integrity": "sha512-rMpz2yawkgGT8RULc5S4WiZopVMOFWjiItBT7aSfDX4NQav6M44rhn5hjtkKzB+wMTRlLLqxkeYEtQ3dd9696w==", "cpu": [ "x64" ], @@ -9586,9 +9565,9 @@ } }, "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", - "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.29.2.tgz", + "integrity": "sha512-nL7zRW6evGQqYVu/bKGK+zShyz8OVzsCotFgc7judbt6wnB2KbiKKJwBE4SGoDBQ1O94RjW4asrCjQL4i8Fhbw==", "cpu": [ "arm64" ], @@ -9607,9 +9586,9 @@ } }, "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", - "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.29.2.tgz", + "integrity": "sha512-EdIUW3B2vLuHmv7urfzMI/h2fmlnOQBk1xlsDxkN1tCWKjNFjfLhGxYk8C8mzpSfr+A6jFFIi8fU6LbQGsRWjA==", "cpu": [ "x64" ], @@ -9635,9 +9614,9 @@ "license": "MIT" }, "node_modules/llama-stack-client": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.0.tgz", - "integrity": "sha512-76K/t1doaGmlBbDxCADaral9Vccvys9P8pqAMIhwBhMAqWudCEORrMMhUSg+pjhamWmEKj3wa++d4zeOGbfN/w==", + "version": "0.2.23", + "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.2.23.tgz", + "integrity": 
"sha512-J3YFH1HW2K70capejQxGlCyTgKdfx+sQf8Ab+HFi1j2Q00KtpHXB79RxejvBxjWC3X2E++P9iU57KdU2Tp/rIQ==", "license": "MIT", "dependencies": { "@types/node": "^18.11.18", @@ -9747,13 +9726,13 @@ } }, "node_modules/magic-string": { - "version": "0.30.19", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", - "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.5" + "@jridgewell/sourcemap-codec": "^1.5.0" } }, "node_modules/make-dir": { @@ -10738,9 +10717,9 @@ } }, "node_modules/minizlib": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", - "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", + "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", "dev": true, "license": "MIT", "dependencies": { @@ -10750,6 +10729,22 @@ "node": ">= 18" } }, + "node_modules/mkdirp": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, + "license": "MIT", + "bin": { + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/motion-dom": { "version": "12.23.23", "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.23.tgz", @@ -12994,37 +12989,34 @@ } }, "node_modules/tailwindcss": { - "version": "4.1.14", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.14.tgz", - "integrity": "sha512-b7pCxjGO98LnxVkKjaZSDeNuljC4ueKUddjENJOADtubtdo8llTaJy7HwBMeLNSSo2N5QIAgklslK1+Ir8r6CA==", + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.13.tgz", + "integrity": "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w==", "dev": true, "license": "MIT" }, "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", "dev": true, "license": "MIT", "engines": { "node": ">=6" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" } }, "node_modules/tar": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.1.tgz", - "integrity": "sha512-nlGpxf+hv0v7GkWBK2V9spgactGOp0qvfWRxUMjqHyzrt3SgwE48DIv/FhqPHJYLHpgW1opq3nERbz5Anq7n1g==", + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", "dev": true, "license": "ISC", 
"dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.1.0", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", "yallist": "^5.0.0" }, "engines": { @@ -13426,9 +13418,9 @@ } }, "node_modules/undici-types": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", - "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", "license": "MIT" }, "node_modules/unified": { diff --git a/llama_stack/ui/package.json b/llama_stack/ui/package.json index 9350be16a..07b3465e5 100644 --- a/llama_stack/ui/package.json +++ b/llama_stack/ui/package.json @@ -23,7 +23,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "framer-motion": "^12.23.24", - "llama-stack-client": "^0.3.0", + "llama-stack-client": "^0.2.23", "lucide-react": "^0.545.0", "next": "15.5.4", "next-auth": "^4.24.11", diff --git a/pyproject.toml b/pyproject.toml index 741dd17e5..7e709e405 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ required-version = ">=0.7.0" [project] name = "llama_stack" -version = "0.3.0" +version = "0.3.0rc4" authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }] description = "Llama Stack" readme = "README.md" @@ -30,7 +30,7 @@ dependencies = [ "httpx", "jinja2>=3.1.6", "jsonschema", - "llama-stack-client>=0.3.0", + "llama-stack-client>=0.3.0rc4", "openai>=1.107", # for expires_after support "prompt-toolkit", "python-dotenv", @@ -55,7 +55,7 @@ dependencies = [ ui = [ "streamlit", "pandas", - "llama-stack-client>=0.3.0", + "llama-stack-client>=0.3.0rc4", "streamlit-option-menu", ] diff --git a/scripts/docker.sh b/scripts/docker.sh index a0690c8a9..7a5c3e6e0 100755 --- a/scripts/docker.sh +++ b/scripts/docker.sh @@ -156,16 +156,6 @@ DISTRO=$(echo "$DISTRO" | sed 's/^docker://') CONTAINER_NAME="llama-stack-test-$DISTRO" -should_copy_source() { - if [[ "$USE_COPY_NOT_MOUNT" == "true" ]]; then - return 0 - fi - if [[ "${CI:-false}" == "true" ]] || [[ "${GITHUB_ACTIONS:-false}" == "true" ]]; then - return 0 - fi - return 1 -} - # Function to check if container is running is_container_running() { docker ps --filter "name=^${CONTAINER_NAME}$" --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$" @@ -193,29 +183,20 @@ stop_container() { build_image() { echo "=== Building Docker Image for distribution: $DISTRO ===" # Get the repo root (parent of scripts directory) - local script_dir - script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) - local repo_root - repo_root=$(cd "$script_dir/.." && pwd) + SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) + REPO_ROOT=$(cd "$SCRIPT_DIR/.." && pwd) - local containerfile="$repo_root/containers/Containerfile" - if [[ ! 
-f "$containerfile" ]]; then - echo "❌ Containerfile not found at $containerfile" - exit 1 + # Determine whether to copy or mount source + # Copy in CI or if explicitly requested, otherwise mount for live development + BUILD_ENV="LLAMA_STACK_DIR=$REPO_ROOT" + if [[ "$USE_COPY_NOT_MOUNT" == "true" ]] || [[ "${CI:-false}" == "true" ]] || [[ "${GITHUB_ACTIONS:-false}" == "true" ]]; then + echo "Copying source into image (USE_COPY_NOT_MOUNT=true, CI=${CI:-false}, GITHUB_ACTIONS=${GITHUB_ACTIONS:-false})" + BUILD_ENV="USE_COPY_NOT_MOUNT=true $BUILD_ENV" + else + echo "Will mount source for live development" fi - local build_cmd=( - docker - build - "$repo_root" - -f "$containerfile" - --tag "localhost/distribution-$DISTRO:dev" - --build-arg "DISTRO_NAME=$DISTRO" - --build-arg "INSTALL_MODE=editable" - --build-arg "LLAMA_STACK_DIR=/workspace" - ) - - if ! "${build_cmd[@]}"; then + if ! eval "$BUILD_ENV llama stack build --distro '$DISTRO' --image-type container"; then echo "❌ Failed to build Docker image" exit 1 fi @@ -243,7 +224,7 @@ start_container() { # Check if image exists (with or without localhost/ prefix) if ! docker images --format "{{.Repository}}:{{.Tag}}" | grep -q "distribution-$DISTRO:dev$"; then echo "❌ Error: Image distribution-$DISTRO:dev does not exist" - echo "Either build it first without --no-rebuild, or run: docker build . -f containers/Containerfile --build-arg DISTRO_NAME=$DISTRO --tag localhost/distribution-$DISTRO:dev" + echo "Either build it first without --no-rebuild, or run: llama stack build --distro $DISTRO --image-type container" exit 1 fi echo "βœ… Found existing image for distribution-$DISTRO:dev" @@ -255,10 +236,8 @@ start_container() { echo "=== Starting Docker Container ===" # Get the repo root for volume mount - local script_dir - script_dir=$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd) - local repo_root - repo_root=$(cd "$script_dir/.." && pwd) + SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd) + REPO_ROOT=$(cd "$SCRIPT_DIR/.." && pwd) # Determine the actual image name (may have localhost/ prefix) IMAGE_NAME=$(docker images --format "{{.Repository}}:{{.Tag}}" | grep "distribution-$DISTRO:dev$" | head -1) @@ -300,18 +279,10 @@ start_container() { NETWORK_MODE="--network host" fi - local source_mount="" - if should_copy_source; then - echo "Source baked into image (no volume mount)" - else - source_mount="-v \"$repo_root\":/workspace" - echo "Mounting $repo_root into /workspace" - fi - docker run -d $NETWORK_MODE --name "$CONTAINER_NAME" \ -p $PORT:$PORT \ $DOCKER_ENV_VARS \ - $source_mount \ + -v "$REPO_ROOT":/app/llama-stack-source \ "$IMAGE_NAME" \ --port $PORT diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh index 93739052b..e19a5cc55 100755 --- a/scripts/integration-tests.sh +++ b/scripts/integration-tests.sh @@ -238,8 +238,6 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then echo "Stopping Docker container..." container_name="llama-stack-test-$DISTRO" if docker ps -a --format '{{.Names}}' | grep -q "^${container_name}$"; then - echo "Dumping container logs before stopping..." 
- docker logs "$container_name" > "docker-${DISTRO}-${INFERENCE_MODE}.log" 2>&1 || true echo "Stopping and removing container: $container_name" docker stop "$container_name" 2>/dev/null || true docker rm "$container_name" 2>/dev/null || true @@ -254,24 +252,19 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then export LLAMA_STACK_PORT=8321 echo "=== Building Docker Image for distribution: $DISTRO ===" - containerfile="$ROOT_DIR/containers/Containerfile" - if [[ ! -f "$containerfile" ]]; then - echo "❌ Containerfile not found at $containerfile" - exit 1 + # Set LLAMA_STACK_DIR to repo root + # USE_COPY_NOT_MOUNT copies files into image (for CI), otherwise mounts for live development + BUILD_ENV="LLAMA_STACK_DIR=$ROOT_DIR" + if [[ "${CI:-false}" == "true" ]] || [[ "${GITHUB_ACTIONS:-false}" == "true" ]]; then + echo "CI detected (CI=$CI, GITHUB_ACTIONS=$GITHUB_ACTIONS): copying source into image" + BUILD_ENV="USE_COPY_NOT_MOUNT=true $BUILD_ENV" + else + echo "Local mode: will mount source for live development" fi - build_cmd=( - docker - build - "$ROOT_DIR" - -f "$containerfile" - --tag "localhost/distribution-$DISTRO:dev" - --build-arg "DISTRO_NAME=$DISTRO" - --build-arg "INSTALL_MODE=editable" - --build-arg "LLAMA_STACK_DIR=/workspace" - ) + eval "$BUILD_ENV llama stack build --distro '$DISTRO' --image-type container" - if ! "${build_cmd[@]}"; then + if [ $? -ne 0 ]; then echo "❌ Failed to build Docker image" exit 1 fi @@ -311,6 +304,7 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then docker run -d --network host --name "$container_name" \ -p $LLAMA_STACK_PORT:$LLAMA_STACK_PORT \ $DOCKER_ENV_VARS \ + -v $ROOT_DIR:/app/llama-stack-source \ "$IMAGE_NAME" \ --port $LLAMA_STACK_PORT @@ -410,21 +404,6 @@ elif [ $exit_code -eq 5 ]; then echo "⚠️ No tests collected (pattern matched no tests)" else echo "❌ Tests failed" - echo "" - echo "=== Dumping last 100 lines of logs for debugging ===" - - # Output server or container logs based on stack config - if [[ "$STACK_CONFIG" == *"server:"* && -f "server.log" ]]; then - echo "--- Last 100 lines of server.log ---" - tail -100 server.log - elif [[ "$STACK_CONFIG" == *"docker:"* ]]; then - docker_log_file="docker-${DISTRO}-${INFERENCE_MODE}.log" - if [[ -f "$docker_log_file" ]]; then - echo "--- Last 100 lines of $docker_log_file ---" - tail -100 "$docker_log_file" - fi - fi - exit 1 fi diff --git a/scripts/telemetry/setup_telemetry.sh b/scripts/telemetry/setup_telemetry.sh index ab855e8db..ecdd56175 100755 --- a/scripts/telemetry/setup_telemetry.sh +++ b/scripts/telemetry/setup_telemetry.sh @@ -16,59 +16,14 @@ set -Eeuo pipefail -# Parse arguments -CONTAINER_RUNTIME="" - -print_usage() { - echo "Usage: $0 [--container docker|podman]" - echo "" - echo "Options:" - echo " -c, --container Choose container runtime (docker or podman)." - echo " -h, --help Show this help." 
-} - -while [[ $# -gt 0 ]]; do - case "$1" in - -c|--container) - if [[ $# -lt 2 ]]; then - echo "🚨 --container requires a value: docker or podman" - exit 1 - fi - case "$2" in - docker|podman) - CONTAINER_RUNTIME="$2" - shift 2 - ;; - *) - echo "🚨 Invalid container runtime: $2" - echo "Valid options are: docker, podman" - exit 1 - ;; - esac - ;; - -h|--help) - print_usage - exit 0 - ;; - *) - echo "🚨 Unknown argument: $1" - print_usage - exit 1 - ;; - esac -done - -# Detect container runtime if not specified -if [[ -z "$CONTAINER_RUNTIME" ]]; then - if command -v podman &> /dev/null; then - CONTAINER_RUNTIME="podman" - elif command -v docker &> /dev/null; then - CONTAINER_RUNTIME="docker" - else - echo "🚨 Neither Podman nor Docker could be found" - echo "Install Docker: https://docs.docker.com/get-docker/ or Podman: https://podman.io/getting-started/installation" - exit 1 - fi +if command -v podman &> /dev/null; then + CONTAINER_RUNTIME="podman" +elif command -v docker &> /dev/null; then + CONTAINER_RUNTIME="docker" +else + echo "🚨 Neither Podman nor Docker could be found" + echo "Install Docker: https://docs.docker.com/get-docker/ or Podman: https://podman.io/getting-started/installation" + exit 1 fi echo "πŸš€ Setting up telemetry stack for Llama Stack using $CONTAINER_RUNTIME..." diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index d86fafed2..a258eb1a0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -5,7 +5,6 @@ # the root directory of this source tree. import inspect import itertools -import logging # allow-direct-logging import os import tempfile import textwrap @@ -38,12 +37,8 @@ def pytest_sessionstart(session): if "LLAMA_STACK_TEST_INFERENCE_MODE" not in os.environ: os.environ["LLAMA_STACK_TEST_INFERENCE_MODE"] = "replay" - if "LLAMA_STACK_LOGGING" not in os.environ: - os.environ["LLAMA_STACK_LOGGING"] = "all=warning" - if "SQLITE_STORE_DIR" not in os.environ: os.environ["SQLITE_STORE_DIR"] = tempfile.mkdtemp() - logger.info(f"Setting SQLITE_STORE_DIR: {os.environ['SQLITE_STORE_DIR']}") # Set test stack config type for api_recorder test isolation stack_config = session.config.getoption("--stack-config", default=None) @@ -59,12 +54,6 @@ def pytest_sessionstart(session): patch_httpx_for_test_id() -@pytest.fixture(autouse=True) -def suppress_httpx_logs(caplog): - """Suppress httpx INFO logs for all integration tests""" - caplog.set_level(logging.WARNING, logger="httpx") - - @pytest.fixture(autouse=True) def _track_test_context(request): """Automatically track current test context for isolated recordings. 
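A short usage sketch for the tracing helpers relocated to llama_stack/providers/utils/telemetry/tracing.py earlier in this patch (illustrative only, not part of the patch). The span() factory, the SpanContextManager async context-manager protocol, set_attribute(), and start_trace() all appear in that hunk; the handler name, span names, and attribute values below are assumptions, and start_trace() is effectively a no-op unless setup_logger() has installed a Telemetry implementation.

from llama_stack.providers.utils.telemetry.tracing import span, start_trace

async def handle_request(request_id: str) -> None:
    # Begin a trace for this request; without a configured Telemetry API this
    # only logs a debug message and returns.
    await start_trace("handle_request", {"request_id": request_id})
    # Spans can be used as sync or async context managers, or as decorators.
    async with span("load_model", {"model": "illustrative-model"}) as s:
        s.set_attribute("cache_hit", False)
        ...  # traced work goes here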
diff --git a/tests/integration/datasets/test_datasets.py b/tests/integration/datasets/test_datasets.py index 3ad5570f0..60db95f30 100644 --- a/tests/integration/datasets/test_datasets.py +++ b/tests/integration/datasets/test_datasets.py @@ -78,18 +78,18 @@ def data_url_from_file(file_path: str) -> str: ], ) def test_register_and_iterrows(llama_stack_client, purpose, source, provider_id, limit): - dataset = llama_stack_client.beta.datasets.register( + dataset = llama_stack_client.datasets.register( purpose=purpose, source=source, ) assert dataset.identifier is not None assert dataset.provider_id == provider_id - iterrow_response = llama_stack_client.beta.datasets.iterrows(dataset.identifier, limit=limit) + iterrow_response = llama_stack_client.datasets.iterrows(dataset.identifier, limit=limit) assert len(iterrow_response.data) == limit - dataset_list = llama_stack_client.beta.datasets.list() + dataset_list = llama_stack_client.datasets.list() assert dataset.identifier in [d.identifier for d in dataset_list] - llama_stack_client.beta.datasets.unregister(dataset.identifier) - dataset_list = llama_stack_client.beta.datasets.list() + llama_stack_client.datasets.unregister(dataset.identifier) + dataset_list = llama_stack_client.datasets.list() assert dataset.identifier not in [d.identifier for d in dataset_list] diff --git a/tests/integration/eval/test_eval.py b/tests/integration/eval/test_eval.py index 98b3302e0..01581e829 100644 --- a/tests/integration/eval/test_eval.py +++ b/tests/integration/eval/test_eval.py @@ -17,17 +17,17 @@ from ..datasets.test_datasets import data_url_from_file @pytest.mark.parametrize("scoring_fn_id", ["basic::equality"]) def test_evaluate_rows(llama_stack_client, text_model_id, scoring_fn_id): - dataset = llama_stack_client.beta.datasets.register( + dataset = llama_stack_client.datasets.register( purpose="eval/messages-answer", source={ "type": "uri", "uri": data_url_from_file(Path(__file__).parent.parent / "datasets" / "test_dataset.csv"), }, ) - response = llama_stack_client.beta.datasets.list() + response = llama_stack_client.datasets.list() assert any(x.identifier == dataset.identifier for x in response) - rows = llama_stack_client.beta.datasets.iterrows( + rows = llama_stack_client.datasets.iterrows( dataset_id=dataset.identifier, limit=3, ) @@ -37,12 +37,12 @@ def test_evaluate_rows(llama_stack_client, text_model_id, scoring_fn_id): scoring_fn_id, ] benchmark_id = str(uuid.uuid4()) - llama_stack_client.alpha.benchmarks.register( + llama_stack_client.benchmarks.register( benchmark_id=benchmark_id, dataset_id=dataset.identifier, scoring_functions=scoring_functions, ) - list_benchmarks = llama_stack_client.alpha.benchmarks.list() + list_benchmarks = llama_stack_client.benchmarks.list() assert any(x.identifier == benchmark_id for x in list_benchmarks) response = llama_stack_client.alpha.eval.evaluate_rows( @@ -66,7 +66,7 @@ def test_evaluate_rows(llama_stack_client, text_model_id, scoring_fn_id): @pytest.mark.parametrize("scoring_fn_id", ["basic::subset_of"]) def test_evaluate_benchmark(llama_stack_client, text_model_id, scoring_fn_id): - dataset = llama_stack_client.beta.datasets.register( + dataset = llama_stack_client.datasets.register( purpose="eval/messages-answer", source={ "type": "uri", @@ -74,7 +74,7 @@ def test_evaluate_benchmark(llama_stack_client, text_model_id, scoring_fn_id): }, ) benchmark_id = str(uuid.uuid4()) - llama_stack_client.alpha.benchmarks.register( + llama_stack_client.benchmarks.register( benchmark_id=benchmark_id, 
dataset_id=dataset.identifier, scoring_functions=[scoring_fn_id], diff --git a/tests/integration/fixtures/common.py b/tests/integration/fixtures/common.py index 5fbf2c099..ffd49033d 100644 --- a/tests/integration/fixtures/common.py +++ b/tests/integration/fixtures/common.py @@ -40,12 +40,7 @@ def is_port_available(port: int, host: str = "localhost") -> bool: def start_llama_stack_server(config_name: str) -> subprocess.Popen: """Start a llama stack server with the given config.""" - - # remove server.log if it exists - if os.path.exists("server.log"): - os.remove("server.log") - - cmd = f"llama stack run {config_name}" + cmd = f"uv run --with llama-stack llama stack build --distro {config_name} --image-type venv --run" devnull = open(os.devnull, "w") process = subprocess.Popen( shlex.split(cmd), diff --git a/tests/integration/responses/recordings/00913934356fb4f196f8c21081154c67a7197dcc26e0326cf72448334c21424d.json b/tests/integration/responses/recordings/00913934356fb4f196f8c21081154c67a7197dcc26e0326cf72448334c21424d.json deleted file mode 100644 index c6ffb34ae..000000000 --- a/tests/integration/responses/recordings/00913934356fb4f196f8c21081154c67a7197dcc26e0326cf72448334c21424d.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts_pdf]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_M8gyYiB39MwYdJKc4aHIGbfA", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8anw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "99M" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ptMnH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ue" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Euqoc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zdLoy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Dq4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nAZItljCEdlp1VF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9VZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JqBWo7hfEH8khh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hls" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KHtw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-00913934356f", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "94SeGMt8IytX52R" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/0c81f5d5c9e09daecd5eceaeae22bf228addd43f95f4a9ede8bd5a3cfbd091af.json b/tests/integration/responses/recordings/0c81f5d5c9e09daecd5eceaeae22bf228addd43f95f4a9ede8bd5a3cfbd091af.json deleted file mode 100644 index 124ac58f8..000000000 --- a/tests/integration/responses/recordings/0c81f5d5c9e09daecd5eceaeae22bf228addd43f95f4a9ede8bd5a3cfbd091af.json +++ /dev/null @@ -1,1060 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_and[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the engineering updates from the US?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_rST37XuKuJQcEBfmoTnNQzNe", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"engineering updates from the US\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_rST37XuKuJQcEBfmoTnNQzNe", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-710505118847, score: 0.0053456084826297495, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-710505118847', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-710505118847|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"engineering updates from the US\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "z02myeBc6IWePG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kUl1Dgvb1xGk0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " latest", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mFba9527A" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " engineering", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CuR9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "b33WbQ32" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ybBU6K61695" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - 
"choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CS7fwXovMF3s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9SyeqBDvQ2F3A" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " include", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "w3TeXaqI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " new", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Fz7AHFsFIYoJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " features", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "In5n8Bd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " that", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n1o5yYMnIMN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " were", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", 
- "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bg19HslIylq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " deployed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JmwcZYf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "b2vPax6xpus1H" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "atkHzevglgp8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "i7GGwqsH3vDMq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HCEDdIsLl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XGsBo410Yxpw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - 
"content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qGOs1cXLinho8b" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Fi1yFeDxgl9HIpw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HHz8qM9mtMGqDCp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aqfdHJ5pgR6zr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rpqQ271sC7BNlof" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5SaeSMndmAH3Rk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FRggMDh3RIYByfw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VO19FFBENZdX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ff4aGYwzBDQQl6V" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "710", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1JmjekVJz175r" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "505", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uXUPxMmKofAwR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "118", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "b9FFeyPKWaapF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "847", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "48t9A4JN46znF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": "|", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "60SAkGxGOFZsqKW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1OYEmPtXR2Q2n8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "37pmsVabd3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-0c81f5d5c9e0", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 34, - "prompt_tokens": 340, - "total_tokens": 374, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "9uLZBXCb5sbUa" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/1489591da67e505cdd2c1fddb0c3fb8372e880782d7783dd24b13689686a54e1.json b/tests/integration/responses/recordings/1489591da67e505cdd2c1fddb0c3fb8372e880782d7783dd24b13689686a54e1.json deleted file mode 100644 index 749f9a995..000000000 --- a/tests/integration/responses/recordings/1489591da67e505cdd2c1fddb0c3fb8372e880782d7783dd24b13689686a54e1.json +++ /dev/null @@ -1,668 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" 
- }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-528246887823, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-528246887823|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "assistant", - "content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-528246887823|>." - }, - { - "role": "user", - "content": "Can you tell me more about the architecture?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_rmMfpryevUEhVly9yXhlsVG0", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "34Bp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZFY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tJ1px" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "E6ImC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "h3NL0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "18C" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " architecture", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "l4ejZ7nxo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " details", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1i292QtpWi65ci" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EWU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hNDG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1489591da67e", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 23, - "prompt_tokens": 377, - "total_tokens": 400, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "TajyNPPmLXMy1" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/15f4d677d61a2eecaedd1aafaa3925d76f5e98199f0c6baf6736bb140e008cff.json 
b/tests/integration/responses/recordings/15f4d677d61a2eecaedd1aafaa3925d76f5e98199f0c6baf6736bb140e008cff.json deleted file mode 100644 index 54aac096c..000000000 --- a/tests/integration/responses/recordings/15f4d677d61a2eecaedd1aafaa3925d76f5e98199f0c6baf6736bb140e008cff.json +++ /dev/null @@ -1,763 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_UKFNZA0eSkL6fZHbs8ygBd5W", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_UKFNZA0eSkL6fZHbs8ygBd5W", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-861837565219, score: 0.015252756439527764, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'document_id': 'file-861837565219', 'token_count': 10.0, 'metadata_token_count': 13.0} (cite as <|file-861837565219|>)\nLlama 4 Maverick has 128 experts\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t286ibBdBtIrgr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "U9mPebT93jaD6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JJ5Xp0JfYMykC6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oxaRGhfy0xhA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DSR2mw4mJJpT9TQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QR8qbwLCtyBsNPE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - 
"choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lb79bMu1aH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "siOOzlWO8l75q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7azbsqKmkb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " has", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AGmv3xp6PWIL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fZjTPPdOZO2fpsH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7jbYm9zpbFY8u" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GKSrKriL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "U9Rqtd9YhQ2ARK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2HSPrSu1pk90PIF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OPPb0thERVPI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dLA839tLWqtODM5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "861", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sRR5OHXHSzinH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "837", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r3aqOeXUsyjjZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - 
{ - "delta": { - "content": "565", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gsi9YE8xBivSU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "219", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zGSdQm7vspYCY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m8Wm3YWMwlQ8zl5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lCLlH34nfjTsEB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rTGBImqnxH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-15f4d677d61a", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 23, - "prompt_tokens": 326, - "total_tokens": 349, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "HWpBZq9m6jgEu" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/1ce6d4a8ec438d4a6a3618e5e6929b2db20e03a629092274039f33f909bd481d.json b/tests/integration/responses/recordings/1ce6d4a8ec438d4a6a3618e5e6929b2db20e03a629092274039f33f909bd481d.json deleted file mode 
100644 index f3ec20028..000000000 --- a/tests/integration/responses/recordings/1ce6d4a8ec438d4a6a3618e5e6929b2db20e03a629092274039f33f909bd481d.json +++ /dev/null @@ -1,6723 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_or[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "Show me marketing and sales documents" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_u8kC9t9arrSV758kavJmcZE7", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\": \"marketing documents\"}" - } - }, - { - "index": 1, - "id": "call_jnsNgiHFvrr6kNsUUFsiCSdh", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\": \"sales documents\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_u8kC9t9arrSV758kavJmcZE7", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-862946202234, score: 0.002369960119027428, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-862946202234', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-862946202234|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-862946202236, score: 0.002158784645354334, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-862946202236', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-862946202236|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-862946202237, score: 0.0018325740484518083, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-862946202237', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-862946202237|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"marketing documents\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. 
Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_jnsNgiHFvrr6kNsUUFsiCSdh", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-862946202234, score: 0.0020592709044779337, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-862946202234', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-862946202234|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-862946202237, score: 0.0017944759843167356, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-862946202237', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-862946202237|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-862946202236, score: 0.0017688937698533602, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-862946202236', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-862946202236|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"sales documents\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZSn5MU697qcVQ4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "Here", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "N5rvfm91T309" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BnE3ln7FQFHR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " some", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ld48GYDc6in" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NPDnkz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " related", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UvoOoM6v" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - 
"choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2IKW0KTWR8zQr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xoBkRY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "01vXxfssKxLV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EmtDdapoDN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ":\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eSQwScAWiDX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "###", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kuY4neoRsfkxF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GtWbWs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sSa6sO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yvIC7j5hxVVfxc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "D56Q6NAhOLJxxJV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uuxSWHYLaWRoxcZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "W4q7u2IGJ1cqW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UCut703pWstb5C" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { 
- "delta": { - "content": " Promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nLdC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wj7Ybyv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "s", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BRB0ZPvRFgYBq5J" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "daThBK1Hd73d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YFtRkxbVcP9eRI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IzQGtpVbMWGJEA5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": 
"default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ub2Sh1354LjHYIM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Wa1FlElzIeKC6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j4ZkGA52KHr1o5r" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YVZfVuIMH81Fw5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pJbNyltqBt775dA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WY6mDwv7AaM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cC3BxMT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " 
details", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "z7xvM6yp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WNnqIL0fWlDq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LZrx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " activities", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tQrV6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oLVIPYnHkP5dr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3gISF78q0DRu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ls69lpHG3rRx6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tMvk4e249ILg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8sIH3McPjuym" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " first", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "axSYBsqVbX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HBHla0nN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k9acPLLCzOmDs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZjCNQxh4RmGiaVP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yAQJW35m3Z0ja" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RdbikHwApG8FoYJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "daA1mimSDXfi5j9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " highlighting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ohL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FYZpg4zMI5XJUH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FoXIfbNG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Rn8Gsba" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " by", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iRgBjHh7HKzTC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gyVLAZl8FTQYHVO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Dbdz3ymUwLPDZ4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Fjn0sTn07sIFE41" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q3dOBzJqrifsk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " this", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "c82LOs4HUhX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " region", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DoWDJqW4B" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EEpzsNyTvIddgo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "69bYP78C7LdVThc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MGUjVE6I56xN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FVA0doOgyhUC9bE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "862", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qyvunLxeXuZAI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "946", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "4IBwieJ5z61NJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HqCtGPtnhmBmW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "234", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GgJzwrzKyS473" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Qn9Myp6Naw2xyQC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ">.\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DtgLgc2QDB3h" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " \n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LRxo3rHkDD0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5ACr0Kg4v5j5vzQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": 
null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DrIgN8Ww5rVp8RR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dSzOTxzgIrch7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Cb28jVDi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jUO1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZHIMVAb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Results", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q3kSrmTN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, 
- "obfuscation": "RD44hoH1DTiM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ztFlaZWQyZELws" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gkUdqOgD489rlgh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8pHGGfwX5bupAiu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I5RI8t99fTAnT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TPGCVZhdORYyUso" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NKYb4ZofJRoO65" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uWHRYYt2AAO80Vp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " It", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VX6xRNnELKJiP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " discusses", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9BtLn8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6G73KnJPioIt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " outcomes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eVgLMpV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pN6K4uLcdpHFe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": 
"0JRA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SL6QA6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5dD1qSwkbfbeK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Europe", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zvTQh175d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " during", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "D4NcIH1fg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FjhfLgXetRhw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " first", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xnacPUExMF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CYFtn9cZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Tynjh655EpxTK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "f45nWaexWZUMqtN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bqDO5l1Sp2hTO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LWHGxd0sgAuHzyP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "F4pRJcsW6hfIFFz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " noting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dcU3WwS9n" - } - }, - { - 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4HCofu7jO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zF1Rh7iFX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AMCVMvK5fU7xg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e5nPK6tn3mVc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " EU", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "476R1ts4iH8Gi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3uWY64P6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - 
"index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Zf84rhoyo7hcyp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NF5n6ghSkfXuE5A" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ewuNDF8jJ7Uo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Euckmu1Vho0CnUw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "862", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HMTb2RxboE8wx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "946", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QXGbLrEGs3AbQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6ASPJQ2K7l60j" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "236", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4qO1Fx4Rn0WA2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xIlg7gvAj8o6bHF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "X35mrv2YIZgmszg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UPvfKr3wR80" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "###", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nh05VUr5PvpAE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YhFb2tctXf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, 
- "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "juVXu9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GuBMFMot5Okurj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iVlWQlpYwHZ0rMF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0uHQl8LisHK3jhv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HYAgTIo5f99uD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XSEJzZ5P0aKf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Pacific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0nxQ1yPS" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AC9XfL2Q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Figures", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YpOIIz7w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SbN1o0MwMeQr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZRXCR7wXxKdXJ1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yBUQXXuqZ6dBsPd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yGj7EQw6tP3LywU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "srG2DC0vZdB37" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TjPJmmCBd2fuDs0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VIZh5eno1bbsFB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aKwDKEJADUJORoz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FretDy4Futo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vbYCHw9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " covers", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oj8liB3kv" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "itK5tlPl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " statistics", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2SL0D" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ktLnqRi127I5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bTyFNrCgfI6N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y3qyzQmTyH0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Pacific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nlXxtMB0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4pC1KKh0w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xcBBzIHkT6Fd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kITvhZmoukMA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " third", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qHQcQlCLpv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q3KgEC7j" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WRR6vlJQwjdTl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JG07QWwlfVeco7x" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4NXAthMUA4Qzl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "td2IvdQUzIOXpqr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pCJBykZRe4wZPJR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " mentioning", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5pXKA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pTOCoRBWAUBOKJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " record", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IChoBPnsP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "-breaking", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, 
- "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vl5dwhC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dCDYP3a9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q13RfKMkp4jwG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uOvX3cLc4FX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gLXYXZeBf4uMpD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aI5Qz9VhQXMONoS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EGh7O7FPZO3w" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AQlzDuMDeqViVrI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "862", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "34Zt9YaRkG003" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "946", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e6OZUZsfDPUAD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gDc70adNF6PJp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "237", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bOoJ3hFFRsvy8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DcdTUsRqijIv1J7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ">.\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gGUrBIod9AgX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " \n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ijXZF1gNJvS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n4icsblmJuBmca4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gIDXROijGQcx5ZI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hdOQoT47ezZqP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1eQZ168LK4VOhh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3oZ8" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8ZTPtnu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "s", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CEWFDGeUTJkAgGP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gC4ViREbO3ot" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wahQTx95ri1gdS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fVJx0RllRW0Sahq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "89XlLGJA13AAI8D" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RmlGtxMuOpdh4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Le90Cz7Inbx4RMw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ogN5Ew0YI4CAYy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WDxTolMvtXnpdfX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " Although", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vxgyj3i" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " primarily", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O2IYtt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " categorized", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wF46" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " under", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Z3JrsP0GbX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "npJRat" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6ZU02R0AQ2jDTGq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " this", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cyWEzi1CSnQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mOBT6Ab" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " also", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hMz3iSEsAh2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " has", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r1BBblrkREmz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " relevance", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ermGzl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VTVxOmOxgGknP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mbkSyaLXkG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " as", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "g13POliiVwayx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " it", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3AwnFnlXUfX4s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " involves", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VaON0Vx" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QhErgFM3vYAF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " analysis", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Uyf3TSV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OqWJXtRf4gcIx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gdnTayeW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " changes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5iNpMfbY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZuhnRvLuqpzC4w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kLfrVErEbhir8Do" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9dhpXM9ztANt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EUrwM0uCZILPwbE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "862", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vjDqmKD84fyJ6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "946", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SMipXWMp3pEEX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nyGIshkiDvwCd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "234", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tE16biIaPEMTe" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j15mOPb6b1unsyF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ozEB87JDmu00NE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " \n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Uj0Vb5VOrcg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": "These", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WYLTY6e2vno" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jxjdUT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " provide", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KrQNppag" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " insights", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, 
- "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YLUPb4b" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " into", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7axYpHyeAWe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "02Agp69cShi6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " regional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JHjQo8g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PgIzcu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " strategies", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Cd8cL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Jy9Dy0psteq2" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t4hjR7XoVT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " outcomes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CWvmgkQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " across", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TVj652ly7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " different", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rjGeSA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " quarters", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FPMjFh3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PHpv4jRUd7bK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": " regions", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mUBTbAYP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EGgMOrYAAh08mtZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hY8fxXrfEG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-1ce6d4a8ec43", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 242, - "prompt_tokens": 1132, - "total_tokens": 1374, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "WBmBP4MZXT" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/302881895eae2384296096186c02053183a19c75d8a55f0d19ae51e36c7aa673.json b/tests/integration/responses/recordings/302881895eae2384296096186c02053183a19c75d8a55f0d19ae51e36c7aa673.json deleted file mode 100644 index c466608fa..000000000 --- a/tests/integration/responses/recordings/302881895eae2384296096186c02053183a19c75d8a55f0d19ae51e36c7aa673.json +++ /dev/null @@ -1,2098 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_streaming_events[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the marketing updates?" 
- }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_yrecP7RchFwSrzeZd9oqtJEN", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"marketing updates\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_yrecP7RchFwSrzeZd9oqtJEN", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 4 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-450428750203, score: 0.0030630316348127453, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-450428750203', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-450428750203|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-450428750202, score: 0.00301975206110427, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-450428750202', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-450428750202|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-450428750204, score: 0.002866717244815973, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-450428750204', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-450428750204|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[4] document_id: file-450428750205, score: 0.0019597433267324765, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-450428750205', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-450428750205|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"marketing updates\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Xrpaygosn3510z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "Here", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BDhCBGQGwHks" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1YOHEXVDsAru" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " some", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zZZZOpKAJrJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " recent", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2MtDOZdjv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j1d6lW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - 
"choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2bQFZIbH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ":\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3InKOEf7d3o" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JKvbPCoVtsniccE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qnkkTohRT7WYkBH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " In", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZlSVQRmmCBS66" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2Ztjdp9QczZw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7wAKBKHBJphPI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "T3H80NXNTxv8bbl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " during", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Mxe1tPuKr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OmFjU6i9BJmOrF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y4v82Z8RabRd65u" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V2ciLYK8qMR6Rzl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0yv5NxNRvva48" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - 
"content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZdB24d9AeAaxlqZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pc8uBQ8vk4xe99b" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pyop" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mZ619w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " led", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Flzbq5h5oWUm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "csLmQWyQS7yE2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4SfMx0zFKjlA07" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m3fMpBht4RUPbRE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kpOYKfyP50Jy8h" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "h30qslU2SXZlEf5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Eoygzpq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "f4mMz9JFlwYov" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "22kmNpJ3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " <", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MGCBeVQsEwzbj5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1AYK7MCKVvhCfat" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1jCpbAxQCTNM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fwX0vWKzabyT1Ce" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "450", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6eNOFlqExVRbL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "428", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O4MO1BpdX45AX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "750", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "b36tIskCwCBM7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WVog3EE4vswE6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BmYKOpwDCEJ0bC0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ">.\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "svmsQJDnvg0v" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fro1EcEmv8FGmRr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yA7QHUxA9ts6gzl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " In", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GmIpQJVXKWriu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " Europe", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HDunCtP3I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "M1pwq5Il682ZM5q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m855" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Osx0co" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jz0B9Lk3AKaoT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OPPNsXii2uB8z6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": 
null, - "obfuscation": "ibqCwWa8p3zffbU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DSihoAJ6GjECKbg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cBygMtobAWYY0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YgtcLvYzUDRfAav" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " showed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DaJComzu6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "StVHFW8sz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "f6lNyMRm6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5RUEmAr9oCfAW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yQtdd0Gc8IxY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " EU", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HanFX4Mzz2VVD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p1tHf86M" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k1HdwVaHZao7R8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qSUxyyJxMJEuL7n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": 
"7xVgSj2f2FLU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hyOMOnQHaawCguP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "450", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KnedBZG6e6Y15" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "428", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n6KeCl7WDRWm4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "750", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "60EAbQzepegLW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "204", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "v9kesNrWf280f" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rlKfWwpNKmYOuoJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7g9TKBPoM8gwTE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DFR5tMoC3v" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-302881895eae", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 72, - "prompt_tokens": 701, - "total_tokens": 773, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "yM4A1q422wS3s" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/3183b7beec7e4e89f8051a2aec67b4a1f3b48fb71f8682967d35e19498eb9386.json b/tests/integration/responses/recordings/3183b7beec7e4e89f8051a2aec67b4a1f3b48fb71f8682967d35e19498eb9386.json deleted file mode 100644 index 2a6d0fa2d..000000000 --- a/tests/integration/responses/recordings/3183b7beec7e4e89f8051a2aec67b4a1f3b48fb71f8682967d35e19498eb9386.json +++ /dev/null @@ -1,1901 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_region[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the updates from the US region?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_gaVvfdNi1X9Ejqz6X9cirf8K", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Latest updates from the US\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_gaVvfdNi1X9Ejqz6X9cirf8K", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-247992711531, score: 0.0055330209977201275, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-247992711531', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-247992711531|>)\nUS technical updates for Q2 2023. 
New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-247992711530, score: 0.0030342058372720073, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-247992711530', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-247992711530|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Latest updates from the US\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xbgQU1pz8digKj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Y11SOVtDhizUA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " latest", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KONaORapM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": 
"gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OTVgisCu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "F8fFCLXyF3s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aZKc5kF6Izj0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EyOtxHnHCxT21" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GhX6z0sWt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " include", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cHRnB8Eu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6w6PFQi9bwVN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": 
"rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " deployment", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wNt8N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dWLFRaAtoTtcf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " new", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hdKEemnii9Rc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " technical", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "a0rEaC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " features", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "63d1HW0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k3gxfpDS91QOq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XSwz9dqUNC4lvR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gv7fw5ky6B5m8wm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MILBm1J6SSF8RI8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NufIjgz7sQDo0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2TUf8mnjobmAciQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ku47E3nyPnAzvT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WzFxI6InLRxrxQw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - 
"choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "90yxqSuELQdL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UISsAwWYh9eJMAw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "247", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "anMEhYBKfqdq6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "992", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gSex80SzbOxub" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "711", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3lWS69pM375sg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "531", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EMrWsOSsCUXFE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ba2pdxGvVL70MyB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B5j8S4TtjCp7Lu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " Additionally", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jmh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7N9iUjFprpMKotf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qfwfzHA884WRM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ERZ3FMwTc9VYnV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fvctmbn442LEVlO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - 
"content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r4sCuVkOSSBxaTp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hRLijcyHVt3zq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nYm9gYiPJCD29ay" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EnERakfsNavvF9L" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " there", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QWjVNGC1pY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " was", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AAZZ8y1vfM5s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MwAQ5wkQCuLzfv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5Ez58wn4m95X1Wp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vn29orqNToIrHK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hxZ1OTjPnyewmVT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fld3Vw0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "u5dfAIrqLfJHO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VllH83vd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " as", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B8PYgitfJ9NYw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hWxCmDMnEBjWHW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " result", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rd3oiOusk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2JENcsF1FE45u" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GQjl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cmN3YM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "9lO9rcO68XigRm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NOoKiyp1NLzDQ9A" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B9pKjViMd1YQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Iag83CfqbuzyMfI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "247", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "see6vhPp8FGDv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "992", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SDttS5cHQrKRP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "711", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1G13JxtGAwVSX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "530", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "G3jMGWayv5LgO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QQ0UWwE65UEv6vJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bk3Z7Esd27vevH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lGxLL18UVY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3183b7beec7e", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 65, - "prompt_tokens": 467, - "total_tokens": 532, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "zmImLRoI8q9Bj" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/354405fd4255ae9210d875c30b8c9e00a75d79046a954e4c5858e36c4fbd70c5.json b/tests/integration/responses/recordings/354405fd4255ae9210d875c30b8c9e00a75d79046a954e4c5858e36c4fbd70c5.json deleted file mode 100644 index 49cc0dced..000000000 --- a/tests/integration/responses/recordings/354405fd4255ae9210d875c30b8c9e00a75d79046a954e4c5858e36c4fbd70c5.json +++ /dev/null @@ -1,925 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does 
the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-528246887823, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-528246887823|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cNAmgzguPKxzrT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EA42hQaqSv6Dl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BaXUa7hVjk5nx5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Qtlt9e2mKNnd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4uMDzeUHI2m6BqK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wAiaIwGRcePCkSP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - 
"choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9wrgxL4SLF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3LAZNfrT4P56F" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FQnWVVG3uk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " has", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QBV750ljlBuw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CvHO7IpkYl1t6WT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t9yXtBtcAN2Ym" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nYeTyozH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "l5TEj9wVsDVVa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " its", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eqM279ge3tlP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " mixture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "adEY1TL0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AQUpfvABqhFSw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CW4Mdlfb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0VM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - 
{ - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lVhduxzervQNW2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jLw8pSzPx1Fpvdu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jNRy9leoYXdz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "T0aCg9vGAgtyyqX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "528", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pecAt4HBqHqgq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "246", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HLmt7RtQ5V2Ad" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "887", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": 
"default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nayq4hw8P8k8X" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "823", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PONv6LbvqDB6n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OOocdPvPE66JCLe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IoRrfOtEJevEyh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3v8gEWZUDD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-354405fd4255", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 29, - "prompt_tokens": 332, - "total_tokens": 361, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "Vn7WFfgLy7CQc" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/3aa2233e21470cd1c2667b6e4e31cf680d89b3b3c936882767e4500b0cf6d517.json b/tests/integration/responses/recordings/3aa2233e21470cd1c2667b6e4e31cf680d89b3b3c936882767e4500b0cf6d517.json deleted file mode 100644 index b8ecdd13e..000000000 --- a/tests/integration/responses/recordings/3aa2233e21470cd1c2667b6e4e31cf680d89b3b3c936882767e4500b0cf6d517.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": 
"tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_FzhOmTdZThRndI5rSASPdAqr", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3cjx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7fk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "S" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "G" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OdWuY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "l2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KaloW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HzeTK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ 
- { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ceO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WI17fZQOl0jugmg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rQq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SOIKMbAKjFhXxq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gbX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bZLJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3aa2233e2147", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "R0oA3PanLpARhLY" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/3cc58466163409f7044471fb2a27413151487768ad8d178b4c1c766ebf757a48.json b/tests/integration/responses/recordings/3cc58466163409f7044471fb2a27413151487768ad8d178b4c1c766ebf757a48.json deleted file mode 100644 index 46158a0ac..000000000 --- a/tests/integration/responses/recordings/3cc58466163409f7044471fb2a27413151487768ad8d178b4c1c766ebf757a48.json +++ /dev/null @@ -1,1450 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_streaming_events[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the marketing updates?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_Vdxnbo2D8ds3BuKCon8XUt9P", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"marketing updates October 2023\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_Vdxnbo2D8ds3BuKCon8XUt9P", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 4 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-284290576316, score: 0.004140319532899986, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-284290576316', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-284290576316|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-284290576317, score: 0.004082999220108273, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-284290576317', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-284290576317|>)\nUS technical updates for Q2 2023. 
New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-284290576318, score: 0.0040444179984006476, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-284290576318', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-284290576318|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[4] document_id: file-284290576319, score: 0.002818876355005682, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-284290576319', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-284290576319|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"marketing updates October 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nx9r4D69rcypWA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "A4ZKGkPtv2Wwqyp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " couldn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "i2Vdx5Y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fdW9EbykXqU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6chY1km" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dK6g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - 
"choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4wmjbVkImgz0Z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rRdBwM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "821V5Shj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FuoY0c1SRhRT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " October", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UAtkaVUB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0nAgeoEbS83tVPl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XTFlUvexf6qXO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vGoyiJEkxg2e8aV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gWbneuF5WI4RuRW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " If", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7brhkLMJznzgk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nLtGTa3GbOL1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d2pMwFmStIRJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " looking", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7OkqUapB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - 
"content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e7VxYUK15ayF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " recent", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n2ad1L6N4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aBjq14" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " trends", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PAtcZG4HR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "c7gbk7ZWS04MN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GxR9BDz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", 
- "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yvLyd0D" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ysvSUk2r" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Z9CS6e61ydO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " this", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Eixc8cqhCMt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " period", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "djozNodvK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fIShJ8hZ5mEx2p3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iXsDOto9eL2Xu8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " recommend", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hYLtEN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " checking", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yXpWOm8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " recent", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bJu5dQkiN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " industry", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8YpXBbr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " reports", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VcJy96vm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mFg5sjoMQhsKO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " news", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "cgUn7xv07qK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " articles", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oewS46J" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wqnPVTmcDKwi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XbYtUitg7HE4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " most", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ayvl63ALzGx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " up", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "flFZZPrv2kJbB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "-to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LNGYbEWIaqAqk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": "-date", - "function_call": null, - "refusal": 
null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cpr7cBlyclh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SCyc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gPPWMTfaAVWFaJ0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Glc1dAhUHV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-3cc584661634", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 48, - "prompt_tokens": 708, - "total_tokens": 756, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "0gPDKH0ISn2VM" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/436039b9bd78d795fe43610ba51875025701bd363939f36299cf66846167b173.json b/tests/integration/responses/recordings/436039b9bd78d795fe43610ba51875025701bd363939f36299cf66846167b173.json deleted file mode 100644 index 51e086faf..000000000 --- a/tests/integration/responses/recordings/436039b9bd78d795fe43610ba51875025701bd363939f36299cf66846167b173.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts 
does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_UKFNZA0eSkL6fZHbs8ygBd5W", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZEDA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FWy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "A" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - 
}, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eg5pY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "i3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MIUQE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PcKai" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rZW" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5efBkaSneq3MYiM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Xzt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VAZ5PINoNf5uLg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5kI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": 
null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BvPC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-436039b9bd78", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "967FbyvY0943LNF" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/43f424df929f565d2255c6d491d4bd19d8b1e30993882dad557fbdb655c65149.json b/tests/integration/responses/recordings/43f424df929f565d2255c6d491d4bd19d8b1e30993882dad557fbdb655c65149.json deleted file mode 100644 index 9843deb0d..000000000 --- a/tests/integration/responses/recordings/43f424df929f565d2255c6d491d4bd19d8b1e30993882dad557fbdb655c65149.json +++ /dev/null @@ -1,586 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts_pdf]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_YkpKfL9mwbGk2BLqNDoaFrO0", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1mfS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d4b" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "l" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JRCsr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QMJkY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "scjuN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ocj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mo3wLXjk9CANvH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "byy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8nAM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-43f424df929f", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 22, - "prompt_tokens": 74, - "total_tokens": 96, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "JFn10T2PhGSmfeW" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/456673fd514b80abfa8bb13c25acb9fa640daa3a1e1b5eef7a96863a592cc7fa.json b/tests/integration/responses/recordings/456673fd514b80abfa8bb13c25acb9fa640daa3a1e1b5eef7a96863a592cc7fa.json deleted file mode 100644 index 6d6913d5a..000000000 --- a/tests/integration/responses/recordings/456673fd514b80abfa8bb13c25acb9fa640daa3a1e1b5eef7a96863a592cc7fa.json +++ /dev/null @@ -1,1091 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts_pdf]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": 
"gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_M8gyYiB39MwYdJKc4aHIGbfA", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_M8gyYiB39MwYdJKc4aHIGbfA", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-379221123213, score: 0.008294223190489081, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-379221123213', 'token_count': 98.0, 'metadata_token_count': 11.0} (cite as <|file-379221123213|>)\n, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n" - }, - { - "type": "text", - "text": "[2] document_id: file-379221123213, score: 0.0033899234401598543, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-379221123213', 'token_count': 498.0, 'metadata_token_count': 11.0} (cite as <|file-379221123213|>)\nLlama Stack \nLlama Stack Overview \nLlama Stack standardizes the core building blocks that simplify AI application development. It codifies best \npractices\n \nacross\n \nthe\n \nLlama\n \necosystem.\n \nMore\n \nspecifically,\n \nit\n \nprovides\n \u25cf Unified API layer for Inference, RAG, Agents, Tools, Safety, Evals, and Telemetry. \u25cf Plugin architecture to support the rich ecosystem of different API implementations in various \nenvironments,\n \nincluding\n \nlocal\n \ndevelopment,\n \non-premises,\n \ncloud,\n \nand\n \nmobile.\n \u25cf Prepackaged verified distributions which offer a one-stop solution for developers to get started quickly \nand\n \nreliably\n \nin\n \nany\n \nenvironment.\n \u25cf Multiple developer interfaces like CLI and SDKs for Python, Typescript, iOS, and Android. \u25cf Standalone applications as examples for how to build production-grade AI applications with Llama \nStack.\n \nLlama Stack Benefits \n\u25cf Flexible Options: Developers can choose their preferred infrastructure without changing APIs and enjoy \nflexible\n \ndeployment\n \nchoices.\n \u25cf Consistent Experience: With its unified APIs, Llama Stack makes it easier to build, test, and deploy AI \napplications\n \nwith\n \nconsistent\n \napplication\n \nbehavior.\n \u25cf Robust Ecosystem: Llama Stack is already integrated with distribution partners (cloud providers, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. 
Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SwfFM0srbgssjN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TMGpD5yrMcsaA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hahFDk4feSTCWP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "95LWXZ3gscj3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8e1alI9PtPWeh4N" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pXcHEjkUvIkiPIc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZBfIZFS72Y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6j7v1RfeXAKuX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n98X3HtSR3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " is", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PwXBDEpj60QVV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AydGbRLA6VQmbT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " Mi", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "67go56fuzMfjs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "xture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2qYVzlf84GT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "-of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xLePcKPzOicKY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PlAV9kCmVSJFdei" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "Experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1s7VSQ4bj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " (", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KpX0NqaWcNx8lF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "Mo", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lhWajHou1GuZDs" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "E", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BQNIB9NDUPeCqDv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": ")", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CBPR5MhECVN8z6S" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "J1EB8maIVK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " with", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oXfyN6XnE2I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VUJFkglZr6NoMj0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rfc9ULZD1ePOC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ES9e0JOf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jtSBYNEQUwIk4w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VYWqZdmqVE9ASfF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "P41vqRnJjMXs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Byy12tOEJNEzC0i" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "379", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t3NQ6wh9nJOYp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "221", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VtpvQPfa7bckm" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "123", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8nUw5oqI3pe5g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "213", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7Fmiy7C0pi2oJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8iORdw4bWjrxjzO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5iskgj0JKopJ9a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "18czcgOzOS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-456673fd514b", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 35, - "prompt_tokens": 992, - "total_tokens": 1027, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "Xswh7p7QTKmf" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/48cda1e432d9f9c3ce773147998fe5621478e542632f882fee067fd068c410dd.json 
b/tests/integration/responses/recordings/48cda1e432d9f9c3ce773147998fe5621478e542632f882fee067fd068c410dd.json deleted file mode 100644 index 21162c632..000000000 --- a/tests/integration/responses/recordings/48cda1e432d9f9c3ce773147998fe5621478e542632f882fee067fd068c410dd.json +++ /dev/null @@ -1,1834 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-528246887823, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-528246887823|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "assistant", - "content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-528246887823|>." - }, - { - "role": "user", - "content": "Can you tell me more about the architecture?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_rmMfpryevUEhVly9yXhlsVG0", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model architecture details\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_rmMfpryevUEhVly9yXhlsVG0", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-528246887823, score: 0.005821830799489026, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-528246887823|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model architecture details\". 
Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eOzeWuuyzb1Cax" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y0Y8odXG9qdUe7I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " couldn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EcimOlX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2xoX1Mfyt2k" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " any", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, 
- "obfuscation": "6N4ypYmL22bj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " additional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tXBaT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ucy0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " specifically", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WMb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " about", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JMLc0qNW7U" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PGuTOoueNkNj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "758" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wTolxcHFQFnhy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XXWkZBijjiRl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ENxaVZp29TKmrQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GcRsklIKy7uc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AkhqMJI2vVCTa87" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "18zmzdHXdV17dS0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": 
"aJCGfZTdv7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yKOqRBtVIHg8C" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0wLo48R9fN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " beyond", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wOrIhrEjy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " it", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zjl5lQZMpNEQy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " having", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fHiQTRhsZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SyRu2cUUzWw0dSR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "krUOaeWWs0nTm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "53ciIATg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O7TA3jGRi5gOK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " its", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eVRuQvfCo6jZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " mixture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NgLWPDUG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6hmxppafQG3w7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y0edqNvN" - } - }, - { - 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IXI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8ZFgOgkQUr0KHG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "F2x2Fxv2PW85Ntr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "X1kLNcc12pxW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q9Pz4rdZGASZqnL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "528", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nmto1jiHrMUGs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "246", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - 
"index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zOweP5LmiAtRH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "887", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fN5DDNzmCM8HQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "823", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qXtEXESLuSvKj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9tBrTzR3kDTCVpQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OlfqU2ckEurvkS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " If", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lt0DqkfjL7iWa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MjoezndWRWvB" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " have", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SC3tCh87hus" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " more", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1r2cYBoAOF4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RdMKcy9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " aspects", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Tf5cQpGn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tPePnPjTkt9CC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Wkhuo2lcuAtf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 
0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qGD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e9WcLaE3hPnb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wrCPIgJbUCrs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " interested", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pJiej" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NHsi65VAY35Eu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kLk2gXY4LnU07wr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " please", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sS4rWhZAr" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " let", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xTgqSZ5RDY7F" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " me", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vf8BNUHLXCp14" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": " know", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8WYGLMZP5AU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": "!", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BTg7xFpysmTHZuI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p0Ql9TrOAI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-48cda1e432d9", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 61, - "prompt_tokens": 633, - "total_tokens": 694, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "A8Hnqa5510Usq" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/5c9b18529d9400f8992d48eb3204e1ad41226bd16bdac187a98b591ea219062f.json 
b/tests/integration/responses/recordings/5c9b18529d9400f8992d48eb3204e1ad41226bd16bdac187a98b591ea219062f.json deleted file mode 100644 index 62a8fcff1..000000000 --- a/tests/integration/responses/recordings/5c9b18529d9400f8992d48eb3204e1ad41226bd16bdac187a98b591ea219062f.json +++ /dev/null @@ -1,668 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_FzhOmTdZThRndI5rSASPdAqr", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_FzhOmTdZThRndI5rSASPdAqr", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-797509666839, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-797509666839|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "assistant", - "content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-797509666839|>." - }, - { - "role": "user", - "content": "Can you tell me more about the architecture?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_0ABjXmEaManGBvCYVZD4QgMt", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "4WCi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "xZw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "v" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "pxc1l" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "rw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "1uQsI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "3GUgc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "iSJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " architecture", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "PBkDU4h5O" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " details", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "FhjYMfvqXEQemh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "bJM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": null, - "obfuscation": "SZTO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5c9b18529d94", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_65564d8ba5", - "usage": { - "completion_tokens": 23, - "prompt_tokens": 377, - "total_tokens": 400, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "aghfgDFthkUtG" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/5d8c5aed931209aeeaa35ead5654389f7ed38381807ba34956844352a35db706.json 
b/tests/integration/responses/recordings/5d8c5aed931209aeeaa35ead5654389f7ed38381807ba34956844352a35db706.json deleted file mode 100644 index ac2d20fdf..000000000 --- a/tests/integration/responses/recordings/5d8c5aed931209aeeaa35ead5654389f7ed38381807ba34956844352a35db706.json +++ /dev/null @@ -1,1843 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_and[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the engineering updates from the US?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_cpbC1lb0MlCcVyxpYT10GvpE", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"engineering updates United States October 2023\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_cpbC1lb0MlCcVyxpYT10GvpE", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-90357487492, score: 0.00660655550631643, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-90357487492', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-90357487492|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"engineering updates United States October 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zQkkxhoeujkc7P" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2xaSzeY7yKDrKpg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " couldn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "en9p5ol" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TOfMFx6hfEx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3P539nE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " engineering", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y4Bs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - 
"choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q1nDvqTq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wwwrhuEfHUX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hku7xlCFCPJX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CxO8KS2OSYV7K" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "l37B1YQNaVuP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " October", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OkU6tibY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QUJwQ2QMGQ7lBwi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xSMBPQ9YphrsO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YLjDf5RqAwAU8v6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fE88WBjGT2JuX5c" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " However", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1FZU2zRa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VtB4pZrnTfMQS0a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " there", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2osTTy3Acl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - 
"content": " were", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NWt4foJGaDd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " technical", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VuknOG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4Fg28gZO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NIrc5VEw2dn9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " new", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eLlxZ1TV3SY8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " features", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mKY5mcI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " deployed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", 
- "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zrSUS7W" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "K588szoBe2CWG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nPKpJVDcCGOq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wdiO002WOCZMw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dBI9C2EaJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YXDJrAoM2xpn3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nbHskKzSk5Tn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " second", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xosvMX9WT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "casRFW5t" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2833DB7wHte62" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xZMH8EG1VcVUI05" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DNba0WYBfOB63" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "D10nFyYjOBtDhNB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "SiBpzADVPzyPLB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gSFXtcS2xJrLrBn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9sb7koE3Ibmp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YTRbcGX5qHRIum0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "903", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VB4Wnn3wnB5rQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "574", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n7OUR9hI5aIZW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "874", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uTb0mzQGzZ7NC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "92", - "function_call": null, - "refusal": 
null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Eg5vmUYKY4dYK3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LvKzyCi1SJ2eHvw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DNnVQcFXnZetlF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " Please", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LBkXQasfC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " let", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "U1XMN0DtNVNt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " me", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Aaw7bepHICtY9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " know", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, 
- "obfuscation": "man35BruyNc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " if", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6sAVmPIjOkr00" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MIFWy91OAmYs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " want", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cfHX0lSqxUO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " more", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YcdaojiiLrg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " detailed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BP5KY9N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " search", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Py4nUtGu0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " results", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wdB7whVM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0fMWivMMJdj6S" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4p8vWkE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": " dates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ACCKnnDEJm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nEFmjSyYEL4C7m5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jdU4yQikwA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-5d8c5aed9312", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 63, - "prompt_tokens": 345, - "total_tokens": 408, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, 
- "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "WoKxnDwdUb8AK" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/7437ac7a1debcb3fb24b24eff26a26d16cd34bf338c9843ca8f9d5ce92101d72.json b/tests/integration/responses/recordings/7437ac7a1debcb3fb24b24eff26a26d16cd34bf338c9843ca8f9d5ce92101d72.json deleted file mode 100644 index 24837e75c..000000000 --- a/tests/integration/responses/recordings/7437ac7a1debcb3fb24b24eff26a26d16cd34bf338c9843ca8f9d5ce92101d72.json +++ /dev/null @@ -1,925 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_FzhOmTdZThRndI5rSASPdAqr", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_FzhOmTdZThRndI5rSASPdAqr", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-797509666839, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-797509666839|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vt3GstRZ0aEOtQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V8LCLbwuWE6zL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AE5LGiL5P2vkSi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "URAGF9HPepld" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YhQyLMSQQBttHSZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MTXfd73x6CxT4jC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - 
"choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FRyNMsTqpf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pEvb1BhfCzIu3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ti8cCbCXCN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " has", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ePgYRhIsTnxM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Er6jdeAhYhP5yZo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kKbskZOW0nnLX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9EkfQIUH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "s9t2OgQMO8lpH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " its", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Y5meY2O4Ow06" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " mixture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d8vzlOjF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BZ0ZP2N8lYwx9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GLsfkS8o" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3le" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - 
{ - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6yIln93VE6CXZc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "12h0ytbne30ZcjD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0EYqrf9KJNnT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9ZNIDg7zELT5q61" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "797", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IGDExcadNKQ6z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "509", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "36Ncp9snNGK23" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "666", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": 
"default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oVuXPzQ3kvZIK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "839", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZIqozh3gNXBtg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j1xdaBLDmWec5Fo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "057ZL7KuQlBsN5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Mizb7MVtEE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-7437ac7a1deb", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 29, - "prompt_tokens": 332, - "total_tokens": 361, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "p2sRYaAtwLWB8" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/8d5e85fc1c0a827552218dceb170769d3809b1c271b0e1baaa182d8b01ad8571.json b/tests/integration/responses/recordings/8d5e85fc1c0a827552218dceb170769d3809b1c271b0e1baaa182d8b01ad8571.json deleted file mode 100644 index da0e36323..000000000 --- a/tests/integration/responses/recordings/8d5e85fc1c0a827552218dceb170769d3809b1c271b0e1baaa182d8b01ad8571.json +++ /dev/null @@ -1,1550 +0,0 @@ -{ - "test_id": 
"tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_date_range[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What happened in Q1 2023?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_9VwRE1rGyuqq0lSoaOwqpufL", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Q1 2023 events\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_9VwRE1rGyuqq0lSoaOwqpufL", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-664068282484, score: 0.003343484460164191, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-664068282484', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-664068282484|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-664068282482, score: 0.003225665460848446, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-664068282482', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-664068282482|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Q1 2023 events\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xd1EWvG6Br7Mcu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "In", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "21AlcSbxTO2NI0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aZgCL6JG8N9qZT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZzfpP90mVCB5zkf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eeJiipF7y4uae9b" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5ZicAj0XS63mP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - 
"choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xFWsbGuSPpp3CP8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YebpJFCkNQtXTbc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " there", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m6onZvh1zo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " was", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GraxfuUTykoe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ktLD6juI9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "N1hMwY8uv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ErUAc22ysLHKJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Hc76PzZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "u5i7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6Nww0q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Rf4bxmgcX1T6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lXMjz3QMliBIE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YwcM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - 
"content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NiXjBs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " resulted", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QqspTfz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8ii7vh2Jb7Ylx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JiWbPj2MRLeoKA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "348XQmrHiPQxofQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kAeKzShFT2WSiQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OoyfjAD8Z9yBlHP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fWb5eLD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sobgcicZCvevA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AaEaIOVK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3LUwfzE4bKzeB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QLFqMwueqIXb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jaemHOrdklq9X" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " region", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iz0HAKWpi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fNevow6v8az7u0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qs9rTyPemvjiu9g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4HofZYdLTfhz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2y8Leo6hU70iMrC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "664", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rSjz9TEnMeUY4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "068", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "4wkVg4aYwIh0W" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "282", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vDoQXY7ktZ6Lt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "484", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sc96nstSQzCUX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hEAbpk0WmMMcOOq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "><", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TiabYQfNL2DXNi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "x6DEcbivhm7XICE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O25Jkg2FWWPI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": 
null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Jn7sXN7Pn4hZt7B" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "664", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eF7eob56ddmSj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "068", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "A0H55G1XICkmB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "282", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4jdbPjqHim9R3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "482", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "i5OLbmY5H9Cqk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZU4vVlx7O6c55qE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - 
"obfuscation": "P5rydlYeSWFjpd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NlruXH6LVI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-8d5e85fc1c0a", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 52, - "prompt_tokens": 466, - "total_tokens": 518, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "KpIKP0E7E94PM" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/a37a1c209697b900177f10ec5348b0ced3394322927971a2f4dfe52c2768ce57.json b/tests/integration/responses/recordings/a37a1c209697b900177f10ec5348b0ced3394322927971a2f4dfe52c2768ce57.json deleted file mode 100644 index e06bfaba7..000000000 --- a/tests/integration/responses/recordings/a37a1c209697b900177f10ec5348b0ced3394322927971a2f4dfe52c2768ce57.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_4ac6gxccWFxDvEl8BizY3BJw", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KVv4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "07x" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WQVqQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "b5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5cG73" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0TZwr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "70V" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tgfI6t9sl0qk9lj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bb4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YwrLoyA0SH8QKR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r2N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UcRX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a37a1c209697", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "Z9OHDBA6RWfdcqi" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/a9af62eaeb01f80a9b09b7c7ba468e6a34945cbf0b12aa2c74b2caedc31a821f.json b/tests/integration/responses/recordings/a9af62eaeb01f80a9b09b7c7ba468e6a34945cbf0b12aa2c74b2caedc31a821f.json deleted file mode 100644 index 0cb8ac7e7..000000000 --- a/tests/integration/responses/recordings/a9af62eaeb01f80a9b09b7c7ba468e6a34945cbf0b12aa2c74b2caedc31a821f.json +++ /dev/null @@ -1,2198 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_date_range[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What happened in Q1 2023?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_a6TuAAAmLcfVWfz4H3VriVLv", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"key events in Q1 2023\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_a6TuAAAmLcfVWfz4H3VriVLv", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-151637398237, score: 0.0034820368215192597, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-151637398237', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-151637398237|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-151637398235, score: 0.0033373451138519685, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-151637398235', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-151637398235|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"key events in Q1 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. 
Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FUOfxQCJpoN8Ac" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "In", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JtCuvuKyUbrAbt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qhTUHTQlrwXU0U" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y6wJsX0ap4QbHGG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OL0uTkEU8cMZPdo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0mlyebdjO9iCj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - 
"__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "95RQ1BybFVEeOc7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OWKlQBEZkc0wsq2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " there", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CZLdJybw0Q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " was", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WpFn07JrgbOg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GSeDlgHdp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2SvXL4cDz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": 
"gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hHJYKNb0ZPFgV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zpuFDkz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mDdd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QbGZOR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yNhvDFVl7tilGHa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " suggesting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OEz5p" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kdgTe5OLeAnYdy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - 
"id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " positive", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hV7VamL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " trend", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n2v0HSlXjg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZFKtQB6USHuMl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " EU", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AFIDIU9dGQJ4x" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iioEXzYK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qU8d2pPUEKgKCD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IaWnnKBiO6Qf5zb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PI3GH5WJDsv9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rxRdxUoXHnSzq8i" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "151", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EM1PtY9vLHHky" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "637", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9GooWUHnQ2qAt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "398", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IyiDkF20T3skQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "237", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zonKkqaKIjMs9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - 
"choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vhBMsoF9J6vJfrA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JVo6EBdPXaSJig" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " In", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "H2fKEeiz7VPy8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wnvgMjFFaXfw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " United", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JnJ8KCmzx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " States", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yg0qYoQao" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LzHxTwJb3TGqFY4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3qGu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UfofNg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " led", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IPIyv1PEcg7H" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xylH74SZS0Z2e" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JpLXyQASFCMrri" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k35I1FSYjVTvDdD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - 
"content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MfsreCLZy22ePz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kAc78c4crPTx9ti" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xaRO1oN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vdkXj3BS1GN8g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t2R31Xot" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vhcv7FZZikNgpC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WUltM5F05EgHUlJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e6Q0E3SMLHNp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Hu3MiziaqEvcdAf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "151", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XWaYLY8lCPPRk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "637", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sx9VOmWIA58h5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "398", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cSbrZOxDOWsjS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "235", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "S02wAL3Ay31YZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": "|", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "clIJQS8VeawXpEf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e6bAxu9n9Q6rMb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " These", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mRjmT3yB3U" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " outcomes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pwNNj8y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " reflect", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "STnse8g6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " significant", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "73uD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " developments", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IW1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SeTmIBQHq8bhH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ye2Ql05U9OfQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LXwQyM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " sector", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "97Veivzyv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Y8pwna8zJhjG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " both", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iyB99YAAwT5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " regions", - "function_call": 
null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EahPYT2z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " during", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nRmQF4TI8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " this", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nNHLBxmmnaX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": " period", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TvtyJSr8F" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fh4z4XUNWx0FkjJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "73H8Ymm9DP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-a9af62eaeb01", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 76, - "prompt_tokens": 472, - "total_tokens": 548, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - 
"reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "zkIB1oeGf8c62" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/b088ac3381c34089cf0eaba5d4b922271e8f8e8f169d94c5505de2a3ebe69c47.json b/tests/integration/responses/recordings/b088ac3381c34089cf0eaba5d4b922271e8f8e8f169d94c5505de2a3ebe69c47.json deleted file mode 100644 index 1b58212cb..000000000 --- a/tests/integration/responses/recordings/b088ac3381c34089cf0eaba5d4b922271e8f8e8f169d94c5505de2a3ebe69c47.json +++ /dev/null @@ -1,763 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_gZXRKN1HMDC16NP9wNPAkP9K", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model experts count\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_gZXRKN1HMDC16NP9wNPAkP9K", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-864460993305, score: 0.011418757438261288, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'document_id': 'file-864460993305', 'token_count': 10.0, 'metadata_token_count': 13.0} (cite as <|file-864460993305|>)\nLlama 4 Maverick has 128 experts\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model experts count\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mdcAUKOPQatFDX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SXcyzIHM4JAb9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tIP37AbLRJtgw3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "T6hl8O1yALrY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EXmrHS7V452DM8U" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WtvMno5JF3BbJja" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - 
"choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wasA4Ibq0N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zC5PHOZiqE8hV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TCLUWLK9fl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " has", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p1X5NlNo8vvP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B4xarQa3WvzzZke" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BEDzpnch9VTi5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NbeiTxmO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oEzJnEJlvJlvVB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "COTg5EQAvBoF1X4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "agnuCzlpfNMe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6VItAaWcjjp8PCq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "864", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IG901zUD4iD52" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "460", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dTfo3F1G4iNgN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - 
{ - "delta": { - "content": "993", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1hexBY3sKrN92" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "305", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "z2dp3INsqrUxD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Dt8q83oqrxwR8j6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ALKi5zh4iadh4W" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j7ONgCjwww" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b088ac3381c3", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 23, - "prompt_tokens": 324, - "total_tokens": 347, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "frqCbYK1PAWId" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/b2d4b49b6d354fb75745d0b2ad461dac007bb6afa191350b03077eb71c316c61.json b/tests/integration/responses/recordings/b2d4b49b6d354fb75745d0b2ad461dac007bb6afa191350b03077eb71c316c61.json deleted file mode 
100644 index 021aa70e4..000000000 --- a/tests/integration/responses/recordings/b2d4b49b6d354fb75745d0b2ad461dac007bb6afa191350b03077eb71c316c61.json +++ /dev/null @@ -1,623 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_gZXRKN1HMDC16NP9wNPAkP9K", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iVfJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "F4s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null 
- } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q90JJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "e6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RVjmv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fxip1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "APw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SGxrR0wH4r9xmj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " count", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DPW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - 
"created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pWLu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b2d4b49b6d35", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 23, - "prompt_tokens": 74, - "total_tokens": 97, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "AQxCe0R2ppw6hGr" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/b3e7d7ee61c61b858b8a5f6af016e26bc3f948640354c1210f02c3b63bd26a55.json b/tests/integration/responses/recordings/b3e7d7ee61c61b858b8a5f6af016e26bc3f948640354c1210f02c3b63bd26a55.json deleted file mode 100644 index ec816c876..000000000 --- a/tests/integration/responses/recordings/b3e7d7ee61c61b858b8a5f6af016e26bc3f948640354c1210f02c3b63bd26a55.json +++ /dev/null @@ -1,2522 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_category[openai_client-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "Show me all marketing reports" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_oijotTqrVfTYc1H7Ty7OAMHx", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"marketing reports\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_oijotTqrVfTYc1H7Ty7OAMHx", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-824569833174, score: 0.0027636340573366355, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-824569833174', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-824569833174|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-824569833176, score: 0.0025355615447054444, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-824569833176', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-824569833176|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"marketing reports\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. 
Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cos6THa2ZX2KuU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fZWSFgk1Wuv4xA7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " found", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "07DSnJFHoj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " two", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V6ZMAXclTLwu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7oOKrE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " reports", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "trWMCEEC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ":\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OxyIyP7u2Ob" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r2RZSdLmE80CNIG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yJvOtk4cU4VUUlb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3Oyep6T6FRdaz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6BBhoQizlwv7m7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " Region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": 
null, - "obfuscation": "Q5iXHjeXj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ":**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mS75FlBS3PoNC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2wNojM7VjGw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Y8xa7BkEU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " focuses", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "W4FilnFr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mAuy2OnOvwKqh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YnRs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "secdWb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eYAu4NW47CVM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VC7kv2aFDsX83n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BrOd6C1yPMX8FOM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qFSCWcv23GRdyza" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fuaXbYk7cEVxV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q14il9Icry8ur4W" - 
} - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9c1sbULtapcYo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "23VcgXbVznMI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ce8ZiEMyhaZ81" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qYfmmBPJeAGzp4K" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " where", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rFDP6ez75f" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oZ2hPrbE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " increased", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QTljTl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " by", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "R5GyjiHFskMh3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "358zKwoy9IgLkgs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SySLcX59fdcAPY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gaHrAcd9DrrG4wy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vDwGEi7dPEs1IU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eGeq8xRIQsvazAu" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sim7a6Q5hoSi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PDuli1x9NTHNzEv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "824", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0Dqig36ctmcCn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "569", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PY9ad2KqxevuT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "833", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OZ4s2XeYs9Yxo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "174", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V8JXGZVplbGuY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TvG7WEQ7sORVuqp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bcesVvHFUALcMJz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ".\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AycX7Q2TJgB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KkzDaxk9seUS8RO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p5qMyuMSlGwwubs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mwzU5jL6IckFr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "appU9xQA" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " Region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OPBHPOt0K" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ":**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BZPqcPd67QKyp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zUaUaNeDfFo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ELstnCvfT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " details", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1CgWNiyE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "09bpzjp2bEks" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " results", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ogm9r5X1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mvCjxDOvHpxbi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FIJ4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kcvMCY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LcB85y37trMdZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WS5xOmUygAD8wd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8Q5OzwSoBcjCmdh" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Jxb4XJIyOWLMWlC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OCLlLpIv6srSx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6sq5GC8JM5ZNmLl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CG9PkggQdi0n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kNv92G9b6sUh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RC0MI0D" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9rn0MKS5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rypVA1w8HQvBKYz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " citing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qmlWXMuo3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I5dMeVX52" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y4l1qzJ15" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DohbrYLnknoSJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5BDnaJfP7aPB" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Xcav3jUhA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vaaw6gdZ1gb5Rc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kpNQt9js5lcD0zF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hrZzbD1OTBPG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0St3B7u13lxZYQZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "824", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "12k4Iqmp2C94u" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "569", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qSU6y1yxLUuym" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "833", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lDT1VP4FOH4pi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "176", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ua1xbGW5Ap5Dp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8QDWL2sDP9fj1ax" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jjCpaZiSRYkhQE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VXUdovY5BC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b3e7d7ee61c6", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 88, - "prompt_tokens": 456, - "total_tokens": 544, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - 
"cached_tokens": 0 - } - }, - "obfuscation": "SkreRidbyd2WQ" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/b43291ac9074915996cd682cd023ce83993fb0b178ad02cb989e90ada09bdc1d.json b/tests/integration/responses/recordings/b43291ac9074915996cd682cd023ce83993fb0b178ad02cb989e90ada09bdc1d.json deleted file mode 100644 index 75434eeae..000000000 --- a/tests/integration/responses/recordings/b43291ac9074915996cd682cd023ce83993fb0b178ad02cb989e90ada09bdc1d.json +++ /dev/null @@ -1,705 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search_empty_vector_store[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_ltsd3q9G7fq4by5VmgdvtNRX", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_ltsd3q9G7fq4by5VmgdvtNRX", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 0 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query.\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0fgBVqnoZphRrO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kAndkfubV6NKXsY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " couldn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EXhAPDe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nrXymFqvjdq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lajtruZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xasK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - 
"choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mrqUvzsWBTOO8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ha2tJcKUNTiA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " number", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vpCsuweOe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GFFxLjDCZduzC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Xs8Vo94R" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qLbADKniURbG3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YF79Ocjj7FyP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pyuKZULeLEPvik" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UKwrHwSz4E7a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GRGchsnIuihqbZ0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Qpq01eD86BDpBoj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZbaWcRhys3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "srlQAcwr3TFz2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - 
"content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OaURzdjGvn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I81L6v0mjvIsSpW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eCBzPYMI2j" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b43291ac9074", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 21, - "prompt_tokens": 163, - "total_tokens": 184, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "hPWaHWTF4MLy7" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/b46a06f7d0c1fde007c2b9e6e6d0b4c0694dccd4c61731db54c2d06bad1a0098.json b/tests/integration/responses/recordings/b46a06f7d0c1fde007c2b9e6e6d0b4c0694dccd4c61731db54c2d06bad1a0098.json deleted file mode 100644 index 3014d416c..000000000 --- a/tests/integration/responses/recordings/b46a06f7d0c1fde007c2b9e6e6d0b4c0694dccd4c61731db54c2d06bad1a0098.json +++ /dev/null @@ -1,1091 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts_pdf]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" 
- }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_YkpKfL9mwbGk2BLqNDoaFrO0", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_YkpKfL9mwbGk2BLqNDoaFrO0", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-156847829497, score: 0.007500206285428307, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-156847829497', 'token_count': 98.0, 'metadata_token_count': 11.0} (cite as <|file-156847829497|>)\n, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n" - }, - { - "type": "text", - "text": "[2] document_id: file-156847829497, score: 0.0032793168757021015, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-156847829497', 'token_count': 498.0, 'metadata_token_count': 11.0} (cite as <|file-156847829497|>)\nLlama Stack \nLlama Stack Overview \nLlama Stack standardizes the core building blocks that simplify AI application development. It codifies best \npractices\n \nacross\n \nthe\n \nLlama\n \necosystem.\n \nMore\n \nspecifically,\n \nit\n \nprovides\n \u25cf Unified API layer for Inference, RAG, Agents, Tools, Safety, Evals, and Telemetry. \u25cf Plugin architecture to support the rich ecosystem of different API implementations in various \nenvironments,\n \nincluding\n \nlocal\n \ndevelopment,\n \non-premises,\n \ncloud,\n \nand\n \nmobile.\n \u25cf Prepackaged verified distributions which offer a one-stop solution for developers to get started quickly \nand\n \nreliably\n \nin\n \nany\n \nenvironment.\n \u25cf Multiple developer interfaces like CLI and SDKs for Python, Typescript, iOS, and Android. \u25cf Standalone applications as examples for how to build production-grade AI applications with Llama \nStack.\n \nLlama Stack Benefits \n\u25cf Flexible Options: Developers can choose their preferred infrastructure without changing APIs and enjoy \nflexible\n \ndeployment\n \nchoices.\n \u25cf Consistent Experience: With its unified APIs, Llama Stack makes it easier to build, test, and deploy AI \napplications\n \nwith\n \nconsistent\n \napplication\n \nbehavior.\n \u25cf Robust Ecosystem: Llama Stack is already integrated with distribution partners (cloud providers, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. 
Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9mAXEOvOfMup6u" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GaQJtU7pcY6w7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q4lslOYYHnr5IP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6oz6SHQVeufF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ecdCAkflBM9nZ6d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8v85nLwLgtjkxyg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NMeJSqXuu2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p1RdfBeFxV4Uf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GOiuM2mxWT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " is", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8LEs9BVZweIow" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TTBMUF8seY54gd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " Mi", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - 
"usage": null, - "obfuscation": "5namyFfEh57XS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "xture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1gJgnqE1SZ5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "-of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wEImop2365qsS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "X6H9a5iFeAkAhpU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "Experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qEPnJkkw1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " (", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p3yf4TRUYdaBSa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "Mo", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WFEKUJUOF4MKrm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "E", - "function_call": null, - "refusal": null, - "role": 
null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "93ciJDT2caXproR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": ")", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NQ4VVCkxwtuIxJt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ke3XThIbnJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " with", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "S1vLYWIy7r3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EyDsqCjB4YD9ltR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MNC8fqnLUKVjI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - 
"obfuscation": "T4PnWmda" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "z6eh5lAJcswUP3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wvEZU5szEFPH1Kk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RIYiWmtEuOiH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "12HWiETWUYDEhQx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "156", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "g57T8niUOMtXP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "847", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CdEvthV28QdCM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "829", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": 
null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cSGlBGBu3nkVa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "497", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vJUVZRMQsV48E" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rzxCsnLb6rX2LJn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LNU761zfPNXsFC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IKc5sES0GC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-b46a06f7d0c1", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 35, - "prompt_tokens": 988, - "total_tokens": 1023, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "VTbzbzijYTiy" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/ced781861a2e5ed75a37b9a2fc07a98f92a88d4c1ea550aa05011c5270184916.json b/tests/integration/responses/recordings/ced781861a2e5ed75a37b9a2fc07a98f92a88d4c1ea550aa05011c5270184916.json deleted file mode 100644 index 0a55bae94..000000000 --- 
a/tests/integration/responses/recordings/ced781861a2e5ed75a37b9a2fc07a98f92a88d4c1ea550aa05011c5270184916.json +++ /dev/null @@ -1,5319 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_or[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "Show me marketing and sales documents" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_23WRIrHGU36iECmGK5iHCAAJ", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\": \"marketing documents\"}" - } - }, - { - "index": 1, - "id": "call_WvfMjaowll1M9Wj78NvoaEPu", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\": \"sales documents\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_23WRIrHGU36iECmGK5iHCAAJ", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-66545595855, score: 0.002369960119027428, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-66545595855', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-66545595855|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-66545595857, score: 0.002158784645354334, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-66545595857', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-66545595857|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-66545595858, score: 0.0018325740484518083, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-66545595858', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-66545595858|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"marketing documents\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_WvfMjaowll1M9Wj78NvoaEPu", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-66545595855, score: 0.0020592709044779337, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-66545595855', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-66545595855|>)\nUS promotional campaigns for Q1 2023. 
Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-66545595858, score: 0.0017944759843167356, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-66545595858', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-66545595858|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n" - }, - { - "type": "text", - "text": "[3] document_id: file-66545595857, score: 0.0017688937698533602, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-66545595857', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-66545595857|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"sales documents\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XdGPQPiUvkZg18" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "Here", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "U9KXrqpqxByY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zzjdLInU1ji2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " some", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YUMsUNqAoph" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NKA2xD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zcuaUrNIGPxx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - 
"choices": [ - { - "delta": { - "content": " sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rvHZ39W1jM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "49bIx7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MaRroEokYst" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EZYbR3MbPTylNGW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fNadGsoG2TCHMYa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qrUHyqTlin60x" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "Marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RZCTYMR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sexSac" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rGlHaAwtZoMtgaJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "**\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PLGDUfMXXQJN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SN88vv2Eoc2dqu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " -", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pp5ksdi1vZspze" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KGnhyG6Spmx5U" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { 
- "delta": { - "content": "US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "M8fNxOP35v6R5d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cJJK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m8V9o2R" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "s", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BKMPxbHC37gY6gA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KJhwksBZ6ZzV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5tbSXG6g52reMx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": 
"default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q5UfaqXnt8o57UA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CJeKVrgMDbtvnMq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ge5jm9EMDGVet" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "C4vWOdzPSQl7r2O" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HJsmbNYuayORG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5mTvbRh40A7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Mh7qVXq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " 
discusses", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MSuTTx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "n1hU216CYRDj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oSaj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sEZa7q" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "it9aUykIAQWaP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0Iuc2wr6o6bR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tF34hJog5HIkz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kwIs93lNP7T1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nLrm8oO1C1e6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " first", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d6zxioYbLu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ejmt77V0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pUb5MOvONjEJF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "svGzq7vrZWY8xqX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "202", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qYaY20ptEv07M" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZrstyaAkaWva5mj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kcCDiDkEdyGz5iQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " noting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "98bqc1Pdo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DZ2XHlGvMNKd60" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qGilZOLkudHeoEF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "6Fbpmma7YgV4ot" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HhblN6JjioYGroz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0Ls7w8e" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B8snkzwgxBBwa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O6xdOozH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KidRqNYLsnl0T" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RlNOGLUvSPgA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "f113WpE4N2R3s" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MdbBcERbV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VH2rPJfkX9C9Sv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "K0YoRj4VvNIBZbL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SeJxRpLk0XDn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HEDzG5kWNlYSZt8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "665", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - 
"usage": null, - "obfuscation": "2TH9KIksys2nq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "455", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QtvDnW8YhpOHC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "958", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3WKH3x18PIASG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "55", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SMgs02wVl7En1x" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CKKFKSVVyatjPiq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ">.\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "icPW125gWOzL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nVRDVIR18Mj0im" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " -", - "function_call": null, - "refusal": null, - "role": 
null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1XRGjG4WKtAUKp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7oLVjXHoa4NiB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aiFIUayO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "x61Z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oIKPuZL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Results", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jJrv07yR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": 
"svE9JZ1ohEnO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "u1Qjnc5voABuDh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RHeEOOYDQBNgqUu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QOlFXrw8Oky2lvV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DZ0GhvBYiHMz5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "M4YDju4MSAk5trc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jzU93BUlsMEdD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BYXaQ4r6L7D" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VVDUbCS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " summarizes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kaG7r" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "EpQuVKWEIJXC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " results", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YQu5Fgmx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "osvH63jj14TX0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6JQz" - } - }, - { - 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hwRMG7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "749CQoHIqLvYs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Europe", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YndeezPy5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FTuQvLFg0PfY09a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " reporting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dj1I03" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hac6e4uMJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - 
"index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HPr0QHfE2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pPb5gSzaweV8f" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " EU", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pdVa7p1jdNHFD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CVzAb4qi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "30vqYgCRn8Pf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HwMbpv7eiiK2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " same", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DwMSqk8FPXX" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " period", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5Ysl62z27" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8iCfjXS155k6td" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ooFuPXfLHoRnXZ8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2zXyLVeFTjXC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DhHIYZWpRM4yWIl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "665", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jrIhDBQy41thm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "455", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "P5a8wui5R8TH4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "958", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IX3QKXxAZ2ec5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "57", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KhfYlHu3G0j7ON" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uk8g4lNDirw0HAC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bPWTs0QZmXVzHxi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ".\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4cUijEPcGP7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UTWq2WucStv8GBz" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rbfRadw8IxHjdDZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1rSJBtQSpPQZx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "Sales", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "C7McWLKOkvw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Documents", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SOSLyt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "piUsWn8EBb7NjCa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "**\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Kb8UQh5ooOOn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "owsU7AvtxRl3kl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " -", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ExGbBEj0ZZiEeO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LkhE90XcCt6QB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ro9yRuQMye1f" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Pacific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QJPNzWYh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CVZnpy4Y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Figures", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4LCiSbby" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NZBAX47EqHal" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k4wYrq2UJsc8FI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XnvMVcQXnZVg53T" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "G86LWlfiKXx9Ezf" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kGj2b6q4bKQjS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SOHSHJ5tpvjc3EN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ":**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JIxK4WXw9hNN7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2V0SGyXuBBb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LtTGZ5l" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " outlines", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Oef5Th7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xuVMjXg6INli" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gXoUX25I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " figures", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MhYIddql" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CojnKuUoJdEh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fndEVKE6cEET" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sTVeoEwpIaI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Pacific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QLkH7h9f" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AG87hmAQm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "49wW6MlCMn4mv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SWGxSzMrfUoK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " third", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tfqImYMumN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gcpKnfPS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "40j8DJWHcu06X" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VQIhIYUHOTkVVaq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "w2OFvAuBs6SkU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yFJPhxFj2p8NG43" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j7jM9iqpVCCQUwJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " marking", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uxjZvTlO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "07u6pYFZV57uUl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " record", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VSMUQ0Sry" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "-breaking", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4LTMtCq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " quarter", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jFHWxBdu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xvUWnFlQr9dYw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " Asia", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "a3BGrtj1ZNq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dPT7A9SLFuH4sc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TwAufdUHaGxnpXx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yY9mj10ACA9Z" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "HKlp5d8z1sCRnvo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "665", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xdwJpwbzCAAeZ" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "455", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "28DGC41ugGYBT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "958", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TQukAPywNWLCT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "58", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "S3fJdG0iCX5tYc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1sVu1FvGcBfoRQ8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NejYFG2Dhq9dell" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": ".\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DdjiLHp9ldg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "Feel", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6DNGC3GK7maK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " free", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lPDuTrpbpsY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jr8PFNUPcfeuU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " ask", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "b6kvXXT5e0cM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " if", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rxtEujuhrSKRE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XTLKkpLrB30T" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " need", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "c2s2qJpL62p" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " more", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sW13r2ijB7n" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " detailed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nGIlF9a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ibll" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " about", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qIy4kyp8Rg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " any", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oFiPqolmcN20" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7XspzkQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": " document", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, 
- "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9zcTaMM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": "!", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FvykFc3GIhoBbVo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "icraAZ8N3I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-ced781861a2e", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 190, - "prompt_tokens": 1132, - "total_tokens": 1322, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "hN0VoxNXyt" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/e30fbbfccc7f5b97dc6624fdfaeb656c32bdeef203f7d956848838090a8d97a0.json b/tests/integration/responses/recordings/e30fbbfccc7f5b97dc6624fdfaeb656c32bdeef203f7d956848838090a8d97a0.json deleted file mode 100644 index b177c4a54..000000000 --- a/tests/integration/responses/recordings/e30fbbfccc7f5b97dc6624fdfaeb656c32bdeef203f7d956848838090a8d97a0.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search_empty_vector_store[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_mj487Ks2sh1tl4Ic45p6c05I", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wqUy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "30F" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "T" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "o" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mD2xK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - 
"choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gekaZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I28xt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kvU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - 
"model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wb14MkADYXLgXVa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tz6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zjEyGJrVXsF0qr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mmH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YLwR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e30fbbfccc7f", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "lrOfy3Ovh9Ay9TZ" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/e858ad8875d3c11059189000e91b485ae0e322a9a0cdf8f9502002b09b72b2e6.json b/tests/integration/responses/recordings/e858ad8875d3c11059189000e91b485ae0e322a9a0cdf8f9502002b09b72b2e6.json deleted file mode 100644 index c1a0ac32d..000000000 --- a/tests/integration/responses/recordings/e858ad8875d3c11059189000e91b485ae0e322a9a0cdf8f9502002b09b72b2e6.json +++ /dev/null @@ -1,2360 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_region[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "What are the updates from the US region?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_e9va2TPHaw7wGHW4FVbhLLGF", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"latest news updates from the US\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_e9va2TPHaw7wGHW4FVbhLLGF", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-446066738450, score: 0.004746808510792406, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-446066738450', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-446066738450|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-446066738449, score: 0.0030296457418813576, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-446066738449', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-446066738449|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"latest news updates from the US\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. 
Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aqhlT3eLRWQVwn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "j7LxLf9nkbMUsat" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " wasn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Yb3E2vjQp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " able", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BR6Ep6mHUWl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jzvUpyQg6t4KY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JME4a5nGFyY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - 
"__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "42PfnWBpmMEZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " most", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5sjtigBbKs8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " recent", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TKPDi6h5C" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " news", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xfcBAdtmVYd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " updates", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vyOA40EL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "WKvCRFY5i52" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": 
"gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "voRN63Cxay3h" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m5waqSvq8nrmC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bAIBogqDBYLQNJz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " However", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sImHMBhP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PAXzYayA6u8QaaF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rpK1HnedyTx1Bm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " found", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TSzSuVXOB2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": 
"rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " some", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oiHPqyf8rj1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zyzU" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LRkXcesu3kv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5CBXfrNr5m88x0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9PnCqc0rnxrXYq2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XFJq7Nmwtc9HsCL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": 
"chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "etKgbMagTzQEo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wruhogHDwJaUzJc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " about", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0RHggce0tm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " new", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UMdtawsVbdqd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " technical", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TXEIpE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " features", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "u1W8Yci" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " deployed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GR4xsQ9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - 
"choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QU7mH4VrUAVOd" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qjeREFEKK6nW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qMoAzMOaFxAqF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QhKyLz28W" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V1fpkM29YgqJG6" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CWHl6w2xP93DUeE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "O3C1cO8u6FBB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ptqckvWLesqbhgF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "446", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p6811HKToTLjy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "066", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0Sa6Uo6Y5pn92" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "738", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7uw3kvgy6SwHO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "450", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ib0kPdAMq2DjA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lnD9KahYPqKmzmO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - 
"content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3GppRQgYKPKeHAe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9LnH5w7ldqMY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1GX8zk8CEPBra1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " note", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LPbytQBnCrp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ujy94Y5IDckl5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "soNT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": 
"default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "L2dXaP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "00yEtV5StjzAZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GN2wLSQU268YJi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OHt1oCwSLstZNsF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kgMyyevpdoWkOZP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZUPGxzCq0Smmb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BKtG6sQzGFt9Jlz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": ",", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YdhDAdYKfXOYBik" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " which", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "69J0RaD351" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " led", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "P38reGXyb7Mm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VhkGg6hSFv2Yc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8oo41u3oe7dqRF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vVOeAUoIlL2Dr45" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "5O1Y1KhyPD9FiH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wIRIrsBe79ovgA7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4f3i8JTv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "k7xvmJi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wCzB9BAnRxCNt" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "G1QnFuuZ1Zxh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RH76JhPNigfCV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9n7UQGypA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "7A2ePQsmZL6Unj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0sqmXYlUgeA5ad0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hNr6Yln8H2Da" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OC8EK7rWWZ3QeZk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "446", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ZYFHFT47qvkRR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "066", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": 
null, - "obfuscation": "rn6vSev3yHACP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "738", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "v6G9elSOI51Vq" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "449", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2uVAEusItKumD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "5FK7OttxZAl0pGP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rz0blhyxkJsBC2" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eOmKAZr9Iz" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-e858ad8875d3", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 82, - "prompt_tokens": 468, - "total_tokens": 550, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "zhqZ1oh8DC60I" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git 
a/tests/integration/responses/recordings/f27d01b45fdcb22d0f17dfc1443b33687ed1de90043fb14ef5b0627301dc3afa.json b/tests/integration/responses/recordings/f27d01b45fdcb22d0f17dfc1443b33687ed1de90043fb14ef5b0627301dc3afa.json deleted file mode 100644 index 80d540be6..000000000 --- a/tests/integration/responses/recordings/f27d01b45fdcb22d0f17dfc1443b33687ed1de90043fb14ef5b0627301dc3afa.json +++ /dev/null @@ -1,1191 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search_empty_vector_store[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_mj487Ks2sh1tl4Ic45p6c05I", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_mj487Ks2sh1tl4Ic45p6c05I", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 0 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query.\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SRIlpdajZx8FO4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": "I", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0bprKCxqVUT1IzO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " couldn't", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gufTJos" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " find", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0AvzawzpiKm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "bUJDVvX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GrXx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - 
"choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NMOFb2ld93Xol" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "w15SXaCjAMFj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " number", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "27B3YvlcC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2HxNBEwC17WR3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GW7ttzUM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "D6d19PSXPXrBn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "mFt8405teF5j" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LwYKpJV7S9PiWW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SYXxmYtps79F" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lpI5leMBjGqY9pQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VbSAwY12HmP5brn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FYSKXqx8uk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "v9DS20Kn7CGoi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - 
"content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ja8yRMZINL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "40vCQeypW8n1Qlc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " You", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Zk30ZUxBclCm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " might", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "R4iAb4Bfph" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " need", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Gafvg4ZIPf7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jMexvTa4ndGz1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " refer", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LRMPjt7rIY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TrWcCKSieTc6E" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UlGsWusYETYB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " official", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "h00N110" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " documentation", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UinXMBmHRF56N" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " announcements", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4j" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " from", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r82bBftENEV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1CXANU3X2bkO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " developers", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "N2K2y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "C0Ed59GaOm2l" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " precise", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "T70MK9LI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": " details", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ar6pbTXi" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "Qdz1YJaJbTR9nsh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MJhFNn9Hl7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f27d01b45fdc", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 39, - "prompt_tokens": 163, - "total_tokens": 202, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "U92EityXUiyFJ" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/f5ba8a29a9c1a20e23214598f57bf223828256fb59234fbd33d7c19e482ef854.json b/tests/integration/responses/recordings/f5ba8a29a9c1a20e23214598f57bf223828256fb59234fbd33d7c19e482ef854.json deleted file mode 100644 index d9d0e9fe2..000000000 --- a/tests/integration/responses/recordings/f5ba8a29a9c1a20e23214598f57bf223828256fb59234fbd33d7c19e482ef854.json +++ /dev/null @@ -1,2684 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_category[client_with_models-txt=openai/gpt-4o]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "Show me all marketing reports" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_ymvqIkud4SsWScbsp71WX2Dv", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"all marketing reports\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_ymvqIkud4SsWScbsp71WX2Dv", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-630021438843, score: 0.002382802518405855, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-630021438843', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-630021438843|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n" - }, - { - "type": "text", - "text": "[2] document_id: file-630021438845, score: 0.002299599142141726, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-630021438845', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-630021438845|>)\nEuropean advertising campaign results for Q1 2023. 
Strong growth in EU markets.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"all marketing reports\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4N3tp1S9tG28pA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "Here", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3200bmW3d5LS" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "icOnsMG8XjZY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BbiVCyfXUk68" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": 
null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NCG4Zu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " reports", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fsNjMgKm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ":\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TQwaCzP6NLm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BvL4pkg1rAUsy2M" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IGtBzb0Od4HghWa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9liSZV5esGAaE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2wVlojWNs6TpdF" - } - }, - { - 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3wrOaa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fDxFfB5eX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gbK3KPw9fAFs" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "J2j0xqJtGPap7w" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zb3euNdyWlcjpXc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wWRXXTzFMTYXeP0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - 
"index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ilaeyx1BqbgZD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DxUsz43eJDNNhbj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Z0Y9820piNv40Y" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AE38Rd0KyZnMREA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cqYUOSzTWeob" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FPNh7M5iJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " highlights", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Qz0rY" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " promotional", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "oy56" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " campaigns", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4ZUWpm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zvoUYGJN9SxEg" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PLAVerfDu4qV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " United", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "r9MEWdRX1" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " States", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9RZKWzftx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9PrQiraxny6ioez" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " noting", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "iP5zVVAvh" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DK61Nsnswx7ZAK" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LKey5lze3PBdBw8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "15", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "SFY6qLBHbhBoz5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "%", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wnZ9137DkdRtHnO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " increase", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2UxD8eW" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "izLdQZY7PY913" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " revenue", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "95pa26qo" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "li9rjxSnrTNXp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "t1StV9n5uZFD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " US", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "p9MdlLIU3HaH0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " region", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CIM9ziWZa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cgWeWoQHbU2fTu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aVK9Tjns7uHQh16" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fsVP8sKv0tR8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "L1jwZfUaSQpM4qM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "630", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Fy7YZIPEcZKm5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "021", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pdCoRFv2k8zpQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "438", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Y0hsHzSES4VUo" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "843", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "D5vFkTtBMBfHP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PRIQRkAhi0ZbGFT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ">", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BVIRRvUC0DJf8w4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ".\n\n", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YSlEJCMJNV9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "2", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "odTW1kHMpKzOCzR" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Fa8dy1nw3lQ2VGx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " **", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IN5shTxuimZj0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "European", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "0j0VwXeA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Marketing", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "weUOZe" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BHWp7GwgD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " for", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CXAY9zsXV9fV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Q", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nT76pMsMcg2e6r" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "1", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3fwuaSCwvpZwcnL" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "fxjItuhdH7LNciW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "202", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PiRaydhUnVTlm" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "3", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "P8sjxeap5reKf1d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "**", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pnQWynGG5fjcs5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ":", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "I0udDGwQurbphXj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " This", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qcE4ND7CQKV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " report", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "RhvpSlXJb" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " focuses", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "porIjgLV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "R8B0GNlG7RMnD" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " advertising", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qGCn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " campaign", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4mzlLa9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " results", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ffQEjjrk" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UAHd3HQLxRjvz" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " Europe", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FjrIOJBji" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "B7fyyvXVxvNY2so" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " indicating", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "X4ES5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " strong", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jHeWXatus" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " growth", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dwGuvRNb4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "kBNLi3RoTmt7W" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " EU", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - 
"logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gH61e6JTi9jfM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " markets", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OR0WZkgp" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JcDso3jKuaMHzB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lyhNXzIAlnTHB82" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IrFupLnQwQf8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "apaTzs0DuUoMiUL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "630", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "y0HYANL4WSuYW" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "021", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "MVJhM1B1VFq25" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "438", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8xPNVYlBjkKZC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "845", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2XVvctfn3uPYV" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "we6kObliWDle0WB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "2KOaYFLi1Q3RkY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "zBjTjqxhW0" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f5ba8a29a9c1", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { 
- "completion_tokens": 94, - "prompt_tokens": 456, - "total_tokens": 550, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "mqEOVo9hWj09s" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/f80d49a32ea559ef5f0020301475b04deeb96c97931d3413adf0a4e5216a42d4.json b/tests/integration/responses/recordings/f80d49a32ea559ef5f0020301475b04deeb96c97931d3413adf0a4e5216a42d4.json deleted file mode 100644 index 53dd33aca..000000000 --- a/tests/integration/responses/recordings/f80d49a32ea559ef5f0020301475b04deeb96c97931d3413adf0a4e5216a42d4.json +++ /dev/null @@ -1,660 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search_empty_vector_store[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." - } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_ltsd3q9G7fq4by5VmgdvtNRX", - "function": { - "arguments": "", - "name": "knowledge_search" - }, - "type": "function" - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sdM3" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "{\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "m73" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "query", - "name": 
null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\":\"", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "J" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "L", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VHl6C" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "lama", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "cL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " ", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "nSM0I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "4", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rq64c" - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " Maver", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "ick", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "q5M" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " model", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " number", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "tzlHgv0gPZ2Y7be" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " of", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "8LG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": " experts", - "name": 
null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "UbWx20CRkD2j80" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": [ - { - "index": 0, - "id": null, - "function": { - "arguments": "\"}", - "name": null - }, - "type": null - } - ] - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "CRw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "tool_calls", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "jPOO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f80d49a32ea5", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 24, - "prompt_tokens": 74, - "total_tokens": 98, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "I4x1yzasaOQSFKk" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/recordings/f970cb793e3a68879444e3b218cfbe37829f7ed7ca4706ff0cbb6113f9a3253b.json b/tests/integration/responses/recordings/f970cb793e3a68879444e3b218cfbe37829f7ed7ca4706ff0cbb6113f9a3253b.json deleted file mode 100644 index b3881f6ff..000000000 --- a/tests/integration/responses/recordings/f970cb793e3a68879444e3b218cfbe37829f7ed7ca4706ff0cbb6113f9a3253b.json +++ /dev/null @@ -1,2239 +0,0 @@ -{ - "test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]", - "request": { - "method": "POST", - "url": "https://api.openai.com/v1/v1/chat/completions", - "headers": {}, - "body": { - "model": "gpt-4o", - "messages": [ - { - "role": "user", - "content": "How many experts does the Llama 4 Maverick model have?" 
- }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_FzhOmTdZThRndI5rSASPdAqr", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_FzhOmTdZThRndI5rSASPdAqr", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-797509666839, score: 0.01927231682811354, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-797509666839|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - }, - { - "role": "assistant", - "content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-797509666839|>." - }, - { - "role": "user", - "content": "Can you tell me more about the architecture?" - }, - { - "role": "assistant", - "content": "", - "tool_calls": [ - { - "index": 0, - "id": "call_0ABjXmEaManGBvCYVZD4QgMt", - "type": "function", - "function": { - "name": "knowledge_search", - "arguments": "{\"query\":\"Llama 4 Maverick model architecture details\"}" - } - } - ] - }, - { - "role": "tool", - "tool_call_id": "call_0ABjXmEaManGBvCYVZD4QgMt", - "content": [ - { - "type": "text", - "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "[1] document_id: file-797509666839, score: 0.005821830799489026, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-797509666839|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n" - }, - { - "type": "text", - "text": "END of knowledge_search tool results.\n" - }, - { - "type": "text", - "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model architecture details\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n" - } - ] - } - ], - "stream": true, - "stream_options": { - "include_usage": true - }, - "tools": [ - { - "type": "function", - "function": { - "name": "knowledge_search", - "description": "Search for information in a database.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query to search for. Can be a natural language sentence or keywords." 
- } - }, - "required": [ - "query" - ] - } - } - } - ] - }, - "endpoint": "/v1/chat/completions", - "model": "gpt-4o" - }, - "response": { - "body": [ - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rsd7d3rp5kbEVl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "The", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ODJOwvLFURckN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " search", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xZb015AGx" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " did", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3JGWBbu6DPDN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " not", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wckpIDwdjVDO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " return", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ldxxVIrQG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - 
"choices": [ - { - "delta": { - "content": " detailed", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "QKiep2I" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " information", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PN7l" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " on", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Ur07TqLu832VA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "sPmbNMurjXRy" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " L", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "aPlzubhDMGESKu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "lama", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "GKINsm8cApiJ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - 
"service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1oce53N1sG0TvTv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "4", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "apwsDYdU0Sa7ecI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " Maver", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lTQzNiGeSA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "ick", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "9k98JY73pqRJ7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ugrRMI4fEF" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "'s", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6QvjTKjAVkdg5a" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Vzc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - 
"content": " beyond", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vNcwB3Qed" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "VPWpdr5Fl3Ip" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " fact", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xazWtEOj5Kj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " that", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XADhfcr8VVN" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " it", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "vCKOntIT1zH0d" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " includes", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6Dtr2fH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " ", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - 
"system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "TLAxs44DOUGC7Os" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "128", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FWII1RcbMI53t" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " experts", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NFr4G1k5" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Pk903dzZmEQ8E" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " its", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "eoSzZZOvuh6g" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " mixture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "4alUaiBM" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "OX8jc0x591gH7" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " experts", - 
"function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "BSBTyRnI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " design", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "6hDOBVMq4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " <", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "LnwqjhP9VT9Ooc" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Sio7mRGTVjjrluP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "file", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "F1432MVyFi6k" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "-", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uZnDp5RxdElbmFG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "797", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": 
"fp_f64f290af2", - "usage": null, - "obfuscation": "DkioNrKVpHgm4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "509", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "pNTgUVRUx1zmI" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "666", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "IuAWvaapdXENa" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "839", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "lPzK8ydEBn7KZ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "|", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "i7j8Lcp3w6fiwfB" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": ">.", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XOa8gEi2dmWmwv" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " If", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "H3SJqnjZ7Pqla" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - 
"refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "JcW6AJ5otYLu" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " have", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "3zHp4CcYvt4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " specific", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gbNW68h" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " aspects", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "d3l4elcj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "wm2dCDI1w9YZT" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xksFX6yWVCKr" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " architecture", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", 
- "usage": null, - "obfuscation": "EBl" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "uHyFNawwHG4S" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " are", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "XWndZ3Vs6Fl4" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " interested", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "KryRC" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "PsbGeC1p8q1qW" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "R0ewazqkDYFcutH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " such", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DunAPSZH5pH" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " as", - "function_call": null, - "refusal": null, - "role": null, - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "rPK25ptTTBfmO" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1rpL0ku9AVho" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " model", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "Q4tHIRqic8" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "'s", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xnAtIpnDFBpzrn" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " layers", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "dobI7KtDw" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "qRPkAtKOKDpghrA" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " computational", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": 
"ZQ" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " pathway", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "AVY3TggP" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "V4ADAwStu1UFXpj" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "xtIJsJ9g6yDTG" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " unique", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "FqoXIcjuE" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " features", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "NBtG144" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "ktFPoLRC0Zopjl9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " please", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "1FQhYXLuY" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " let", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "DdsYWqEjWxdX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " me", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "gEWWKiK0fl8iX" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": " know", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "hpfipWYz4jL" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": "!", - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "YavpvIVxZjJr6x9" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [ - { - "delta": { - "content": null, - "function_call": null, - "refusal": null, - "role": null, - "tool_calls": null - }, - "finish_reason": "stop", - "index": 0, - "logprobs": null - } - ], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": null, - "obfuscation": "yfTg1otZak" - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "rec-f970cb793e3a", - "choices": [], - "created": 0, - "model": "gpt-4o-2024-08-06", - "object": "chat.completion.chunk", - "service_tier": "default", - "system_fingerprint": "fp_f64f290af2", - "usage": { - "completion_tokens": 76, - "prompt_tokens": 633, - "total_tokens": 709, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0 - }, - 
"prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0 - } - }, - "obfuscation": "CCFg1IMN4ffFL" - } - } - ], - "is_streaming": true - }, - "id_normalization_mapping": {} -} diff --git a/tests/integration/responses/test_tool_responses.py b/tests/integration/responses/test_tool_responses.py index 3f1c35214..2cff4d27d 100644 --- a/tests/integration/responses/test_tool_responses.py +++ b/tests/integration/responses/test_tool_responses.py @@ -5,7 +5,6 @@ # the root directory of this source tree. import json -import logging # allow-direct-logging import os import httpx @@ -199,7 +198,7 @@ def test_response_sequential_file_search( @pytest.mark.parametrize("case", mcp_tool_test_cases) -def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, caplog): +def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case): if not isinstance(compat_client, LlamaStackAsLibraryClient): pytest.skip("in-process MCP server is only supported in library client") @@ -246,17 +245,13 @@ def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, cap if isinstance(compat_client, LlamaStackAsLibraryClient) else (httpx.HTTPStatusError, openai.AuthenticationError) ) - # Suppress expected auth error logs only for the failing auth attempt - with caplog.at_level( - logging.CRITICAL, logger="llama_stack.providers.inline.agents.meta_reference.responses.streaming" - ): - with pytest.raises(exc_type): - compat_client.responses.create( - model=text_model_id, - input=case.input, - tools=tools, - stream=False, - ) + with pytest.raises(exc_type): + compat_client.responses.create( + model=text_model_id, + input=case.input, + tools=tools, + stream=False, + ) for tool in tools: if tool["type"] == "mcp": diff --git a/tests/integration/telemetry/conftest.py b/tests/integration/telemetry/conftest.py index b055e47ac..d11f00c9f 100644 --- a/tests/integration/telemetry/conftest.py +++ b/tests/integration/telemetry/conftest.py @@ -23,7 +23,7 @@ from opentelemetry.sdk.trace import ReadableSpan, TracerProvider from opentelemetry.sdk.trace.export import SimpleSpanProcessor from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter -import llama_stack.core.telemetry.telemetry as telemetry_module +import llama_stack.providers.inline.telemetry.meta_reference.telemetry as telemetry_module from llama_stack.testing.api_recorder import patch_httpx_for_test_id from tests.integration.fixtures.common import instantiate_llama_stack_client diff --git a/tests/integration/vector_io/test_vector_io.py b/tests/integration/vector_io/test_vector_io.py index 1f67ddb24..e5ca7a0db 100644 --- a/tests/integration/vector_io/test_vector_io.py +++ b/tests/integration/vector_io/test_vector_io.py @@ -49,50 +49,46 @@ def client_with_empty_registry(client_with_models): @vector_provider_wrapper -def test_vector_store_retrieve( - client_with_empty_registry, embedding_model_id, embedding_dimension, vector_io_provider_id -): - vector_store_name = "test_vector_store" +def test_vector_db_retrieve(client_with_empty_registry, embedding_model_id, embedding_dimension, vector_io_provider_id): + vector_db_name = "test_vector_db" create_response = client_with_empty_registry.vector_stores.create( - name=vector_store_name, + name=vector_db_name, extra_body={ "provider_id": vector_io_provider_id, }, ) - actual_vector_store_id = create_response.id + actual_vector_db_id = create_response.id # Retrieve the vector store and validate its properties - response = 
client_with_empty_registry.vector_stores.retrieve(vector_store_id=actual_vector_store_id) + response = client_with_empty_registry.vector_stores.retrieve(vector_store_id=actual_vector_db_id) assert response is not None - assert response.id == actual_vector_store_id - assert response.name == vector_store_name + assert response.id == actual_vector_db_id + assert response.name == vector_db_name assert response.id.startswith("vs_") @vector_provider_wrapper -def test_vector_store_register( - client_with_empty_registry, embedding_model_id, embedding_dimension, vector_io_provider_id -): - vector_store_name = "test_vector_store" +def test_vector_db_register(client_with_empty_registry, embedding_model_id, embedding_dimension, vector_io_provider_id): + vector_db_name = "test_vector_db" response = client_with_empty_registry.vector_stores.create( - name=vector_store_name, + name=vector_db_name, extra_body={ "provider_id": vector_io_provider_id, }, ) - actual_vector_store_id = response.id - assert actual_vector_store_id.startswith("vs_") - assert actual_vector_store_id != vector_store_name + actual_vector_db_id = response.id + assert actual_vector_db_id.startswith("vs_") + assert actual_vector_db_id != vector_db_name vector_stores = client_with_empty_registry.vector_stores.list() assert len(vector_stores.data) == 1 vector_store = vector_stores.data[0] - assert vector_store.id == actual_vector_store_id - assert vector_store.name == vector_store_name + assert vector_store.id == actual_vector_db_id + assert vector_store.name == vector_db_name - client_with_empty_registry.vector_stores.delete(vector_store_id=actual_vector_store_id) + client_with_empty_registry.vector_stores.delete(vector_store_id=actual_vector_db_id) vector_stores = client_with_empty_registry.vector_stores.list() assert len(vector_stores.data) == 0 @@ -112,23 +108,23 @@ def test_vector_store_register( def test_insert_chunks( client_with_empty_registry, embedding_model_id, embedding_dimension, sample_chunks, test_case, vector_io_provider_id ): - vector_store_name = "test_vector_store" + vector_db_name = "test_vector_db" create_response = client_with_empty_registry.vector_stores.create( - name=vector_store_name, + name=vector_db_name, extra_body={ "provider_id": vector_io_provider_id, }, ) - actual_vector_store_id = create_response.id + actual_vector_db_id = create_response.id client_with_empty_registry.vector_io.insert( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, chunks=sample_chunks, ) response = client_with_empty_registry.vector_io.query( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, query="What is the capital of France?", ) assert response is not None @@ -137,7 +133,7 @@ def test_insert_chunks( query, expected_doc_id = test_case response = client_with_empty_registry.vector_io.query( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, query=query, ) assert response is not None @@ -155,15 +151,15 @@ def test_insert_chunks_with_precomputed_embeddings( "inline::qdrant": {"score_threshold": -1.0}, "remote::qdrant": {"score_threshold": -1.0}, } - vector_store_name = "test_precomputed_embeddings_db" + vector_db_name = "test_precomputed_embeddings_db" register_response = client_with_empty_registry.vector_stores.create( - name=vector_store_name, + name=vector_db_name, extra_body={ "provider_id": vector_io_provider_id, }, ) - actual_vector_store_id = register_response.id + actual_vector_db_id = register_response.id chunks_with_embeddings = [ Chunk( @@ -174,13 +170,13 
@@ def test_insert_chunks_with_precomputed_embeddings( ] client_with_empty_registry.vector_io.insert( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, chunks=chunks_with_embeddings, ) provider = [p.provider_id for p in client_with_empty_registry.providers.list() if p.api == "vector_io"][0] response = client_with_empty_registry.vector_io.query( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, query="precomputed embedding test", params=vector_io_provider_params_dict.get(provider, None), ) @@ -204,16 +200,16 @@ def test_query_returns_valid_object_when_identical_to_embedding_in_vdb( "remote::qdrant": {"score_threshold": 0.0}, "inline::qdrant": {"score_threshold": 0.0}, } - vector_store_name = "test_precomputed_embeddings_db" + vector_db_name = "test_precomputed_embeddings_db" register_response = client_with_empty_registry.vector_stores.create( - name=vector_store_name, + name=vector_db_name, extra_body={ "embedding_model": embedding_model_id, "provider_id": vector_io_provider_id, }, ) - actual_vector_store_id = register_response.id + actual_vector_db_id = register_response.id chunks_with_embeddings = [ Chunk( @@ -224,13 +220,13 @@ def test_query_returns_valid_object_when_identical_to_embedding_in_vdb( ] client_with_empty_registry.vector_io.insert( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, chunks=chunks_with_embeddings, ) provider = [p.provider_id for p in client_with_empty_registry.providers.list() if p.api == "vector_io"][0] response = client_with_empty_registry.vector_io.query( - vector_db_id=actual_vector_store_id, + vector_db_id=actual_vector_db_id, query="duplicate", params=vector_io_provider_params_dict.get(provider, None), ) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 893cc4a7d..1ae96d448 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -4,12 +4,9 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. 
-import logging # allow-direct-logging import os import warnings -import pytest - def pytest_sessionstart(session) -> None: if "LLAMA_STACK_LOGGING" not in os.environ: @@ -20,10 +17,4 @@ def pytest_sessionstart(session) -> None: warnings.filterwarnings("ignore", category=PendingDeprecationWarning) -@pytest.fixture(autouse=True) -def suppress_httpx_logs(caplog): - """Suppress httpx INFO logs for all unit tests""" - caplog.set_level(logging.WARNING, logger="httpx") - - pytest_plugins = ["tests.unit.fixtures"] diff --git a/tests/unit/conversations/test_conversations.py b/tests/unit/conversations/test_conversations.py index 3f0175831..ff6dd243d 100644 --- a/tests/unit/conversations/test_conversations.py +++ b/tests/unit/conversations/test_conversations.py @@ -82,7 +82,7 @@ async def test_conversation_items(service): assert len(item_list.data) == 1 assert item_list.data[0].id == "msg_test123" - items = await service.list_items(conversation.id) + items = await service.list(conversation.id) assert len(items.data) == 1 @@ -120,7 +120,7 @@ async def test_openai_type_compatibility(service): assert hasattr(item_list, attr) assert item_list.object == "list" - items = await service.list_items(conversation.id) + items = await service.list(conversation.id) item = await service.retrieve(conversation.id, items.data[0].id) item_dict = item.model_dump() diff --git a/tests/unit/core/routers/test_safety_router.py b/tests/unit/core/routers/test_safety_router.py deleted file mode 100644 index bf195ff33..000000000 --- a/tests/unit/core/routers/test_safety_router.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. 
- -from unittest.mock import AsyncMock - -from llama_stack.apis.safety.safety import ModerationObject, ModerationObjectResults -from llama_stack.apis.shields import ListShieldsResponse, Shield -from llama_stack.core.datatypes import SafetyConfig -from llama_stack.core.routers.safety import SafetyRouter - - -async def test_run_moderation_uses_default_shield_when_model_missing(): - routing_table = AsyncMock() - shield = Shield( - identifier="shield-1", - provider_resource_id="provider/shield-model", - provider_id="provider-id", - params={}, - ) - routing_table.list_shields.return_value = ListShieldsResponse(data=[shield]) - - moderation_response = ModerationObject( - id="mid", - model="shield-1", - results=[ModerationObjectResults(flagged=False)], - ) - provider = AsyncMock() - provider.run_moderation.return_value = moderation_response - routing_table.get_provider_impl.return_value = provider - - router = SafetyRouter(routing_table=routing_table, safety_config=SafetyConfig(default_shield_id="shield-1")) - - result = await router.run_moderation("hello world") - - assert result is moderation_response - routing_table.get_provider_impl.assert_awaited_once_with("shield-1") - provider.run_moderation.assert_awaited_once() - _, kwargs = provider.run_moderation.call_args - assert kwargs["model"] == "provider/shield-model" - assert kwargs["input"] == "hello world" diff --git a/tests/unit/core/routers/test_vector_io.py b/tests/unit/core/routers/test_vector_io.py index dd3246cb3..997df0d78 100644 --- a/tests/unit/core/routers/test_vector_io.py +++ b/tests/unit/core/routers/test_vector_io.py @@ -21,7 +21,7 @@ async def test_single_provider_auto_selection(): Mock(identifier="all-MiniLM-L6-v2", model_type="embedding", metadata={"embedding_dimension": 384}) ] ) - mock_routing_table.register_vector_store = AsyncMock( + mock_routing_table.register_vector_db = AsyncMock( return_value=Mock(identifier="vs_123", provider_id="inline::faiss", provider_resource_id="vs_123") ) mock_routing_table.get_provider_impl = AsyncMock( diff --git a/tests/unit/core/test_stack_validation.py b/tests/unit/core/test_stack_validation.py index d28803006..fa5348d1c 100644 --- a/tests/unit/core/test_stack_validation.py +++ b/tests/unit/core/test_stack_validation.py @@ -11,9 +11,8 @@ from unittest.mock import AsyncMock import pytest from llama_stack.apis.models import ListModelsResponse, Model, ModelType -from llama_stack.apis.shields import ListShieldsResponse, Shield -from llama_stack.core.datatypes import QualifiedModel, SafetyConfig, StackRunConfig, StorageConfig, VectorStoresConfig -from llama_stack.core.stack import validate_safety_config, validate_vector_stores_config +from llama_stack.core.datatypes import QualifiedModel, StackRunConfig, StorageConfig, VectorStoresConfig +from llama_stack.core.stack import validate_vector_stores_config from llama_stack.providers.datatypes import Api @@ -66,37 +65,3 @@ class TestVectorStoresValidation: ) await validate_vector_stores_config(run_config.vector_stores, {Api.models: mock_models}) - - -class TestSafetyConfigValidation: - async def test_validate_success(self): - safety_config = SafetyConfig(default_shield_id="shield-1") - - shield = Shield( - identifier="shield-1", - provider_id="provider-x", - provider_resource_id="model-x", - params={}, - ) - - shields_impl = AsyncMock() - shields_impl.list_shields.return_value = ListShieldsResponse(data=[shield]) - - await validate_safety_config(safety_config, {Api.shields: shields_impl, Api.safety: AsyncMock()}) - - async def 
test_validate_wrong_shield_id(self): - safety_config = SafetyConfig(default_shield_id="wrong-shield-id") - - shields_impl = AsyncMock() - shields_impl.list_shields.return_value = ListShieldsResponse( - data=[ - Shield( - identifier="shield-1", - provider_resource_id="model-x", - provider_id="provider-x", - params={}, - ) - ] - ) - with pytest.raises(ValueError, match="wrong-shield-id"): - await validate_safety_config(safety_config, {Api.shields: shields_impl, Api.safety: AsyncMock()}) diff --git a/tests/unit/distribution/test_build_path.py b/tests/unit/distribution/test_build_path.py new file mode 100644 index 000000000..52a71286b --- /dev/null +++ b/tests/unit/distribution/test_build_path.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from pathlib import Path + +from llama_stack.cli.stack._build import ( + _run_stack_build_command_from_build_config, +) +from llama_stack.core.datatypes import BuildConfig, DistributionSpec +from llama_stack.core.utils.image_types import LlamaStackImageType + + +def test_container_build_passes_path(monkeypatch, tmp_path): + called_with = {} + + def spy_build_image(build_config, image_name, distro_or_config, run_config=None): + called_with["path"] = distro_or_config + called_with["run_config"] = run_config + return 0 + + monkeypatch.setattr( + "llama_stack.cli.stack._build.build_image", + spy_build_image, + raising=True, + ) + + cfg = BuildConfig( + image_type=LlamaStackImageType.CONTAINER.value, + distribution_spec=DistributionSpec(providers={}, description=""), + ) + + _run_stack_build_command_from_build_config(cfg, image_name="dummy") + + assert "path" in called_with + assert isinstance(called_with["path"], str) + assert Path(called_with["path"]).exists() + assert called_with["run_config"] is None diff --git a/tests/unit/distribution/test_distribution.py b/tests/unit/distribution/test_distribution.py index 4161d7b84..3b0643a13 100644 --- a/tests/unit/distribution/test_distribution.py +++ b/tests/unit/distribution/test_distribution.py @@ -196,6 +196,8 @@ class TestProviderRegistry: assert internal_api not in apis, f"Internal API {internal_api} should not be in providable_apis" for api in apis: + if api == Api.telemetry: + continue module_name = f"llama_stack.providers.registry.{api.name.lower()}" try: importlib.import_module(module_name) diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses_conversations.py b/tests/unit/providers/agents/meta_reference/test_openai_responses_conversations.py index c2c113c1b..2ca350862 100644 --- a/tests/unit/providers/agents/meta_reference/test_openai_responses_conversations.py +++ b/tests/unit/providers/agents/meta_reference/test_openai_responses_conversations.py @@ -62,7 +62,7 @@ class TestConversationValidation: conv_id = "conv_nonexistent" # Mock conversation not found - mock_conversations_api.list_items.side_effect = ConversationNotFoundError("conv_nonexistent") + mock_conversations_api.list.side_effect = ConversationNotFoundError("conv_nonexistent") with pytest.raises(ConversationNotFoundError): await responses_impl_with_conversations.create_openai_response( @@ -160,7 +160,7 @@ class TestIntegrationWorkflow: self, responses_impl_with_conversations, mock_conversations_api ): """Test creating a response with a valid conversation parameter.""" - mock_conversations_api.list_items.return_value = ConversationItemList( + 
mock_conversations_api.list.return_value = ConversationItemList( data=[], first_id=None, has_more=False, last_id=None, object="list" ) @@ -227,7 +227,7 @@ class TestIntegrationWorkflow: self, responses_impl_with_conversations, mock_conversations_api ): """Test creating a response with a non-existent conversation.""" - mock_conversations_api.list_items.side_effect = ConversationNotFoundError("conv_nonexistent") + mock_conversations_api.list.side_effect = ConversationNotFoundError("conv_nonexistent") with pytest.raises(ConversationNotFoundError) as exc_info: await responses_impl_with_conversations.create_openai_response( diff --git a/tests/unit/providers/utils/inference/test_openai_mixin.py b/tests/unit/providers/utils/inference/test_openai_mixin.py index d98c096aa..61a1f8f61 100644 --- a/tests/unit/providers/utils/inference/test_openai_mixin.py +++ b/tests/unit/providers/utils/inference/test_openai_mixin.py @@ -38,28 +38,6 @@ class OpenAIMixinWithEmbeddingsImpl(OpenAIMixinImpl): } -class OpenAIMixinWithCustomModelConstruction(OpenAIMixinImpl): - """Test implementation that uses construct_model_from_identifier to add rerank models""" - - embedding_model_metadata: dict[str, dict[str, int]] = { - "text-embedding-3-small": {"embedding_dimension": 1536, "context_length": 8192}, - "text-embedding-ada-002": {"embedding_dimension": 1536, "context_length": 8192}, - } - - # Adds rerank models via construct_model_from_identifier - rerank_model_ids: set[str] = {"rerank-model-1", "rerank-model-2"} - - def construct_model_from_identifier(self, identifier: str) -> Model: - if identifier in self.rerank_model_ids: - return Model( - provider_id=self.__provider_id__, # type: ignore[attr-defined] - provider_resource_id=identifier, - identifier=identifier, - model_type=ModelType.rerank, - ) - return super().construct_model_from_identifier(identifier) - - @pytest.fixture def mixin(): """Create a test instance of OpenAIMixin with mocked model_store""" @@ -84,13 +62,6 @@ def mixin_with_embeddings(): return OpenAIMixinWithEmbeddingsImpl(config=config) -@pytest.fixture -def mixin_with_custom_model_construction(): - """Create a test instance using custom construct_model_from_identifier""" - config = RemoteInferenceProviderConfig() - return OpenAIMixinWithCustomModelConstruction(config=config) - - @pytest.fixture def mock_models(): """Create multiple mock OpenAI model objects""" @@ -142,19 +113,6 @@ def mock_client_context(): return _mock_client_context -def _assert_models_match_expected(actual_models, expected_models): - """Verify the models match expected attributes. 
- - Args: - actual_models: List of models to verify - expected_models: Mapping of model identifier to expected attribute values - """ - for identifier, expected_attrs in expected_models.items(): - model = next(m for m in actual_models if m.identifier == identifier) - for attr_name, expected_value in expected_attrs.items(): - assert getattr(model, attr_name) == expected_value - - class TestOpenAIMixinListModels: """Test cases for the list_models method""" @@ -384,71 +342,21 @@ class TestOpenAIMixinEmbeddingModelMetadata: assert result is not None assert len(result) == 2 - expected_models = { - "text-embedding-3-small": { - "model_type": ModelType.embedding, - "metadata": {"embedding_dimension": 1536, "context_length": 8192}, - "provider_id": "test-provider", - "provider_resource_id": "text-embedding-3-small", - }, - "gpt-4": { - "model_type": ModelType.llm, - "metadata": {}, - "provider_id": "test-provider", - "provider_resource_id": "gpt-4", - }, - } + # Find the models in the result + embedding_model = next(m for m in result if m.identifier == "text-embedding-3-small") + llm_model = next(m for m in result if m.identifier == "gpt-4") - _assert_models_match_expected(result, expected_models) + # Check embedding model + assert embedding_model.model_type == ModelType.embedding + assert embedding_model.metadata == {"embedding_dimension": 1536, "context_length": 8192} + assert embedding_model.provider_id == "test-provider" + assert embedding_model.provider_resource_id == "text-embedding-3-small" - -class TestOpenAIMixinCustomModelConstruction: - """Test cases for mixed model types (LLM, embedding, rerank) through construct_model_from_identifier""" - - async def test_mixed_model_types_identification(self, mixin_with_custom_model_construction, mock_client_context): - """Test that LLM, embedding, and rerank models are correctly identified with proper types and metadata""" - # Create mock models: 1 embedding, 1 rerank, 1 LLM - mock_embedding_model = MagicMock(id="text-embedding-3-small") - mock_rerank_model = MagicMock(id="rerank-model-1") - mock_llm_model = MagicMock(id="gpt-4") - mock_models = [mock_embedding_model, mock_rerank_model, mock_llm_model] - - mock_client = MagicMock() - - async def mock_models_list(): - for model in mock_models: - yield model - - mock_client.models.list.return_value = mock_models_list() - - with mock_client_context(mixin_with_custom_model_construction, mock_client): - result = await mixin_with_custom_model_construction.list_models() - - assert result is not None - assert len(result) == 3 - - expected_models = { - "text-embedding-3-small": { - "model_type": ModelType.embedding, - "metadata": {"embedding_dimension": 1536, "context_length": 8192}, - "provider_id": "test-provider", - "provider_resource_id": "text-embedding-3-small", - }, - "rerank-model-1": { - "model_type": ModelType.rerank, - "metadata": {}, - "provider_id": "test-provider", - "provider_resource_id": "rerank-model-1", - }, - "gpt-4": { - "model_type": ModelType.llm, - "metadata": {}, - "provider_id": "test-provider", - "provider_resource_id": "gpt-4", - }, - } - - _assert_models_match_expected(result, expected_models) + # Check LLM model + assert llm_model.model_type == ModelType.llm + assert llm_model.metadata == {} # No metadata for LLMs + assert llm_model.provider_id == "test-provider" + assert llm_model.provider_resource_id == "gpt-4" class TestOpenAIMixinAllowedModels: diff --git a/tests/unit/providers/vector_io/conftest.py b/tests/unit/providers/vector_io/conftest.py index 2951ca2e5..c78596018 
100644 --- a/tests/unit/providers/vector_io/conftest.py +++ b/tests/unit/providers/vector_io/conftest.py @@ -10,8 +10,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import numpy as np import pytest +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, ChunkMetadata, QueryChunksResponse -from llama_stack.apis.vector_stores import VectorStore from llama_stack.core.storage.datatypes import KVStoreReference, SqliteKVStoreConfig from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig from llama_stack.providers.inline.vector_io.faiss.faiss import FaissIndex, FaissVectorIOAdapter @@ -31,7 +31,7 @@ def vector_provider(request): @pytest.fixture -def vector_store_id() -> str: +def vector_db_id() -> str: return f"test-vector-db-{random.randint(1, 100)}" @@ -149,8 +149,8 @@ async def sqlite_vec_adapter(sqlite_vec_db_path, unique_kvstore_config, mock_inf ) collection_id = f"sqlite_test_collection_{np.random.randint(1e6)}" await adapter.initialize() - await adapter.register_vector_store( - VectorStore( + await adapter.register_vector_db( + VectorDB( identifier=collection_id, provider_id="test_provider", embedding_model="test_model", @@ -186,8 +186,8 @@ async def faiss_vec_adapter(unique_kvstore_config, mock_inference_api, embedding files_api=None, ) await adapter.initialize() - await adapter.register_vector_store( - VectorStore( + await adapter.register_vector_db( + VectorDB( identifier=f"faiss_test_collection_{np.random.randint(1e6)}", provider_id="test_provider", embedding_model="test_model", @@ -215,7 +215,7 @@ def mock_psycopg2_connection(): async def pgvector_vec_index(embedding_dimension, mock_psycopg2_connection): connection, cursor = mock_psycopg2_connection - vector_store = VectorStore( + vector_db = VectorDB( identifier="test-vector-db", embedding_model="test-model", embedding_dimension=embedding_dimension, @@ -225,7 +225,7 @@ async def pgvector_vec_index(embedding_dimension, mock_psycopg2_connection): with patch("llama_stack.providers.remote.vector_io.pgvector.pgvector.psycopg2"): with patch("llama_stack.providers.remote.vector_io.pgvector.pgvector.execute_values"): - index = PGVectorIndex(vector_store, embedding_dimension, connection, distance_metric="COSINE") + index = PGVectorIndex(vector_db, embedding_dimension, connection, distance_metric="COSINE") index._test_chunks = [] original_add_chunks = index.add_chunks @@ -281,30 +281,30 @@ async def pgvector_vec_adapter(unique_kvstore_config, mock_inference_api, embedd await adapter.initialize() adapter.conn = mock_conn - async def mock_insert_chunks(vector_store_id, chunks, ttl_seconds=None): - index = await adapter._get_and_cache_vector_store_index(vector_store_id) + async def mock_insert_chunks(vector_db_id, chunks, ttl_seconds=None): + index = await adapter._get_and_cache_vector_db_index(vector_db_id) if not index: - raise ValueError(f"Vector DB {vector_store_id} not found") + raise ValueError(f"Vector DB {vector_db_id} not found") await index.insert_chunks(chunks) adapter.insert_chunks = mock_insert_chunks - async def mock_query_chunks(vector_store_id, query, params=None): - index = await adapter._get_and_cache_vector_store_index(vector_store_id) + async def mock_query_chunks(vector_db_id, query, params=None): + index = await adapter._get_and_cache_vector_db_index(vector_db_id) if not index: - raise ValueError(f"Vector DB {vector_store_id} not found") + raise ValueError(f"Vector DB {vector_db_id} not found") return await index.query_chunks(query, params) 
adapter.query_chunks = mock_query_chunks - test_vector_store = VectorStore( + test_vector_db = VectorDB( identifier=f"pgvector_test_collection_{random.randint(1, 1_000_000)}", provider_id="test_provider", embedding_model="test_model", embedding_dimension=embedding_dimension, ) - await adapter.register_vector_store(test_vector_store) - adapter.test_collection_id = test_vector_store.identifier + await adapter.register_vector_db(test_vector_db) + adapter.test_collection_id = test_vector_db.identifier yield adapter await adapter.shutdown() diff --git a/tests/unit/providers/vector_io/test_faiss.py b/tests/unit/providers/vector_io/test_faiss.py index 7b870d16e..fa5c5f56b 100644 --- a/tests/unit/providers/vector_io/test_faiss.py +++ b/tests/unit/providers/vector_io/test_faiss.py @@ -11,8 +11,8 @@ import numpy as np import pytest from llama_stack.apis.files import Files +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import Chunk, QueryChunksResponse -from llama_stack.apis.vector_stores import VectorStore from llama_stack.providers.datatypes import HealthStatus from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig from llama_stack.providers.inline.vector_io.faiss.faiss import ( @@ -43,8 +43,8 @@ def embedding_dimension(): @pytest.fixture -def vector_store_id(): - return "test_vector_store" +def vector_db_id(): + return "test_vector_db" @pytest.fixture @@ -61,12 +61,12 @@ def sample_embeddings(embedding_dimension): @pytest.fixture -def mock_vector_store(vector_store_id, embedding_dimension) -> MagicMock: - mock_vector_store = MagicMock(spec=VectorStore) - mock_vector_store.embedding_model = "mock_embedding_model" - mock_vector_store.identifier = vector_store_id - mock_vector_store.embedding_dimension = embedding_dimension - return mock_vector_store +def mock_vector_db(vector_db_id, embedding_dimension) -> MagicMock: + mock_vector_db = MagicMock(spec=VectorDB) + mock_vector_db.embedding_model = "mock_embedding_model" + mock_vector_db.identifier = vector_db_id + mock_vector_db.embedding_dimension = embedding_dimension + return mock_vector_db @pytest.fixture diff --git a/tests/unit/providers/vector_io/test_vector_io_openai_vector_stores.py b/tests/unit/providers/vector_io/test_vector_io_openai_vector_stores.py index 9d9c767f6..ad55b9336 100644 --- a/tests/unit/providers/vector_io/test_vector_io_openai_vector_stores.py +++ b/tests/unit/providers/vector_io/test_vector_io_openai_vector_stores.py @@ -12,6 +12,7 @@ import numpy as np import pytest from llama_stack.apis.common.errors import VectorStoreNotFoundError +from llama_stack.apis.vector_dbs import VectorDB from llama_stack.apis.vector_io import ( Chunk, OpenAICreateVectorStoreFileBatchRequestWithExtraBody, @@ -20,7 +21,6 @@ from llama_stack.apis.vector_io import ( VectorStoreChunkingStrategyAuto, VectorStoreFileObject, ) -from llama_stack.apis.vector_stores import VectorStore from llama_stack.providers.inline.vector_io.sqlite_vec.sqlite_vec import VECTOR_DBS_PREFIX # This test is a unit test for the inline VectorIO providers. 
This should only contain @@ -71,7 +71,7 @@ async def test_chunk_id_conflict(vector_index, sample_chunks, embedding_dimensio async def test_initialize_adapter_with_existing_kvstore(vector_io_adapter): key = f"{VECTOR_DBS_PREFIX}db1" - dummy = VectorStore( + dummy = VectorDB( identifier="foo_db", provider_id="test_provider", embedding_model="test_model", embedding_dimension=128 ) await vector_io_adapter.kvstore.set(key=key, value=json.dumps(dummy.model_dump())) @@ -81,10 +81,10 @@ async def test_initialize_adapter_with_existing_kvstore(vector_io_adapter): async def test_persistence_across_adapter_restarts(vector_io_adapter): await vector_io_adapter.initialize() - dummy = VectorStore( + dummy = VectorDB( identifier="foo_db", provider_id="test_provider", embedding_model="test_model", embedding_dimension=128 ) - await vector_io_adapter.register_vector_store(dummy) + await vector_io_adapter.register_vector_db(dummy) await vector_io_adapter.shutdown() await vector_io_adapter.initialize() @@ -92,15 +92,15 @@ async def test_persistence_across_adapter_restarts(vector_io_adapter): await vector_io_adapter.shutdown() -async def test_register_and_unregister_vector_store(vector_io_adapter): +async def test_register_and_unregister_vector_db(vector_io_adapter): unique_id = f"foo_db_{np.random.randint(1e6)}" - dummy = VectorStore( + dummy = VectorDB( identifier=unique_id, provider_id="test_provider", embedding_model="test_model", embedding_dimension=128 ) - await vector_io_adapter.register_vector_store(dummy) + await vector_io_adapter.register_vector_db(dummy) assert dummy.identifier in vector_io_adapter.cache - await vector_io_adapter.unregister_vector_store(dummy.identifier) + await vector_io_adapter.unregister_vector_db(dummy.identifier) assert dummy.identifier not in vector_io_adapter.cache @@ -121,7 +121,7 @@ async def test_insert_chunks_calls_underlying_index(vector_io_adapter): async def test_insert_chunks_missing_db_raises(vector_io_adapter): - vector_io_adapter._get_and_cache_vector_store_index = AsyncMock(return_value=None) + vector_io_adapter._get_and_cache_vector_db_index = AsyncMock(return_value=None) with pytest.raises(ValueError): await vector_io_adapter.insert_chunks("db_not_exist", []) @@ -170,7 +170,7 @@ async def test_query_chunks_calls_underlying_index_and_returns(vector_io_adapter async def test_query_chunks_missing_db_raises(vector_io_adapter): - vector_io_adapter._get_and_cache_vector_store_index = AsyncMock(return_value=None) + vector_io_adapter._get_and_cache_vector_db_index = AsyncMock(return_value=None) with pytest.raises(ValueError): await vector_io_adapter.query_chunks("db_missing", "q", None) @@ -182,7 +182,7 @@ async def test_save_openai_vector_store(vector_io_adapter): "id": store_id, "name": "Test Store", "description": "A test OpenAI vector store", - "vector_store_id": "test_db", + "vector_db_id": "test_db", "embedding_model": "test_model", } @@ -198,7 +198,7 @@ async def test_update_openai_vector_store(vector_io_adapter): "id": store_id, "name": "Test Store", "description": "A test OpenAI vector store", - "vector_store_id": "test_db", + "vector_db_id": "test_db", "embedding_model": "test_model", } @@ -214,7 +214,7 @@ async def test_delete_openai_vector_store(vector_io_adapter): "id": store_id, "name": "Test Store", "description": "A test OpenAI vector store", - "vector_store_id": "test_db", + "vector_db_id": "test_db", "embedding_model": "test_model", } @@ -229,7 +229,7 @@ async def test_load_openai_vector_stores(vector_io_adapter): "id": store_id, "name": "Test Store", 
"description": "A test OpenAI vector store", - "vector_store_id": "test_db", + "vector_db_id": "test_db", "embedding_model": "test_model", } @@ -998,8 +998,8 @@ async def test_max_concurrent_files_per_batch(vector_io_adapter): async def test_embedding_config_from_metadata(vector_io_adapter): """Test that embedding configuration is correctly extracted from metadata.""" - # Mock register_vector_store to avoid actual registration - vector_io_adapter.register_vector_store = AsyncMock() + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() # Set provider_id attribute for the adapter vector_io_adapter.__provider_id__ = "test_provider" @@ -1015,9 +1015,9 @@ async def test_embedding_config_from_metadata(vector_io_adapter): await vector_io_adapter.openai_create_vector_store(params) - # Verify VectorStore was registered with correct embedding config from metadata - vector_io_adapter.register_vector_store.assert_called_once() - call_args = vector_io_adapter.register_vector_store.call_args[0][0] + # Verify VectorDB was registered with correct embedding config from metadata + vector_io_adapter.register_vector_db.assert_called_once() + call_args = vector_io_adapter.register_vector_db.call_args[0][0] assert call_args.embedding_model == "test-embedding-model" assert call_args.embedding_dimension == 512 @@ -1025,8 +1025,8 @@ async def test_embedding_config_from_metadata(vector_io_adapter): async def test_embedding_config_from_extra_body(vector_io_adapter): """Test that embedding configuration is correctly extracted from extra_body when metadata is empty.""" - # Mock register_vector_store to avoid actual registration - vector_io_adapter.register_vector_store = AsyncMock() + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() # Set provider_id attribute for the adapter vector_io_adapter.__provider_id__ = "test_provider" @@ -1042,9 +1042,9 @@ async def test_embedding_config_from_extra_body(vector_io_adapter): await vector_io_adapter.openai_create_vector_store(params) - # Verify VectorStore was registered with correct embedding config from extra_body - vector_io_adapter.register_vector_store.assert_called_once() - call_args = vector_io_adapter.register_vector_store.call_args[0][0] + # Verify VectorDB was registered with correct embedding config from extra_body + vector_io_adapter.register_vector_db.assert_called_once() + call_args = vector_io_adapter.register_vector_db.call_args[0][0] assert call_args.embedding_model == "extra-body-model" assert call_args.embedding_dimension == 1024 @@ -1052,8 +1052,8 @@ async def test_embedding_config_from_extra_body(vector_io_adapter): async def test_embedding_config_consistency_check_passes(vector_io_adapter): """Test that consistent embedding config in both metadata and extra_body passes validation.""" - # Mock register_vector_store to avoid actual registration - vector_io_adapter.register_vector_store = AsyncMock() + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() # Set provider_id attribute for the adapter vector_io_adapter.__provider_id__ = "test_provider" @@ -1073,17 +1073,61 @@ async def test_embedding_config_consistency_check_passes(vector_io_adapter): await vector_io_adapter.openai_create_vector_store(params) # Should not raise any error and use metadata config - vector_io_adapter.register_vector_store.assert_called_once() - call_args = vector_io_adapter.register_vector_store.call_args[0][0] + 
vector_io_adapter.register_vector_db.assert_called_once() + call_args = vector_io_adapter.register_vector_db.call_args[0][0] assert call_args.embedding_model == "consistent-model" assert call_args.embedding_dimension == 768 +async def test_embedding_config_inconsistency_errors(vector_io_adapter): + """Test that inconsistent embedding config between metadata and extra_body raises errors.""" + + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() + # Set provider_id attribute for the adapter + vector_io_adapter.__provider_id__ = "test_provider" + + # Test with inconsistent embedding model + params = OpenAICreateVectorStoreRequestWithExtraBody( + name="test_store", + metadata={ + "embedding_model": "metadata-model", + "embedding_dimension": "768", + }, + **{ + "embedding_model": "extra-body-model", + "embedding_dimension": 768, + }, + ) + + with pytest.raises(ValueError, match="Embedding model inconsistent between metadata"): + await vector_io_adapter.openai_create_vector_store(params) + + # Reset mock for second test + vector_io_adapter.register_vector_db.reset_mock() + + # Test with inconsistent embedding dimension + params = OpenAICreateVectorStoreRequestWithExtraBody( + name="test_store", + metadata={ + "embedding_model": "same-model", + "embedding_dimension": "512", + }, + **{ + "embedding_model": "same-model", + "embedding_dimension": 1024, + }, + ) + + with pytest.raises(ValueError, match="Embedding dimension inconsistent between metadata"): + await vector_io_adapter.openai_create_vector_store(params) + + async def test_embedding_config_defaults_when_missing(vector_io_adapter): """Test that embedding dimension defaults to 768 when not provided.""" - # Mock register_vector_store to avoid actual registration - vector_io_adapter.register_vector_store = AsyncMock() + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() # Set provider_id attribute for the adapter vector_io_adapter.__provider_id__ = "test_provider" @@ -1099,8 +1143,8 @@ async def test_embedding_config_defaults_when_missing(vector_io_adapter): await vector_io_adapter.openai_create_vector_store(params) # Should default to 768 dimensions - vector_io_adapter.register_vector_store.assert_called_once() - call_args = vector_io_adapter.register_vector_store.call_args[0][0] + vector_io_adapter.register_vector_db.assert_called_once() + call_args = vector_io_adapter.register_vector_db.call_args[0][0] assert call_args.embedding_model == "model-without-dimension" assert call_args.embedding_dimension == 768 @@ -1108,8 +1152,8 @@ async def test_embedding_config_defaults_when_missing(vector_io_adapter): async def test_embedding_config_required_model_missing(vector_io_adapter): """Test that missing embedding model raises error.""" - # Mock register_vector_store to avoid actual registration - vector_io_adapter.register_vector_store = AsyncMock() + # Mock register_vector_db to avoid actual registration + vector_io_adapter.register_vector_db = AsyncMock() # Set provider_id attribute for the adapter vector_io_adapter.__provider_id__ = "test_provider" # Mock the default model lookup to return None (no default model available) diff --git a/tests/unit/rag/test_rag_query.py b/tests/unit/rag/test_rag_query.py index c012bc4f0..a45b66f02 100644 --- a/tests/unit/rag/test_rag_query.py +++ b/tests/unit/rag/test_rag_query.py @@ -18,7 +18,7 @@ from llama_stack.providers.inline.tool_runtime.rag.memory import MemoryToolRunti class TestRagQuery: - 
async def test_query_raises_on_empty_vector_store_ids(self): + async def test_query_raises_on_empty_vector_db_ids(self): rag_tool = MemoryToolRuntimeImpl( config=MagicMock(), vector_io_api=MagicMock(), inference_api=MagicMock(), files_api=MagicMock() ) @@ -82,7 +82,7 @@ class TestRagQuery: with pytest.raises(ValueError): RAGQueryConfig(mode="wrong_mode") - async def test_query_adds_vector_store_id_to_chunk_metadata(self): + async def test_query_adds_vector_db_id_to_chunk_metadata(self): rag_tool = MemoryToolRuntimeImpl( config=MagicMock(), vector_io_api=MagicMock(), diff --git a/tests/unit/rag/test_vector_store.py b/tests/unit/rag/test_vector_store.py index 200da5c26..1e40c98e8 100644 --- a/tests/unit/rag/test_vector_store.py +++ b/tests/unit/rag/test_vector_store.py @@ -21,7 +21,7 @@ from llama_stack.apis.tools import RAGDocument from llama_stack.apis.vector_io import Chunk from llama_stack.providers.utils.memory.vector_store import ( URL, - VectorStoreWithIndex, + VectorDBWithIndex, _validate_embedding, content_from_doc, make_overlapped_chunks, @@ -206,15 +206,15 @@ class TestVectorStore: assert str(excinfo.value.__cause__) == "Cannot convert to string" -class TestVectorStoreWithIndex: +class TestVectorDBWithIndex: async def test_insert_chunks_without_embeddings(self): - mock_vector_store = MagicMock() - mock_vector_store.embedding_model = "test-model without embeddings" + mock_vector_db = MagicMock() + mock_vector_db.embedding_model = "test-model without embeddings" mock_index = AsyncMock() mock_inference_api = AsyncMock() - vector_store_with_index = VectorStoreWithIndex( - vector_store=mock_vector_store, index=mock_index, inference_api=mock_inference_api + vector_db_with_index = VectorDBWithIndex( + vector_db=mock_vector_db, index=mock_index, inference_api=mock_inference_api ) chunks = [ @@ -227,7 +227,7 @@ class TestVectorStoreWithIndex: OpenAIEmbeddingData(embedding=[0.4, 0.5, 0.6], index=1), ] - await vector_store_with_index.insert_chunks(chunks) + await vector_db_with_index.insert_chunks(chunks) # Verify openai_embeddings was called with correct params mock_inference_api.openai_embeddings.assert_called_once() @@ -243,14 +243,14 @@ class TestVectorStoreWithIndex: assert np.array_equal(args[1], np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]], dtype=np.float32)) async def test_insert_chunks_with_valid_embeddings(self): - mock_vector_store = MagicMock() - mock_vector_store.embedding_model = "test-model with embeddings" - mock_vector_store.embedding_dimension = 3 + mock_vector_db = MagicMock() + mock_vector_db.embedding_model = "test-model with embeddings" + mock_vector_db.embedding_dimension = 3 mock_index = AsyncMock() mock_inference_api = AsyncMock() - vector_store_with_index = VectorStoreWithIndex( - vector_store=mock_vector_store, index=mock_index, inference_api=mock_inference_api + vector_db_with_index = VectorDBWithIndex( + vector_db=mock_vector_db, index=mock_index, inference_api=mock_inference_api ) chunks = [ @@ -258,7 +258,7 @@ class TestVectorStoreWithIndex: Chunk(content="Test 2", embedding=[0.4, 0.5, 0.6], metadata={}), ] - await vector_store_with_index.insert_chunks(chunks) + await vector_db_with_index.insert_chunks(chunks) mock_inference_api.openai_embeddings.assert_not_called() mock_index.add_chunks.assert_called_once() @@ -267,14 +267,14 @@ class TestVectorStoreWithIndex: assert np.array_equal(args[1], np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]], dtype=np.float32)) async def test_insert_chunks_with_invalid_embeddings(self): - mock_vector_store = MagicMock() - 
mock_vector_store.embedding_dimension = 3 - mock_vector_store.embedding_model = "test-model with invalid embeddings" + mock_vector_db = MagicMock() + mock_vector_db.embedding_dimension = 3 + mock_vector_db.embedding_model = "test-model with invalid embeddings" mock_index = AsyncMock() mock_inference_api = AsyncMock() - vector_store_with_index = VectorStoreWithIndex( - vector_store=mock_vector_store, index=mock_index, inference_api=mock_inference_api + vector_db_with_index = VectorDBWithIndex( + vector_db=mock_vector_db, index=mock_index, inference_api=mock_inference_api ) # Verify Chunk raises ValueError for invalid embedding type @@ -283,7 +283,7 @@ class TestVectorStoreWithIndex: # Verify Chunk raises ValueError for invalid embedding type in insert_chunks (i.e., Chunk errors before insert_chunks is called) with pytest.raises(ValueError, match="Input should be a valid list"): - await vector_store_with_index.insert_chunks( + await vector_db_with_index.insert_chunks( [ Chunk(content="Test 1", embedding=None, metadata={}), Chunk(content="Test 2", embedding="invalid_type", metadata={}), @@ -292,7 +292,7 @@ class TestVectorStoreWithIndex: # Verify Chunk raises ValueError for invalid embedding element type in insert_chunks (i.e., Chunk errors before insert_chunks is called) with pytest.raises(ValueError, match=" Input should be a valid number, unable to parse string as a number "): - await vector_store_with_index.insert_chunks( + await vector_db_with_index.insert_chunks( Chunk(content="Test 1", embedding=[0.1, "string", 0.3], metadata={}) ) @@ -300,20 +300,20 @@ class TestVectorStoreWithIndex: Chunk(content="Test 1", embedding=[0.1, 0.2, 0.3, 0.4], metadata={}), ] with pytest.raises(ValueError, match="has dimension 4, expected 3"): - await vector_store_with_index.insert_chunks(chunks_wrong_dim) + await vector_db_with_index.insert_chunks(chunks_wrong_dim) mock_inference_api.openai_embeddings.assert_not_called() mock_index.add_chunks.assert_not_called() async def test_insert_chunks_with_partially_precomputed_embeddings(self): - mock_vector_store = MagicMock() - mock_vector_store.embedding_model = "test-model with partial embeddings" - mock_vector_store.embedding_dimension = 3 + mock_vector_db = MagicMock() + mock_vector_db.embedding_model = "test-model with partial embeddings" + mock_vector_db.embedding_dimension = 3 mock_index = AsyncMock() mock_inference_api = AsyncMock() - vector_store_with_index = VectorStoreWithIndex( - vector_store=mock_vector_store, index=mock_index, inference_api=mock_inference_api + vector_db_with_index = VectorDBWithIndex( + vector_db=mock_vector_db, index=mock_index, inference_api=mock_inference_api ) chunks = [ @@ -327,7 +327,7 @@ class TestVectorStoreWithIndex: OpenAIEmbeddingData(embedding=[0.3, 0.3, 0.3], index=1), ] - await vector_store_with_index.insert_chunks(chunks) + await vector_db_with_index.insert_chunks(chunks) # Verify openai_embeddings was called with correct params mock_inference_api.openai_embeddings.assert_called_once() diff --git a/tests/unit/registry/test_registry.py b/tests/unit/registry/test_registry.py index d4c9786d1..95022ad33 100644 --- a/tests/unit/registry/test_registry.py +++ b/tests/unit/registry/test_registry.py @@ -8,8 +8,8 @@ import pytest from llama_stack.apis.inference import Model -from llama_stack.apis.vector_stores import VectorStore -from llama_stack.core.datatypes import VectorStoreWithOwner +from llama_stack.apis.vector_dbs import VectorDB +from llama_stack.core.datatypes import VectorDBWithOwner from 
llama_stack.core.storage.datatypes import KVStoreReference, SqliteKVStoreConfig from llama_stack.core.store.registry import ( KEY_FORMAT, @@ -20,12 +20,12 @@ from llama_stack.providers.utils.kvstore import kvstore_impl, register_kvstore_b @pytest.fixture -def sample_vector_store(): - return VectorStore( - identifier="test_vector_store", +def sample_vector_db(): + return VectorDB( + identifier="test_vector_db", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, - provider_resource_id="test_vector_store", + provider_resource_id="test_vector_db", provider_id="test-provider", ) @@ -45,17 +45,17 @@ async def test_registry_initialization(disk_dist_registry): assert result is None -async def test_basic_registration(disk_dist_registry, sample_vector_store, sample_model): - print(f"Registering {sample_vector_store}") - await disk_dist_registry.register(sample_vector_store) +async def test_basic_registration(disk_dist_registry, sample_vector_db, sample_model): + print(f"Registering {sample_vector_db}") + await disk_dist_registry.register(sample_vector_db) print(f"Registering {sample_model}") await disk_dist_registry.register(sample_model) - print("Getting vector_store") - result_vector_store = await disk_dist_registry.get("vector_store", "test_vector_store") - assert result_vector_store is not None - assert result_vector_store.identifier == sample_vector_store.identifier - assert result_vector_store.embedding_model == sample_vector_store.embedding_model - assert result_vector_store.provider_id == sample_vector_store.provider_id + print("Getting vector_db") + result_vector_db = await disk_dist_registry.get("vector_db", "test_vector_db") + assert result_vector_db is not None + assert result_vector_db.identifier == sample_vector_db.identifier + assert result_vector_db.embedding_model == sample_vector_db.embedding_model + assert result_vector_db.provider_id == sample_vector_db.provider_id result_model = await disk_dist_registry.get("model", "test_model") assert result_model is not None @@ -63,11 +63,11 @@ async def test_basic_registration(disk_dist_registry, sample_vector_store, sampl assert result_model.provider_id == sample_model.provider_id -async def test_cached_registry_initialization(sqlite_kvstore, sample_vector_store, sample_model): +async def test_cached_registry_initialization(sqlite_kvstore, sample_vector_db, sample_model): # First populate the disk registry disk_registry = DiskDistributionRegistry(sqlite_kvstore) await disk_registry.initialize() - await disk_registry.register(sample_vector_store) + await disk_registry.register(sample_vector_db) await disk_registry.register(sample_model) # Test cached version loads from disk @@ -79,29 +79,29 @@ async def test_cached_registry_initialization(sqlite_kvstore, sample_vector_stor ) await cached_registry.initialize() - result_vector_store = await cached_registry.get("vector_store", "test_vector_store") - assert result_vector_store is not None - assert result_vector_store.identifier == sample_vector_store.identifier - assert result_vector_store.embedding_model == sample_vector_store.embedding_model - assert result_vector_store.embedding_dimension == sample_vector_store.embedding_dimension - assert result_vector_store.provider_id == sample_vector_store.provider_id + result_vector_db = await cached_registry.get("vector_db", "test_vector_db") + assert result_vector_db is not None + assert result_vector_db.identifier == sample_vector_db.identifier + assert result_vector_db.embedding_model == sample_vector_db.embedding_model + assert 
result_vector_db.embedding_dimension == sample_vector_db.embedding_dimension + assert result_vector_db.provider_id == sample_vector_db.provider_id async def test_cached_registry_updates(cached_disk_dist_registry): - new_vector_store = VectorStore( - identifier="test_vector_store_2", + new_vector_db = VectorDB( + identifier="test_vector_db_2", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, - provider_resource_id="test_vector_store_2", + provider_resource_id="test_vector_db_2", provider_id="baz", ) - await cached_disk_dist_registry.register(new_vector_store) + await cached_disk_dist_registry.register(new_vector_db) # Verify in cache - result_vector_store = await cached_disk_dist_registry.get("vector_store", "test_vector_store_2") - assert result_vector_store is not None - assert result_vector_store.identifier == new_vector_store.identifier - assert result_vector_store.provider_id == new_vector_store.provider_id + result_vector_db = await cached_disk_dist_registry.get("vector_db", "test_vector_db_2") + assert result_vector_db is not None + assert result_vector_db.identifier == new_vector_db.identifier + assert result_vector_db.provider_id == new_vector_db.provider_id # Verify persisted to disk db_path = cached_disk_dist_registry.kvstore.db_path @@ -111,89 +111,87 @@ async def test_cached_registry_updates(cached_disk_dist_registry): await kvstore_impl(KVStoreReference(backend=backend_name, namespace="registry")) ) await new_registry.initialize() - result_vector_store = await new_registry.get("vector_store", "test_vector_store_2") - assert result_vector_store is not None - assert result_vector_store.identifier == new_vector_store.identifier - assert result_vector_store.provider_id == new_vector_store.provider_id + result_vector_db = await new_registry.get("vector_db", "test_vector_db_2") + assert result_vector_db is not None + assert result_vector_db.identifier == new_vector_db.identifier + assert result_vector_db.provider_id == new_vector_db.provider_id async def test_duplicate_provider_registration(cached_disk_dist_registry): - original_vector_store = VectorStore( - identifier="test_vector_store_2", + original_vector_db = VectorDB( + identifier="test_vector_db_2", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, - provider_resource_id="test_vector_store_2", + provider_resource_id="test_vector_db_2", provider_id="baz", ) - assert await cached_disk_dist_registry.register(original_vector_store) + assert await cached_disk_dist_registry.register(original_vector_db) - duplicate_vector_store = VectorStore( - identifier="test_vector_store_2", + duplicate_vector_db = VectorDB( + identifier="test_vector_db_2", embedding_model="different-model", embedding_dimension=768, - provider_resource_id="test_vector_store_2", + provider_resource_id="test_vector_db_2", provider_id="baz", # Same provider_id ) - with pytest.raises( - ValueError, match="Object of type 'vector_store' and identifier 'test_vector_store_2' already exists" - ): - await cached_disk_dist_registry.register(duplicate_vector_store) + with pytest.raises(ValueError, match="Object of type 'vector_db' and identifier 'test_vector_db_2' already exists"): + await cached_disk_dist_registry.register(duplicate_vector_db) - result = await cached_disk_dist_registry.get("vector_store", "test_vector_store_2") + result = await cached_disk_dist_registry.get("vector_db", "test_vector_db_2") assert result is not None - assert result.embedding_model == original_vector_store.embedding_model # Original values preserved + assert 
result.embedding_model == original_vector_db.embedding_model # Original values preserved async def test_get_all_objects(cached_disk_dist_registry): # Create multiple test banks # Create multiple test banks - test_vector_stores = [ - VectorStore( - identifier=f"test_vector_store_{i}", + test_vector_dbs = [ + VectorDB( + identifier=f"test_vector_db_{i}", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, - provider_resource_id=f"test_vector_store_{i}", + provider_resource_id=f"test_vector_db_{i}", provider_id=f"provider_{i}", ) for i in range(3) ] - # Register all vector_stores - for vector_store in test_vector_stores: - await cached_disk_dist_registry.register(vector_store) + # Register all vector_dbs + for vector_db in test_vector_dbs: + await cached_disk_dist_registry.register(vector_db) # Test get_all retrieval all_results = await cached_disk_dist_registry.get_all() assert len(all_results) == 3 - # Verify each vector_store was stored correctly - for original_vector_store in test_vector_stores: - matching_vector_stores = [v for v in all_results if v.identifier == original_vector_store.identifier] - assert len(matching_vector_stores) == 1 - stored_vector_store = matching_vector_stores[0] - assert stored_vector_store.embedding_model == original_vector_store.embedding_model - assert stored_vector_store.provider_id == original_vector_store.provider_id - assert stored_vector_store.embedding_dimension == original_vector_store.embedding_dimension + # Verify each vector_db was stored correctly + for original_vector_db in test_vector_dbs: + matching_vector_dbs = [v for v in all_results if v.identifier == original_vector_db.identifier] + assert len(matching_vector_dbs) == 1 + stored_vector_db = matching_vector_dbs[0] + assert stored_vector_db.embedding_model == original_vector_db.embedding_model + assert stored_vector_db.provider_id == original_vector_db.provider_id + assert stored_vector_db.embedding_dimension == original_vector_db.embedding_dimension async def test_parse_registry_values_error_handling(sqlite_kvstore): - valid_db = VectorStore( - identifier="valid_vector_store", + valid_db = VectorDB( + identifier="valid_vector_db", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, - provider_resource_id="valid_vector_store", + provider_resource_id="valid_vector_db", provider_id="test-provider", ) await sqlite_kvstore.set( - KEY_FORMAT.format(type="vector_store", identifier="valid_vector_store"), valid_db.model_dump_json() + KEY_FORMAT.format(type="vector_db", identifier="valid_vector_db"), valid_db.model_dump_json() ) - await sqlite_kvstore.set(KEY_FORMAT.format(type="vector_store", identifier="corrupted_json"), "{not valid json") + await sqlite_kvstore.set(KEY_FORMAT.format(type="vector_db", identifier="corrupted_json"), "{not valid json") await sqlite_kvstore.set( - KEY_FORMAT.format(type="vector_store", identifier="missing_fields"), - '{"type": "vector_store", "identifier": "missing_fields"}', + KEY_FORMAT.format(type="vector_db", identifier="missing_fields"), + '{"type": "vector_db", "identifier": "missing_fields"}', ) test_registry = DiskDistributionRegistry(sqlite_kvstore) @@ -204,18 +202,18 @@ async def test_parse_registry_values_error_handling(sqlite_kvstore): # Should have filtered out the invalid entries assert len(all_objects) == 1 - assert all_objects[0].identifier == "valid_vector_store" + assert all_objects[0].identifier == "valid_vector_db" # Check that the get method also handles errors correctly - invalid_obj = await 
test_registry.get("vector_store", "corrupted_json") + invalid_obj = await test_registry.get("vector_db", "corrupted_json") assert invalid_obj is None - invalid_obj = await test_registry.get("vector_store", "missing_fields") + invalid_obj = await test_registry.get("vector_db", "missing_fields") assert invalid_obj is None async def test_cached_registry_error_handling(sqlite_kvstore): - valid_db = VectorStore( + valid_db = VectorDB( identifier="valid_cached_db", embedding_model="nomic-embed-text-v1.5", embedding_dimension=768, @@ -224,12 +222,12 @@ async def test_cached_registry_error_handling(sqlite_kvstore): ) await sqlite_kvstore.set( - KEY_FORMAT.format(type="vector_store", identifier="valid_cached_db"), valid_db.model_dump_json() + KEY_FORMAT.format(type="vector_db", identifier="valid_cached_db"), valid_db.model_dump_json() ) await sqlite_kvstore.set( - KEY_FORMAT.format(type="vector_store", identifier="invalid_cached_db"), - '{"type": "vector_store", "identifier": "invalid_cached_db", "embedding_model": 12345}', # Should be string + KEY_FORMAT.format(type="vector_db", identifier="invalid_cached_db"), + '{"type": "vector_db", "identifier": "invalid_cached_db", "embedding_model": 12345}', # Should be string ) cached_registry = CachedDiskDistributionRegistry(sqlite_kvstore) @@ -239,65 +237,63 @@ async def test_cached_registry_error_handling(sqlite_kvstore): assert len(all_objects) == 1 assert all_objects[0].identifier == "valid_cached_db" - invalid_obj = await cached_registry.get("vector_store", "invalid_cached_db") + invalid_obj = await cached_registry.get("vector_db", "invalid_cached_db") assert invalid_obj is None async def test_double_registration_identical_objects(disk_dist_registry): """Test that registering identical objects succeeds (idempotent).""" - vector_store = VectorStoreWithOwner( - identifier="test_vector_store", + vector_db = VectorDBWithOwner( + identifier="test_vector_db", embedding_model="all-MiniLM-L6-v2", embedding_dimension=384, - provider_resource_id="test_vector_store", + provider_resource_id="test_vector_db", provider_id="test-provider", ) # First registration should succeed - result1 = await disk_dist_registry.register(vector_store) + result1 = await disk_dist_registry.register(vector_db) assert result1 is True # Second registration of identical object should also succeed (idempotent) - result2 = await disk_dist_registry.register(vector_store) + result2 = await disk_dist_registry.register(vector_db) assert result2 is True # Verify object exists and is unchanged - retrieved = await disk_dist_registry.get("vector_store", "test_vector_store") + retrieved = await disk_dist_registry.get("vector_db", "test_vector_db") assert retrieved is not None - assert retrieved.identifier == vector_store.identifier - assert retrieved.embedding_model == vector_store.embedding_model + assert retrieved.identifier == vector_db.identifier + assert retrieved.embedding_model == vector_db.embedding_model async def test_double_registration_different_objects(disk_dist_registry): """Test that registering different objects with same identifier fails.""" - vector_store1 = VectorStoreWithOwner( - identifier="test_vector_store", + vector_db1 = VectorDBWithOwner( + identifier="test_vector_db", embedding_model="all-MiniLM-L6-v2", embedding_dimension=384, - provider_resource_id="test_vector_store", + provider_resource_id="test_vector_db", provider_id="test-provider", ) - vector_store2 = VectorStoreWithOwner( - identifier="test_vector_store", # Same identifier + vector_db2 = VectorDBWithOwner( + 
identifier="test_vector_db", # Same identifier embedding_model="different-model", # Different embedding model embedding_dimension=384, - provider_resource_id="test_vector_store", + provider_resource_id="test_vector_db", provider_id="test-provider", ) # First registration should succeed - result1 = await disk_dist_registry.register(vector_store1) + result1 = await disk_dist_registry.register(vector_db1) assert result1 is True # Second registration with different data should fail - with pytest.raises( - ValueError, match="Object of type 'vector_store' and identifier 'test_vector_store' already exists" - ): - await disk_dist_registry.register(vector_store2) + with pytest.raises(ValueError, match="Object of type 'vector_db' and identifier 'test_vector_db' already exists"): + await disk_dist_registry.register(vector_db2) # Verify original object is unchanged - retrieved = await disk_dist_registry.get("vector_store", "test_vector_store") + retrieved = await disk_dist_registry.get("vector_db", "test_vector_db") assert retrieved is not None assert retrieved.embedding_model == "all-MiniLM-L6-v2" # Original value diff --git a/tests/unit/server/test_auth.py b/tests/unit/server/test_auth.py index cc9397f07..75cbf518b 100644 --- a/tests/unit/server/test_auth.py +++ b/tests/unit/server/test_auth.py @@ -6,7 +6,6 @@ import base64 import json -import logging # allow-direct-logging from unittest.mock import AsyncMock, Mock, patch import pytest @@ -28,13 +27,6 @@ from llama_stack.core.server.auth_providers import ( ) -@pytest.fixture -def suppress_auth_errors(caplog): - """Suppress expected ERROR/WARNING logs for tests that deliberately trigger authentication errors""" - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.auth") - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.auth_providers") - - class MockResponse: def __init__(self, status_code, json_data): self.status_code = status_code @@ -245,20 +237,20 @@ def test_valid_http_authentication(http_client, valid_api_key): @patch("httpx.AsyncClient.post", new=mock_post_failure) -def test_invalid_http_authentication(http_client, invalid_api_key, suppress_auth_errors): +def test_invalid_http_authentication(http_client, invalid_api_key): response = http_client.get("/test", headers={"Authorization": f"Bearer {invalid_api_key}"}) assert response.status_code == 401 assert "Authentication failed" in response.json()["error"]["message"] @patch("httpx.AsyncClient.post", new=mock_post_exception) -def test_http_auth_service_error(http_client, valid_api_key, suppress_auth_errors): +def test_http_auth_service_error(http_client, valid_api_key): response = http_client.get("/test", headers={"Authorization": f"Bearer {valid_api_key}"}) assert response.status_code == 401 assert "Authentication service error" in response.json()["error"]["message"] -def test_http_auth_request_payload(http_client, valid_api_key, mock_auth_endpoint, suppress_auth_errors): +def test_http_auth_request_payload(http_client, valid_api_key, mock_auth_endpoint): with patch("httpx.AsyncClient.post") as mock_post: mock_response = MockResponse(200, {"message": "Authentication successful"}) mock_post.return_value = mock_response @@ -428,7 +420,7 @@ def test_valid_oauth2_authentication(oauth2_client, jwt_token_valid, mock_jwks_u @patch("httpx.AsyncClient.get", new=mock_jwks_response) -def test_invalid_oauth2_authentication(oauth2_client, invalid_token, suppress_auth_errors): +def test_invalid_oauth2_authentication(oauth2_client, invalid_token): response = 
oauth2_client.get("/test", headers={"Authorization": f"Bearer {invalid_token}"}) assert response.status_code == 401 assert "Invalid JWT token" in response.json()["error"]["message"] @@ -473,7 +465,7 @@ def oauth2_client_with_jwks_token(oauth2_app_with_jwks_token): @patch("httpx.AsyncClient.get", new=mock_auth_jwks_response) -def test_oauth2_with_jwks_token_expected(oauth2_client, jwt_token_valid, suppress_auth_errors): +def test_oauth2_with_jwks_token_expected(oauth2_client, jwt_token_valid): response = oauth2_client.get("/test", headers={"Authorization": f"Bearer {jwt_token_valid}"}) assert response.status_code == 401 @@ -734,21 +726,21 @@ def test_valid_introspection_authentication(introspection_client, valid_api_key) @patch("httpx.AsyncClient.post", new=mock_introspection_inactive) -def test_inactive_introspection_authentication(introspection_client, invalid_api_key, suppress_auth_errors): +def test_inactive_introspection_authentication(introspection_client, invalid_api_key): response = introspection_client.get("/test", headers={"Authorization": f"Bearer {invalid_api_key}"}) assert response.status_code == 401 assert "Token not active" in response.json()["error"]["message"] @patch("httpx.AsyncClient.post", new=mock_introspection_invalid) -def test_invalid_introspection_authentication(introspection_client, invalid_api_key, suppress_auth_errors): +def test_invalid_introspection_authentication(introspection_client, invalid_api_key): response = introspection_client.get("/test", headers={"Authorization": f"Bearer {invalid_api_key}"}) assert response.status_code == 401 assert "Not JSON" in response.json()["error"]["message"] @patch("httpx.AsyncClient.post", new=mock_introspection_failed) -def test_failed_introspection_authentication(introspection_client, invalid_api_key, suppress_auth_errors): +def test_failed_introspection_authentication(introspection_client, invalid_api_key): response = introspection_client.get("/test", headers={"Authorization": f"Bearer {invalid_api_key}"}) assert response.status_code == 401 assert "Token introspection failed: 500" in response.json()["error"]["message"] @@ -965,22 +957,20 @@ def test_valid_kubernetes_auth_authentication(kubernetes_auth_client, valid_toke @patch("httpx.AsyncClient.post", new=mock_kubernetes_selfsubjectreview_failure) -def test_invalid_kubernetes_auth_authentication(kubernetes_auth_client, invalid_token, suppress_auth_errors): +def test_invalid_kubernetes_auth_authentication(kubernetes_auth_client, invalid_token): response = kubernetes_auth_client.get("/test", headers={"Authorization": f"Bearer {invalid_token}"}) assert response.status_code == 401 assert "Invalid token" in response.json()["error"]["message"] @patch("httpx.AsyncClient.post", new=mock_kubernetes_selfsubjectreview_http_error) -def test_kubernetes_auth_http_error(kubernetes_auth_client, valid_token, suppress_auth_errors): +def test_kubernetes_auth_http_error(kubernetes_auth_client, valid_token): response = kubernetes_auth_client.get("/test", headers={"Authorization": f"Bearer {valid_token}"}) assert response.status_code == 401 assert "Token validation failed" in response.json()["error"]["message"] -def test_kubernetes_auth_request_payload( - kubernetes_auth_client, valid_token, mock_kubernetes_api_server, suppress_auth_errors -): +def test_kubernetes_auth_request_payload(kubernetes_auth_client, valid_token, mock_kubernetes_api_server): with patch("httpx.AsyncClient.post") as mock_post: mock_response = MockResponse( 200, diff --git a/tests/unit/server/test_auth_github.py 
b/tests/unit/server/test_auth_github.py index f458f9a94..d87643579 100644 --- a/tests/unit/server/test_auth_github.py +++ b/tests/unit/server/test_auth_github.py @@ -4,7 +4,6 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -import logging # allow-direct-logging from unittest.mock import AsyncMock, patch import httpx @@ -16,13 +15,6 @@ from llama_stack.core.datatypes import AuthenticationConfig, AuthProviderType, G from llama_stack.core.server.auth import AuthenticationMiddleware -@pytest.fixture -def suppress_auth_errors(caplog): - """Suppress expected ERROR logs for tests that deliberately trigger authentication errors""" - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.auth") - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.auth_providers") - - class MockResponse: def __init__(self, status_code, json_data): self.status_code = status_code @@ -127,7 +119,7 @@ def test_authenticated_endpoint_with_valid_github_token(mock_client_class, githu @patch("llama_stack.core.server.auth_providers.httpx.AsyncClient") -def test_authenticated_endpoint_with_invalid_github_token(mock_client_class, github_token_client, suppress_auth_errors): +def test_authenticated_endpoint_with_invalid_github_token(mock_client_class, github_token_client): """Test accessing protected endpoint with invalid GitHub token""" # Mock the GitHub API to return 401 Unauthorized mock_client = AsyncMock() diff --git a/tests/unit/server/test_quota.py b/tests/unit/server/test_quota.py index 0939414dd..16b1772ce 100644 --- a/tests/unit/server/test_quota.py +++ b/tests/unit/server/test_quota.py @@ -4,7 +4,6 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -import logging # allow-direct-logging from uuid import uuid4 import pytest @@ -18,12 +17,6 @@ from llama_stack.core.storage.datatypes import KVStoreReference, SqliteKVStoreCo from llama_stack.providers.utils.kvstore import register_kvstore_backends -@pytest.fixture -def suppress_quota_warnings(caplog): - """Suppress expected WARNING logs for SQLite backend and quota exceeded""" - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.quota") - - class InjectClientIDMiddleware(BaseHTTPMiddleware): """ Middleware that injects 'authenticated_client_id' to mimic AuthenticationMiddleware. 
@@ -77,13 +70,13 @@ def auth_app(tmp_path, request): return app -def test_authenticated_quota_allows_up_to_limit(auth_app, suppress_quota_warnings): +def test_authenticated_quota_allows_up_to_limit(auth_app): client = TestClient(auth_app) assert client.get("/test").status_code == 200 assert client.get("/test").status_code == 200 -def test_authenticated_quota_blocks_after_limit(auth_app, suppress_quota_warnings): +def test_authenticated_quota_blocks_after_limit(auth_app): client = TestClient(auth_app) client.get("/test") client.get("/test") @@ -92,7 +85,7 @@ def test_authenticated_quota_blocks_after_limit(auth_app, suppress_quota_warning assert resp.json()["error"]["message"] == "Quota exceeded" -def test_anonymous_quota_allows_up_to_limit(tmp_path, request, suppress_quota_warnings): +def test_anonymous_quota_allows_up_to_limit(tmp_path, request): inner_app = FastAPI() @inner_app.get("/test") @@ -114,7 +107,7 @@ def test_anonymous_quota_allows_up_to_limit(tmp_path, request, suppress_quota_wa assert client.get("/test").status_code == 200 -def test_anonymous_quota_blocks_after_limit(tmp_path, request, suppress_quota_warnings): +def test_anonymous_quota_blocks_after_limit(tmp_path, request): inner_app = FastAPI() @inner_app.get("/test") diff --git a/tests/unit/server/test_server.py b/tests/unit/server/test_server.py index d6d4f4f23..f21bbdd67 100644 --- a/tests/unit/server/test_server.py +++ b/tests/unit/server/test_server.py @@ -41,7 +41,7 @@ class TestTranslateException: self.identifier = identifier self.owner = owner - resource = MockResource("vector_store", "test-db") + resource = MockResource("vector_db", "test-db") exc = AccessDeniedError("create", resource, user) result = translate_exception(exc) @@ -49,7 +49,7 @@ class TestTranslateException: assert isinstance(result, HTTPException) assert result.status_code == 403 assert "test-user" in result.detail - assert "vector_store::test-db" in result.detail + assert "vector_db::test-db" in result.detail assert "create" in result.detail assert "roles=['user']" in result.detail assert "teams=['dev']" in result.detail diff --git a/tests/unit/server/test_sse.py b/tests/unit/server/test_sse.py index f36c8c181..54afe4ee4 100644 --- a/tests/unit/server/test_sse.py +++ b/tests/unit/server/test_sse.py @@ -5,21 +5,12 @@ # the root directory of this source tree. 
import asyncio -import logging # allow-direct-logging from unittest.mock import AsyncMock, MagicMock -import pytest - from llama_stack.apis.common.responses import PaginatedResponse from llama_stack.core.server.server import create_dynamic_typed_route, create_sse_event, sse_generator -@pytest.fixture -def suppress_sse_errors(caplog): - """Suppress expected ERROR logs for tests that deliberately trigger SSE errors""" - caplog.set_level(logging.CRITICAL, logger="llama_stack.core.server.server") - - async def test_sse_generator_basic(): # An AsyncIterator wrapped in an Awaitable, just like our web methods async def async_event_gen(): @@ -79,7 +70,7 @@ async def test_sse_generator_client_disconnected_before_response_starts(): assert len(seen_events) == 0 -async def test_sse_generator_error_before_response_starts(suppress_sse_errors): +async def test_sse_generator_error_before_response_starts(): # Raise an error before the response starts async def async_event_gen(): raise Exception("Test error") diff --git a/uv.lock b/uv.lock index aad77f6a1..7f6e0401b 100644 --- a/uv.lock +++ b/uv.lock @@ -921,16 +921,16 @@ wheels = [ [[package]] name = "fastapi" -version = "0.119.0" +version = "0.116.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/f9/5c5bcce82a7997cc0eb8c47b7800f862f6b56adc40486ed246e5010d443b/fastapi-0.119.0.tar.gz", hash = "sha256:451082403a2c1f0b99c6bd57c09110ed5463856804c8078d38e5a1f1035dbbb7", size = 336756, upload-time = "2025-10-11T17:13:40.53Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/70/584c4d7cad80f5e833715c0a29962d7c93b4d18eed522a02981a6d1b6ee5/fastapi-0.119.0-py3-none-any.whl", hash = "sha256:90a2e49ed19515320abb864df570dd766be0662c5d577688f1600170f7f73cf2", size = 107095, upload-time = "2025-10-11T17:13:39.048Z" }, + { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, ] [[package]] @@ -1756,7 +1756,7 @@ wheels = [ [[package]] name = "llama-stack" -version = "0.3.0" +version = "0.2.23" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, @@ -1889,8 +1889,8 @@ requires-dist = [ { name = "httpx" }, { name = "jinja2", specifier = ">=3.1.6" }, { name = "jsonschema" }, - { name = "llama-stack-client", specifier = ">=0.3.0" }, - { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.3.0" }, + { name = "llama-stack-client", specifier = ">=0.2.23" }, + { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.23" }, { name = "openai", specifier = ">=1.107" }, { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" }, { name = "opentelemetry-sdk", specifier = ">=1.30.0" }, @@ -1996,7 +1996,7 @@ unit = [ [[package]] name = "llama-stack-client" -version = "0.3.0" +version = "0.2.23" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2015,9 +2015,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/d9/3c720f420fc80ce51de1a0ad90c53edc613617b68980137dcf716a86198a/llama_stack_client-0.3.0.tar.gz", hash = "sha256:1e974a74d0da285e18ba7df30b9a324e250782b130253bcef3e695830c5bb03d", size = 340443, upload-time = "2025-10-21T23:58:25.855Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/8f/306d5fcf2f97b3a6251219b03c194836a2ff4e0fcc8146c9970e50a72cd3/llama_stack_client-0.2.23.tar.gz", hash = "sha256:68f34e8ac8eea6a73ed9d4977d849992b2d8bd835804d770a11843431cd5bf74", size = 322288, upload-time = "2025-09-26T21:11:08.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/27/1c65035ce58100be22409c98e4d65b1cdaeff7811ea968f9f844641330d7/llama_stack_client-0.3.0-py3-none-any.whl", hash = "sha256:9f85d84d508ef7da44b96ca8555d7783da717cfc9135bab6a5530fe8c852690d", size = 425234, upload-time = "2025-10-21T23:58:24.246Z" }, + { url = "https://files.pythonhosted.org/packages/fa/75/3eb58e092a681804013dbec7b7f549d18f55acf6fd6e6b27de7e249766d8/llama_stack_client-0.2.23-py3-none-any.whl", hash = "sha256:eee42c74eee8f218f9455e5a06d5d4be43f8a8c82a7937ef51ce367f916df847", size = 379809, upload-time = "2025-09-26T21:11:06.856Z" }, ] [[package]] @@ -2661,7 +2661,7 @@ wheels = [ [[package]] name = "openai" -version = "2.5.0" +version = "1.107.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2673,9 +2673,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/39/aa3767c920c217ef56f27e89cbe3aaa43dd6eea3269c95f045c5761b9df1/openai-2.5.0.tar.gz", hash = "sha256:f8fa7611f96886a0f31ac6b97e58bc0ada494b255ee2cfd51c8eb502cfcb4814", size = 590333, upload-time = "2025-10-17T18:14:47.669Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/67/d6498de300f83ff57a79cb7aa96ef3bef8d6f070c3ded0f1b5b45442a6bc/openai-1.107.0.tar.gz", hash = "sha256:43e04927584e57d0e9e640ee0077c78baf8150098be96ebd5c512539b6c4e9a4", size = 566056, upload-time = "2025-09-08T19:25:47.604Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/f3/ebbd700d8dc1e6380a7a382969d96bc0cbea8717b52fb38ff0ca2a7653e8/openai-2.5.0-py3-none-any.whl", hash = "sha256:21380e5f52a71666dbadbf322dd518bdf2b9d11ed0bb3f96bea17310302d6280", size = 999851, upload-time = "2025-10-17T18:14:45.528Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/e8a4fd20390f2858b95227c288df8fe0c835f7c77625f7583609161684ba/openai-1.107.0-py3-none-any.whl", hash = "sha256:3dcfa3cbb116bd6924b27913b8da28c4a787379ff60049588547a1013e6d6438", size = 
950968, upload-time = "2025-09-08T19:25:45.552Z" }, ] [[package]] @@ -5231,7 +5231,7 @@ wheels = [ [[package]] name = "weaviate-client" -version = "4.17.0" +version = "4.16.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, @@ -5242,9 +5242,9 @@ dependencies = [ { name = "pydantic" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/e4/6a0b1501645f17a851067fc7bd0d5b53dc9777f2818be9c43debe06eda19/weaviate_client-4.16.9.tar.gz", hash = "sha256:d461071f1ff5ebddd0fc697959628a1d8caa12af1da071401ef25583c3084eba", size = 766390, upload-time = "2025-08-20T15:00:03.924Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" }, + { url = "https://files.pythonhosted.org/packages/10/1a/fc66f5f33961351c759d56453d18176849da8f64186c941183bb574b808b/weaviate_client-4.16.9-py3-none-any.whl", hash = "sha256:8b4adabaec0d513edef94c8c1de61c89a86eba3b63a4dc1acdfc9580e80199f4", size = 579098, upload-time = "2025-08-20T15:00:01.882Z" }, ] [[package]]