feat(api): Add vector store file batches api (#3642)
Some checks failed
SqlStore Integration Tests / test-postgres (3.12) (push) Failing after 0s
Integration Auth Tests / test-matrix (oauth2_token) (push) Failing after 1s
Test External Providers Installed via Module / test-external-providers-from-module (venv) (push) Has been skipped
Integration Tests (Replay) / Integration Tests (, , , client=, ) (push) Failing after 2s
Python Package Build Test / build (3.13) (push) Failing after 0s
Python Package Build Test / build (3.12) (push) Failing after 2s
SqlStore Integration Tests / test-postgres (3.13) (push) Failing after 5s
Vector IO Integration Tests / test-matrix (push) Failing after 4s
API Conformance Tests / check-schema-compatibility (push) Successful in 9s
Unit Tests / unit-tests (3.12) (push) Failing after 3s
Test External API and Providers / test-external (venv) (push) Failing after 5s
Unit Tests / unit-tests (3.13) (push) Failing after 3s
UI Tests / ui-tests (22) (push) Successful in 40s
Pre-commit / pre-commit (push) Successful in 1m28s

# What does this PR do?

Add the OpenAI-compatible vector store file batches API. This functionality
is needed to attach many files to a vector store as a single batch.
https://github.com/llamastack/llama-stack/issues/3533

The API stubs were merged in
https://github.com/llamastack/llama-stack/pull/3615.
This PR adds persistence for file batches, as discussed in
https://github.com/llamastack/llama-stack/pull/3544.
(Used Claude Code for generation and reviewed by me.)
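
For reference, a minimal usage sketch of the new file batches flow (assuming a Llama Stack server at `http://localhost:8321` and two files already uploaded via the files API; the base URL and file IDs below are placeholders, not values from this PR):

```python
import time

from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# Create a vector store to attach the files to
vector_store = client.vector_stores.create(name="my_store")

# Attach many files in a single call as a file batch (placeholder file IDs)
batch = client.vector_stores.file_batches.create(
    vector_store_id=vector_store.id,
    file_ids=["file-abc123", "file-def456"],
)

# Poll until the batch finishes processing
while batch.status == "in_progress":
    time.sleep(1)
    batch = client.vector_stores.file_batches.retrieve(
        vector_store_id=vector_store.id,
        batch_id=batch.id,
    )

print(batch.status, batch.file_counts)
```

A batch can also be cancelled with `file_batches.cancel(...)` and its per-file contents inspected with `file_batches.list_files(...)`, as exercised by the integration tests below.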


## Test Plan
1. Unit tests pass.
2. Verified that the cc-vec integration with LlamaStackClient works with the file batches API: https://github.com/raghotham/cc-vec
3. Integration tests pass.
slekkala1 2025-10-06 16:58:22 -07:00 committed by GitHub
parent 597d405e13
commit bba9957edd
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
37 changed files with 10322 additions and 53 deletions

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_retrieve_contents[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is the content of test file 2"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.014871168,
0.094365,
-0.098275684,
0.016189486,
0.072296426,
-0.039229725,
0.007638039,
0.035811495,
-0.03784589,
0.022591105,
0.15810202,
0.009195058,
-0.029846655,
-0.06448414,
-0.01898075,
-0.02023675,
-0.07593923,
-0.04666322,
0.010769107,
0.033283222,
0.06951838,
0.039086174,
-0.009640043,
-0.008601025,
0.039979056,
0.02799972,
-0.06578151,
0.08029443,
0.0101568075,
-0.07898879,
0.048795786,
0.057297125,
0.025737243,
0.03572965,
0.11485981,
0.030900626,
0.118485495,
-0.041167885,
-0.019413618,
-0.0009897926,
0.03717747,
-0.012367201,
-0.0026639055,
0.015703445,
-0.0046827365,
0.023138778,
0.012855939,
-0.029367425,
0.00042996072,
-0.003222942,
-0.055509202,
0.012830617,
-0.06941755,
-0.011024706,
0.07149942,
0.021040803,
0.0409756,
0.010087916,
-0.015326204,
0.06633094,
0.024846299,
0.030543685,
-0.036063526,
0.04786587,
0.08074621,
-0.051489003,
-0.03944393,
-0.025607359,
-0.030061793,
-0.119378455,
-0.14597124,
-0.0019379344,
0.008393092,
0.023913048,
0.028285578,
0.017838098,
-0.10575887,
0.008080291,
0.06388723,
-0.12506105,
-0.02536782,
-0.11007926,
0.051198784,
0.007446184,
-0.030837545,
0.09254253,
0.05638562,
-0.0155668175,
-0.031867314,
0.018337138,
0.02442871,
-0.042078987,
0.0038125275,
0.089955,
-0.008119613,
0.040103614,
0.011012824,
0.044628628,
0.0791957,
0.054247666,
-0.027651828,
-0.03190785,
0.041443683,
0.041629724,
-0.077835254,
-0.09937542,
0.029904107,
-0.05434366,
0.07058962,
-0.04535761,
0.03365359,
-0.061656676,
-0.018105442,
-0.07228336,
0.035377987,
-0.03161877,
-0.020589713,
0.058485094,
-0.049225487,
0.03934316,
0.08550028,
-0.029991213,
-0.05576064,
-0.029334918,
-0.053031918,
-0.061839186,
0.08176057,
-3.3282106e-33,
0.00018265574,
-0.09808404,
-0.00554673,
0.13180184,
0.026467713,
-0.03976283,
0.010410568,
0.022475285,
-0.07190717,
0.005138454,
-0.021325583,
-0.1046733,
0.0020021838,
0.023773609,
-0.057499945,
-0.011727483,
-0.020912478,
0.026353713,
0.01779019,
-0.0148312645,
0.064687304,
0.045060385,
-0.029312065,
-0.08633001,
-0.026792597,
0.014552106,
0.004505434,
-0.06774755,
0.034052122,
0.013713737,
-0.0075813113,
-0.059718475,
-0.016189422,
0.044314116,
0.026844766,
0.026430624,
0.024091395,
-0.0032406747,
-0.075288124,
0.032822173,
0.027104331,
-0.026295068,
0.04316082,
-0.010091815,
0.034184698,
-0.08266358,
-0.020962045,
-0.00719584,
0.068549044,
0.005033586,
0.0017975906,
0.06465498,
0.05990613,
-0.012483792,
0.024451919,
0.021659598,
-0.0046074707,
-0.004559902,
0.002713282,
0.062373567,
0.0035651235,
0.06017224,
-0.062707886,
0.039937016,
-0.0064443815,
-0.041358124,
-0.045459975,
-0.1090475,
0.08058783,
0.055110224,
-0.05126053,
-0.05976516,
0.037940193,
0.015456569,
-0.024956519,
-0.037877902,
-0.006799,
0.031685203,
-0.036858797,
-0.055584695,
-0.048513155,
-0.07101657,
-0.041681714,
-0.04429727,
-0.09584418,
-0.060873836,
0.008867621,
-0.106438614,
0.040050562,
-0.084729105,
0.018111277,
0.010153493,
-0.08883196,
-0.063969284,
0.08611972,
1.4074298e-33,
0.03433739,
0.037653737,
-0.05348675,
0.0015385789,
0.026684077,
0.026603375,
0.07006387,
-0.034265522,
-0.018221779,
0.10960259,
0.013464475,
-0.008325532,
0.019438146,
-0.039553005,
0.03469477,
-0.0123773115,
-0.013288484,
-0.048081715,
-0.019539693,
-0.0033996427,
-0.024453517,
0.061505664,
0.119236834,
0.026294904,
-0.01607055,
-0.011499089,
0.04267117,
0.0295908,
0.022084564,
0.007893738,
0.052055445,
0.05781507,
-0.13408813,
0.01778491,
0.021400984,
-0.12113228,
0.10535695,
-0.07358604,
-0.013651957,
0.04049295,
0.054150987,
0.0987462,
0.0110208625,
0.040327504,
0.034936633,
0.10400846,
0.12958324,
-0.024531014,
0.002284699,
-0.044239815,
0.049778443,
-0.055788964,
0.015235888,
0.0034493478,
-0.02607555,
0.060282644,
-0.028004775,
0.040875163,
-0.023749253,
0.002289086,
0.04982698,
0.046928305,
-0.064160004,
0.013701618,
0.015511878,
-0.054725982,
-0.0459802,
0.03258067,
0.027034523,
0.01643672,
-0.041782584,
-0.03698569,
-0.023043923,
-0.07073365,
0.028486207,
0.0017764921,
-0.03352676,
-0.009977863,
0.024488676,
-0.01789395,
0.029737154,
-0.026266927,
-0.03567072,
0.07469971,
0.028393274,
-0.029625034,
-0.01053128,
0.09147493,
-0.018718474,
0.0012933073,
-0.021214467,
0.07475739,
-0.007773536,
0.048597455,
0.005216022,
-1.6914717e-08,
-0.05724563,
-0.0938908,
-0.034359876,
-0.037500683,
-0.020235153,
0.06142227,
-0.042273093,
-0.008759724,
-0.009908796,
0.016232042,
-0.014239323,
0.024709346,
-0.030538557,
-0.05391127,
-0.051778477,
0.01277344,
0.0036140021,
-0.012569925,
-0.025041323,
-0.0203936,
0.025865255,
0.010908398,
0.027834684,
0.009661084,
-0.006598172,
0.07860872,
0.054516125,
0.042956624,
-0.06275145,
-0.025701547,
0.08085865,
0.030041302,
0.02248997,
-0.0840195,
0.00029938898,
0.10966559,
0.118907265,
0.063014604,
0.037847042,
0.032069027,
-0.05345487,
-0.022730324,
0.0071888734,
0.037573762,
-0.020178014,
-0.090167634,
-0.07191704,
-0.02604166,
-0.043885063,
-0.14087014,
-0.017230472,
-0.012063355,
-0.046736836,
0.039048597,
-0.060394738,
0.022166032,
0.025670663,
0.022949725,
-0.06707243,
-0.014654702,
0.057985142,
0.10511708,
0.05698323,
-0.017205814
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 8,
"total_tokens": 8
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_retrieve_contents[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is the content of test file 1"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.029406669,
0.08920982,
-0.11326726,
0.0065823817,
0.07725067,
-0.036890104,
0.030436223,
0.041454185,
-0.049156666,
0.018258564,
0.14662577,
0.01744915,
-0.012837422,
-0.06889876,
-0.039401636,
-0.038800705,
-0.08963421,
-0.059656583,
0.001375945,
0.045138627,
0.042796962,
0.053700265,
-0.035706885,
0.010138017,
0.060920056,
0.017344126,
-0.05633907,
0.063370295,
0.0021257724,
-0.083796844,
0.050487563,
0.047987595,
0.069071226,
0.049588464,
0.117036626,
0.05339311,
0.10129953,
-0.048230153,
-0.014987975,
0.0250915,
0.031392053,
-0.008863942,
0.0073650074,
-0.0009767569,
-0.016403567,
0.015523393,
-0.010998956,
-0.014870063,
0.0061682137,
-0.0017961137,
-0.022682818,
0.018210242,
-0.07757007,
-0.0015845516,
0.069547005,
0.000419109,
0.038414054,
0.005823485,
-0.028931383,
0.07009549,
-0.0018009909,
0.033516172,
-0.014593847,
0.03922457,
0.08240545,
-0.050596908,
-0.039732855,
-0.024425076,
-0.015055329,
-0.11705068,
-0.15979129,
-0.008256823,
-0.0100719705,
0.03266482,
0.0029998205,
0.0316428,
-0.094554916,
0.017661797,
0.058996264,
-0.119718134,
-0.027414676,
-0.09155906,
0.040038,
0.01091849,
-0.029446004,
0.10225186,
0.06583262,
-0.003439552,
-0.009694834,
0.016906522,
0.023685955,
-0.032616187,
-0.010238839,
0.07891618,
-0.007330681,
0.05238444,
0.00943625,
0.042121,
0.08491511,
0.049208272,
-0.01868227,
-0.013585418,
0.06727199,
0.084571496,
-0.103213035,
-0.08387524,
0.03641842,
-0.047227863,
0.057315867,
-0.04463932,
0.006783099,
-0.08934107,
-0.015040418,
-0.08107057,
0.013285569,
-0.060907867,
-0.042128306,
0.057306163,
-0.058711898,
0.04628304,
0.070194095,
-0.041729517,
-0.0338408,
-0.012369257,
-0.044708908,
-0.059450094,
0.08251312,
-3.443368e-33,
0.0121309515,
-0.11084454,
-0.020510655,
0.10916455,
0.033683147,
-0.02845083,
0.024345158,
0.034192592,
-0.08367815,
0.0064610844,
-0.00912456,
-0.0663567,
-0.0028754657,
0.008272698,
-0.09166764,
0.0089771375,
-0.03963948,
0.019947624,
-0.01321528,
-0.019034218,
0.051933073,
0.028107261,
-0.039153125,
-0.080395184,
-0.050503474,
0.02060341,
-0.012718284,
-0.046732575,
0.017907938,
-0.0028334607,
-0.011695137,
-0.05667005,
-0.043894444,
0.034919597,
0.022352098,
0.046777196,
0.045085873,
-0.008840106,
-0.06373453,
0.036720857,
0.012829601,
-0.035169926,
0.046209145,
-0.014361767,
0.03706697,
-0.056797564,
-0.06310496,
0.010818958,
0.047810175,
0.0029118094,
-0.003235893,
0.061511047,
0.072056666,
-0.03286638,
0.005070082,
0.021947902,
-0.017779002,
-0.022738373,
-0.021926457,
0.047074158,
0.010847615,
0.05539702,
-0.07119971,
0.033833236,
0.012342855,
-0.047586687,
-0.026776271,
-0.09885727,
0.10053448,
0.036877092,
-0.07049897,
-0.059692938,
0.016129492,
-0.0016443401,
-0.026804024,
-0.013527272,
-0.015385511,
0.055627547,
-0.060485132,
-0.055540122,
-0.04329072,
-0.07097361,
-0.04857043,
-0.03726256,
-0.09059366,
-0.036855534,
0.024561211,
-0.10113953,
0.056738112,
-0.10995085,
0.042282794,
0.014222368,
-0.07067843,
-0.05902307,
0.06426122,
1.6036318e-33,
0.037851896,
0.032911286,
-0.04029648,
-0.00049357174,
0.028011942,
0.048672136,
0.07279598,
-0.027471887,
-0.02847654,
0.114492,
0.001777095,
-0.009519909,
0.0025862327,
-0.056408145,
0.023462169,
-0.006209674,
-0.010567065,
-0.05877587,
-0.032393616,
0.011836781,
-0.038905054,
0.05516299,
0.09564333,
0.028543225,
-0.023832332,
-0.0015711841,
0.047049087,
0.03128219,
0.02811091,
0.007177092,
0.055283513,
0.06574452,
-0.1020208,
0.021213628,
0.020237882,
-0.10449357,
0.09608935,
-0.06253181,
0.015293753,
0.042053986,
0.06105009,
0.0909162,
0.018404186,
0.031023262,
0.03562763,
0.112073965,
0.10124763,
-0.007683015,
0.013140281,
-0.042280227,
0.051135287,
-0.02950743,
0.027794402,
-0.010734668,
-0.011067552,
0.058104575,
-0.009284788,
0.056184508,
-0.040822964,
0.010282754,
0.0374409,
0.054198533,
-0.061418086,
0.030569963,
0.0023648597,
-0.054184474,
-0.020570045,
0.012422129,
0.025696559,
-0.007607385,
-0.026194826,
-0.024159024,
0.0012979766,
-0.07461716,
0.051458035,
-0.004183808,
-0.040804464,
-0.023975441,
0.009455526,
-0.0018798193,
0.03668693,
-0.019319497,
-0.06195781,
0.06456675,
0.040328216,
-0.010790134,
0.013190221,
0.09067539,
-0.0051480443,
0.013312647,
-0.029548675,
0.07769003,
0.0027328292,
0.04533781,
-0.0017606319,
-1.661594e-08,
-0.040610366,
-0.09883059,
-0.05522113,
-0.02916469,
-0.019305382,
0.088138185,
-0.038325552,
-0.03327639,
-0.012629364,
0.006948921,
0.010438818,
0.026771523,
-0.040855426,
-0.03958403,
-0.051137064,
-0.016159322,
-0.020525131,
-0.023726366,
-0.013322245,
-0.008097836,
0.028000915,
0.02806969,
0.015645925,
-0.0043166955,
0.0054488196,
0.06720413,
0.068473674,
0.07172716,
-0.06339439,
-0.02540609,
0.08468492,
0.041936778,
0.021067144,
-0.07596481,
0.017143335,
0.1260291,
0.121315174,
0.08431059,
0.040587336,
0.036687315,
-0.04717,
-0.022659328,
-0.006820436,
0.005210712,
-0.033785924,
-0.08449115,
-0.0844501,
-0.03192747,
-0.036649443,
-0.13791409,
-0.036417518,
-0.00080547476,
-0.047578912,
0.038795993,
-0.06757743,
0.016941966,
0.036312684,
0.0125779435,
-0.058240637,
0.004471269,
0.03226526,
0.09821741,
0.053010236,
-0.016268
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 8,
"total_tokens": 8
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_create_and_retrieve[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is batch test file 1"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.009745733,
0.03363038,
-0.10852256,
0.026609829,
-0.0060599064,
-0.020473678,
0.0692486,
0.032276765,
-0.11532835,
-0.0005207133,
0.11814916,
0.0119809555,
0.03685765,
-0.10744223,
-0.046515625,
0.0015449532,
-0.06319664,
-0.04640812,
-0.037318822,
-0.025718328,
-0.00026058854,
-0.011890766,
-0.050925612,
0.014111713,
0.029467698,
0.006379121,
-0.012013293,
-0.0024293982,
-0.044318773,
-0.08100101,
0.02009568,
0.055713937,
0.078816675,
0.054973654,
0.20367871,
-0.004309458,
0.03877001,
0.03825522,
-0.002538199,
-0.0007973801,
0.044761047,
-0.054529082,
-0.008856888,
-0.04078078,
0.011367262,
-0.022404457,
-0.06209053,
0.02558725,
-0.0034454092,
-0.03743928,
-0.062026348,
-0.030812219,
-0.034592565,
-0.014926672,
0.018588377,
0.013435887,
0.08169151,
0.053658403,
-0.03557856,
0.033325985,
-0.01637577,
-0.0222152,
-0.039247517,
0.00094368146,
0.10228945,
-0.04305617,
-0.052200828,
-0.02007385,
0.054805383,
-0.08231377,
-0.14736547,
0.048954617,
-0.0212168,
0.02872658,
-0.0671409,
0.021436114,
-0.023599947,
0.03677982,
0.010577411,
-0.0966004,
-0.06367233,
-0.10277648,
0.0273993,
-0.06292906,
-0.046344172,
0.039919835,
0.02682899,
0.025460077,
-0.013083559,
-0.002667712,
-0.016529463,
0.012605053,
-0.0064383023,
0.015841383,
-0.01710707,
0.12320292,
-0.0077660284,
0.05845043,
0.07362552,
0.038426086,
0.004742023,
-0.0155985365,
0.01418979,
0.07865995,
-0.026352523,
-0.037174653,
0.06787817,
-0.060126718,
0.06111402,
-0.034931272,
-0.009446326,
-0.006150886,
0.02892313,
-0.09361577,
0.0335364,
-0.09088912,
0.009241144,
0.07092964,
-0.08954648,
0.04494549,
0.040462427,
-0.04167353,
0.0076030386,
-0.0066417656,
-0.07275736,
-0.043690544,
0.07685007,
-1.0508795e-33,
-0.019583685,
-0.13087204,
-0.03574564,
0.070223756,
0.08133056,
-0.009436003,
0.046778366,
0.03478148,
-0.09441185,
-0.040857755,
-0.02127058,
-0.106959894,
0.024023255,
0.022780996,
-0.09042505,
-0.035755932,
0.011359196,
0.050059184,
0.0050815986,
-0.07676938,
0.05453651,
0.04191775,
-0.009206564,
-0.022437057,
-0.04617258,
-0.038608693,
-0.00036489012,
-0.025092375,
0.039146807,
-0.0072839926,
0.03675482,
-0.011301064,
-0.08863303,
0.059421506,
0.015851071,
0.033407707,
0.056883834,
-0.01203776,
0.027333334,
-0.009560535,
-0.05030555,
-0.009787559,
0.023205005,
-0.007937716,
0.003991047,
-0.036422852,
-0.06979188,
0.046075627,
0.056377746,
0.0071927872,
-0.00020658698,
0.017678235,
0.023745935,
-0.0031295705,
0.016370842,
0.027585855,
-0.03440131,
-0.05594279,
0.036442764,
0.03577988,
-0.005324585,
0.015240975,
-0.09071462,
0.072764605,
0.02343818,
-0.093097225,
0.05842133,
-0.061913762,
0.045556016,
0.07639311,
-0.035199754,
-0.009256856,
0.038682748,
-0.040795818,
0.017686425,
-0.025513103,
0.06860537,
0.085520275,
-0.1023457,
-0.0036474275,
-0.014826131,
-0.05045756,
-0.09065474,
-0.076476775,
-0.008538021,
-0.04111943,
-0.035473913,
-0.061549038,
0.114327826,
-0.09601482,
0.022990143,
0.0022396755,
-0.023026146,
-0.028128328,
0.07969127,
-4.1765383e-34,
0.07866384,
0.11484068,
0.016687382,
0.009315677,
0.01664128,
0.024303248,
0.046507504,
-0.043804675,
-0.09136995,
0.106353745,
-0.06948852,
0.018747667,
0.0053492193,
-0.033229355,
0.042339083,
-0.0017468681,
0.05323157,
0.0058223205,
-0.05331342,
0.016506517,
-0.02325185,
0.097519755,
-0.0045558517,
0.08866843,
-0.028221445,
-0.012007969,
-0.009742725,
0.061458003,
0.01574456,
-0.00039456616,
0.02444834,
0.065891184,
-0.054779086,
0.04863689,
0.043890025,
-0.062467597,
0.07615393,
0.0067509366,
0.019150084,
0.06994535,
0.027900916,
0.08902746,
-0.027433047,
0.031390887,
0.02271287,
0.08119532,
0.06855678,
0.0023552915,
-0.06764184,
0.00704173,
-0.034521427,
-0.053785548,
-0.03075216,
0.007947864,
-0.025317406,
-0.040664013,
0.036144093,
0.017730465,
-0.040179063,
0.013665757,
0.004815376,
0.009095556,
0.0072483593,
0.012753351,
-0.047865536,
-0.046072423,
-0.014048283,
0.031082962,
-0.034945205,
-0.023550391,
0.033062257,
-0.022966444,
0.007744228,
0.015939556,
-0.0012224894,
0.0010534802,
-0.015109,
-0.021597888,
-0.029862719,
0.03983828,
0.062536344,
0.0106168175,
-0.027220478,
0.02410377,
-0.0023566757,
0.085310005,
0.04843323,
0.090823516,
0.005126319,
0.020297319,
-0.01739127,
0.047677357,
0.11080086,
0.030030197,
0.029773563,
-1.5454503e-08,
-0.03580758,
-0.12177604,
0.019753791,
0.05854353,
-0.01590761,
0.085781366,
-0.09558486,
-0.0016744126,
0.00773199,
-0.04790156,
0.01175936,
0.006536077,
-0.032027386,
0.0031026274,
-0.07580574,
-0.039700802,
-0.00170645,
-0.070955865,
0.043680355,
0.029966798,
0.0039943648,
0.031923376,
0.08119928,
0.038820695,
0.013302812,
0.041675337,
0.044349737,
0.060403902,
-0.1058191,
-0.05287386,
0.050275758,
0.039101604,
0.0599918,
-0.025067834,
-0.019554066,
0.06748813,
0.12508559,
0.059007537,
-0.019899847,
-0.030194808,
-0.046559453,
0.034567222,
-0.021644907,
-0.03327634,
-0.0075667608,
-0.100658834,
-0.0639619,
-0.055270903,
-0.0111757815,
-0.11671873,
-0.07208087,
0.023208033,
0.027215267,
0.063635156,
-0.05858023,
0.020345282,
0.018325811,
-0.0036095325,
0.006916675,
0.06541716,
0.009575581,
0.046839867,
0.0070611075,
-0.09470841
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 6,
"total_tokens": 6
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,39 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_retrieve_contents[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://localhost:11434/api/ps",
"headers": {},
"body": {},
"endpoint": "/api/ps",
"model": ""
},
"response": {
"body": {
"__type__": "ollama._types.ProcessResponse",
"__data__": {
"models": [
{
"model": "all-minilm:l6-v2",
"name": "all-minilm:l6-v2",
"digest": "1b226e2802dbb772b5fc32a58f103ca1804ef7501331012de126ab22f67475ef",
"expires_at": "2025-10-06T16:41:45.231544-07:00",
"size": 590204928,
"size_vram": 590204928,
"details": {
"parent_model": "",
"format": "gguf",
"family": "bert",
"families": [
"bert"
],
"parameter_size": "23M",
"quantization_level": "F16"
}
}
]
}
},
"is_streaming": false
}
}

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_create_and_retrieve[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is batch test file 0"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
0.020637129,
0.048547756,
-0.12516363,
0.01991118,
-0.006535745,
-0.017178575,
0.027727997,
0.032170568,
-0.07302972,
0.008939002,
0.11493648,
0.0058907545,
0.0058539375,
-0.077171296,
-0.06883132,
0.0039748913,
-0.046849657,
-0.072902456,
-0.010890429,
-0.0019311906,
-0.011614798,
0.003689495,
-0.03695609,
-0.009029024,
0.017461002,
-0.004713484,
-0.010254731,
-0.026636763,
-0.026125714,
-0.046913657,
0.017024228,
0.0713477,
0.07881179,
0.03789051,
0.21716279,
-0.0077837943,
0.04686894,
0.020414647,
7.314368e-05,
0.0103133675,
0.059848394,
-0.04321678,
-0.011937493,
-0.021149047,
0.021315353,
-0.00072822213,
-0.046116166,
-0.0046820445,
0.016943695,
-0.03249135,
-0.055184096,
4.1543382e-05,
-0.034172166,
-0.023247559,
0.020267941,
0.012827845,
0.065036125,
0.07180022,
-0.013490698,
0.06376413,
-0.017730094,
-0.01806601,
-0.034191083,
0.008955718,
0.098446764,
-0.0061265854,
-0.06815829,
-0.039525956,
0.060588058,
-0.094874755,
-0.11774928,
0.019538416,
-0.014697532,
0.04773719,
-0.061298393,
0.030337377,
-0.0022184649,
0.019007793,
0.024370821,
-0.07063359,
-0.07582954,
-0.10816809,
0.031845964,
-0.057830192,
-0.04169559,
0.0752806,
0.019289386,
0.028845867,
0.0077010663,
0.013930818,
-0.067987345,
0.012679873,
-0.07907268,
0.0143718915,
-0.021433424,
0.11880779,
-0.016258432,
0.07099568,
0.035778854,
0.028776454,
0.013304291,
-0.05192297,
0.026758345,
0.10282426,
-0.003306269,
-0.03239622,
0.083044045,
-0.0412691,
0.043435257,
-0.043423533,
-0.013239603,
-0.0029038454,
0.038365215,
-0.10401672,
0.012744224,
-0.122984126,
-0.008942817,
0.06162198,
-0.120285526,
0.043005254,
0.04814879,
-0.036352232,
-0.003885529,
-0.018503373,
-0.088186465,
-0.0031517749,
0.09290919,
-1.1695094e-33,
-0.015589721,
-0.13189551,
0.008088751,
0.06899503,
0.07353927,
-0.030646399,
0.05110342,
0.03081624,
-0.07850498,
-0.021147482,
0.00017823944,
-0.10502706,
0.030078856,
0.02572523,
-0.068158925,
-0.025015576,
0.021830637,
0.049748335,
0.01520941,
-0.080153145,
0.06796621,
0.021865685,
-0.034017574,
-0.030821111,
-0.048006665,
0.0005615041,
-0.0137883695,
-0.04500587,
0.015368256,
-0.0043663937,
0.037706476,
0.0049090013,
-0.06216566,
0.03060772,
0.030548712,
0.029262561,
0.020701125,
0.0056516766,
0.010610447,
0.019530762,
-0.05664136,
-0.022654066,
-0.0010107337,
-0.020805702,
-0.012242364,
-0.05591731,
-0.049421698,
0.024721064,
0.05803342,
0.010474127,
-0.008790625,
0.025362873,
0.020258408,
0.004368581,
-0.01018003,
0.012385932,
-0.037656736,
-0.05642639,
0.020923307,
0.022813153,
-0.005735433,
0.015326356,
-0.108707875,
0.048076265,
0.023256551,
-0.10311626,
0.061980195,
-0.07340407,
0.051583096,
0.07360003,
-0.029443117,
-0.014564469,
0.042043358,
-0.020252181,
0.0147808045,
-0.0285806,
0.07891856,
0.056849223,
-0.106308356,
0.0197874,
0.0269322,
-0.04749746,
-0.066681586,
-0.10474516,
0.012599429,
-0.056163482,
-0.04901015,
-0.04571026,
0.09704481,
-0.105899766,
0.044303197,
-0.020125533,
-0.0368709,
-0.015417924,
0.042297333,
-8.289866e-35,
0.07415767,
0.10998298,
-0.016995763,
0.01066263,
-0.0012327223,
0.028000232,
0.0714317,
-0.02320065,
-0.07778205,
0.11864239,
-0.016559754,
0.037961867,
0.02930022,
-0.008237686,
0.059777655,
0.008086454,
0.02075205,
0.025284613,
-0.055471037,
0.0073576584,
-0.013398135,
0.11896543,
-0.014611002,
0.07691816,
-0.019711656,
-0.01920917,
-0.004744884,
0.08173054,
0.019665759,
-0.013193461,
0.06215852,
0.07420406,
-0.073212065,
0.036052067,
0.07328616,
-0.057373393,
0.08346425,
0.018834447,
0.03309735,
0.041197047,
0.033917964,
0.09151449,
-0.051731598,
0.049615093,
0.01124018,
0.06661862,
0.07268375,
-0.013245848,
-0.039673895,
-0.012173254,
0.0017787582,
-0.05746287,
-0.013884767,
0.020205025,
-0.029692367,
-0.031010685,
0.0149556715,
0.026381323,
-0.025382591,
0.0074336748,
-0.00949915,
0.015655186,
-0.0012397208,
-0.0032508406,
-0.046632554,
-0.0030316226,
-0.007273208,
0.064231135,
-0.034431897,
-0.06433184,
0.045421343,
-0.010773523,
-0.017881984,
0.010312532,
-0.024369273,
-0.008478495,
-0.02457377,
-0.0263535,
-0.027263613,
0.047060315,
0.08128726,
0.0045517692,
-0.010821656,
0.026526682,
0.018961033,
0.059243083,
0.001561823,
0.09838158,
0.00822081,
0.008796511,
-0.0060577285,
0.028892087,
0.08253284,
0.049560018,
0.023363132,
-1.498271e-08,
-0.036891207,
-0.10629833,
0.030452948,
0.049268734,
-0.0030453752,
0.07413954,
-0.07043819,
-0.034285706,
-0.009679971,
-0.046219327,
0.013510038,
-0.018686565,
-0.048570327,
0.0028313443,
-0.06190722,
-0.053201936,
0.0060967463,
-0.043467365,
0.042226154,
0.03455835,
-0.0375257,
0.023590367,
0.054896712,
0.029878648,
0.019286606,
0.026097741,
0.06938145,
0.06272366,
-0.09566521,
-0.07481147,
0.025204772,
0.039396077,
0.036375154,
-0.01104443,
-0.028223084,
0.111878626,
0.13400707,
0.06680113,
-0.011737675,
-0.03585406,
-0.07978788,
0.032793757,
-0.0021075818,
-0.028365146,
-0.042218164,
-0.08132239,
-0.0753423,
-0.043771427,
-0.015633285,
-0.14193884,
-0.055949364,
0.025526602,
-0.023186589,
0.061106257,
-0.056208834,
0.00838827,
0.014720396,
-0.014650135,
-0.012830787,
0.08434067,
0.024660436,
0.05366935,
0.005782819,
-0.10599063
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 6,
"total_tokens": 6
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_cancel[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is batch cancel test file 0 with substantial content"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.010706507,
0.11740309,
-0.070396945,
0.036590267,
0.03445541,
-0.037278067,
0.033794403,
-0.013823747,
-0.032249726,
0.039381154,
0.09738964,
0.043944314,
-0.015195914,
-0.08339148,
-0.12092182,
-0.0144716315,
-0.06525938,
0.008907217,
-0.016506711,
-0.011929026,
-0.0519942,
0.07381637,
0.028294124,
0.056386005,
0.028838597,
0.02860147,
-0.046813786,
-0.018329943,
-0.037620317,
-0.06344129,
0.037448265,
0.0807444,
0.08218735,
-0.018610513,
0.16465282,
0.006478139,
0.009052014,
0.024081843,
0.04604129,
-0.016105218,
0.050088186,
-0.014189308,
-0.055208918,
-0.024689473,
0.009216049,
0.0032953622,
-0.08004139,
-0.050898325,
0.030319132,
0.0038868543,
-0.03242241,
-0.008002084,
-0.05405017,
0.0034951256,
0.026613077,
-0.03749797,
0.074383445,
0.05947148,
-0.037571322,
0.07424358,
-0.031258598,
-0.010979168,
-0.115162514,
0.016076973,
0.12323825,
0.057677355,
-0.08872208,
-0.028623635,
0.05342226,
-0.060159575,
-0.07479101,
-0.01794232,
-0.0049816607,
0.08948416,
-0.042007502,
0.0925552,
-0.016678093,
0.013261441,
-0.0068968083,
0.00078877964,
-0.070652686,
-0.14053895,
0.054617904,
-0.064937904,
-0.036082774,
0.04364618,
0.039191015,
0.009325763,
0.055350192,
0.007441803,
-0.04520714,
0.0070686075,
0.029522296,
0.016590035,
-0.020568646,
0.083674796,
0.0076218233,
0.006881344,
0.013654858,
0.03697504,
0.04504176,
-0.012595865,
-0.006368664,
-0.006188894,
-0.02347456,
-0.014876863,
0.07330545,
-0.008524341,
0.03080002,
-0.079184264,
-0.002168809,
-0.04496155,
0.02353669,
-0.061784163,
0.019026963,
-0.034334134,
0.07823938,
0.086644776,
-0.100164026,
0.00979978,
0.043132447,
-0.00027732752,
-0.007950898,
-0.03439145,
-0.07176784,
-0.010847044,
0.10318583,
1.28398045e-33,
-0.057539165,
-0.10064088,
-0.036363184,
0.070467934,
0.12267441,
0.023121687,
0.036528632,
0.043095388,
-0.053614546,
0.034320176,
-0.015772322,
-0.07880764,
0.019716268,
0.017762613,
-0.094458655,
-0.08139035,
0.027233537,
0.07888667,
-0.024265131,
-0.054107342,
0.11021126,
-0.016241824,
-0.05417309,
-0.028439889,
-0.027373016,
-0.01668086,
-0.031238388,
-0.03203346,
0.017995317,
-0.011522754,
-0.0029258654,
0.022844825,
-0.019639384,
0.05111425,
-0.0015511515,
0.04084381,
0.0043716393,
-0.05789265,
0.024110112,
0.03920258,
-0.08151888,
-0.008190904,
-0.0645496,
-0.014420588,
0.00016276255,
-0.10466175,
-0.015631696,
-0.054435816,
0.03390489,
0.042083304,
0.041493565,
0.033552594,
0.027098974,
-0.035584476,
-0.025616122,
0.015369336,
0.025080213,
-0.047622968,
0.0076927147,
0.048611037,
0.07658855,
0.030115629,
-0.10192636,
0.009031788,
-0.026905872,
-0.07093241,
0.009540495,
-0.0967732,
0.006907292,
0.008907563,
-0.036709655,
-0.0074325944,
0.06927971,
-0.044891518,
-0.0022573345,
-0.05632572,
0.03744841,
0.026788702,
-0.00916575,
0.008179489,
0.08744597,
-0.046512436,
-0.061149366,
-0.13555244,
0.0010608839,
-0.06323009,
-0.039003603,
-0.07015582,
0.03916791,
-0.07763432,
-0.00032964678,
-0.026286542,
-0.053487364,
0.009920836,
0.104119115,
-1.9471978e-33,
0.04772588,
0.04490678,
-0.04262699,
0.03524018,
-0.003943472,
0.033365145,
0.06762878,
-0.021556355,
-0.043953415,
0.023543492,
0.005500359,
0.03756542,
0.025656395,
-0.014806406,
0.01845547,
0.015662882,
0.06915146,
0.010516805,
-0.08958506,
0.008974718,
-0.035460126,
0.05160542,
0.01763933,
0.067841165,
-0.02522728,
-0.022180483,
-0.085712284,
0.061407775,
0.07101853,
-0.0015686463,
0.055281166,
0.04126171,
-0.04599903,
-0.037977487,
0.09936549,
-0.064348385,
0.07501729,
0.06690245,
0.01264843,
0.011582279,
0.06661292,
0.083571374,
-0.05528334,
0.03757593,
0.043382253,
0.059041474,
0.056976013,
-0.02765602,
-0.00018057597,
-0.010140114,
-0.023275468,
-0.040977187,
-0.0051338123,
0.06462851,
-0.015096949,
-0.04108323,
0.013806998,
-0.013243718,
-0.04096836,
-0.021470992,
0.0037039437,
0.04606251,
0.027378108,
-0.009201031,
0.024913032,
0.027817363,
0.011912681,
0.072464235,
-0.04599433,
-0.033524342,
0.031872187,
-0.0017134893,
-0.030329237,
0.021338675,
0.050125677,
-0.006607719,
0.005844466,
-0.049508642,
2.296406e-05,
0.033044532,
0.07586271,
0.0094868485,
-0.0023229877,
0.063257135,
0.0073867897,
0.067748606,
-0.088573374,
0.06831021,
0.0047544846,
0.08063805,
-0.02170177,
0.020645779,
0.082571074,
0.039116666,
0.03906674,
-1.756136e-08,
-0.01928442,
-0.123927765,
0.0188664,
0.03889619,
0.003943178,
0.017261649,
-0.072421774,
0.010595731,
-0.032426827,
-0.07068102,
0.027171727,
-0.032465994,
-0.03428293,
0.00012704723,
-0.07441139,
-0.061249517,
0.003310212,
-0.030616615,
0.037538156,
0.013060206,
-0.02899822,
0.002607385,
0.023053044,
-0.008261543,
0.027366797,
0.041916996,
0.07509514,
0.093088634,
-0.05660954,
-0.10259794,
0.041243467,
-0.025973666,
0.013900956,
0.0023358895,
-0.075266555,
0.07490993,
0.14500652,
0.04697599,
-0.03860971,
0.009254478,
-0.06991552,
0.011762797,
0.02150895,
0.010407091,
-0.016874894,
-0.057741348,
-0.075219,
-0.07250321,
-0.03090426,
-0.110799745,
-0.024827298,
0.0065941666,
-0.027638538,
0.08827356,
-0.044589255,
-0.04193462,
0.021976525,
0.015851181,
-0.07105447,
0.106275305,
0.058465168,
0.0026831257,
-0.006616897,
-0.086507544
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 10,
"total_tokens": 10
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,39 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_cancel[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://localhost:11434/api/ps",
"headers": {},
"body": {},
"endpoint": "/api/ps",
"model": ""
},
"response": {
"body": {
"__type__": "ollama._types.ProcessResponse",
"__data__": {
"models": [
{
"model": "all-minilm:l6-v2",
"name": "all-minilm:l6-v2",
"digest": "1b226e2802dbb772b5fc32a58f103ca1804ef7501331012de126ab22f67475ef",
"expires_at": "2025-10-06T16:40:13.262640-07:00",
"size": 590204928,
"size_vram": 590204928,
"details": {
"parent_model": "",
"format": "gguf",
"family": "bert",
"families": [
"bert"
],
"parameter_size": "23M",
"quantization_level": "F16"
}
}
]
}
},
"is_streaming": false
}
}

@@ -0,0 +1,423 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_cancel[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": [
"This is batch cancel test file 1 with substantial content"
],
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.024848156,
0.10927085,
-0.0545053,
0.038470518,
0.046556868,
-0.034411646,
0.04878781,
-0.014318634,
-0.046015147,
0.044597667,
0.09629065,
0.058968317,
-0.007982022,
-0.10140896,
-0.10389055,
-0.019553911,
-0.07593768,
0.025729634,
-0.029175822,
-0.02637171,
-0.050457876,
0.066799924,
0.022711048,
0.06541894,
0.041600663,
0.030976223,
-0.056684654,
-0.0035002322,
-0.050632603,
-0.08931927,
0.040150054,
0.06798157,
0.08541512,
-0.0107848635,
0.15392521,
0.009335848,
0.010962297,
0.029146858,
0.047823314,
-0.026440151,
0.04159767,
-0.010160618,
-0.05779408,
-0.039702307,
-0.004494967,
-0.004617397,
-0.08862508,
-0.034483556,
0.024042498,
0.0051155766,
-0.0317056,
-0.01699217,
-0.053024635,
0.015636722,
0.03557156,
-0.039649993,
0.081902996,
0.06176357,
-0.05502012,
0.06357122,
-0.030193875,
-0.012515638,
-0.12543206,
0.012063709,
0.12448795,
0.040869392,
-0.07753088,
-0.021475459,
0.04500842,
-0.056871727,
-0.09496841,
-0.01180043,
-0.0017254521,
0.08008634,
-0.047713377,
0.08782804,
-0.02004271,
0.033268984,
-0.016207146,
-0.010731495,
-0.063805684,
-0.14302677,
0.0575187,
-0.06904251,
-0.037962824,
0.0182337,
0.042886198,
0.01039097,
0.044122625,
-0.0020459748,
-0.014757414,
0.0011372506,
0.07999029,
0.018020395,
-0.018433796,
0.07817236,
0.012330995,
0.007078602,
0.03731257,
0.03993665,
0.039117657,
0.0077354074,
-0.009170466,
-0.018691367,
-0.028763011,
-0.019665359,
0.062140632,
-0.020356707,
0.038877316,
-0.08305566,
0.00014209712,
-0.05700167,
0.021387467,
-0.054998472,
0.03538585,
-0.023105556,
0.089621656,
0.09418147,
-0.08390289,
0.009763535,
0.043676704,
-0.0022283366,
0.00070641236,
-0.03374215,
-0.07274797,
-0.034256138,
0.09228734,
1.2329422e-33,
-0.06229734,
-0.10348473,
-0.05939012,
0.07817319,
0.12856846,
0.03253048,
0.03706221,
0.03843275,
-0.06781762,
0.027851813,
-0.03286515,
-0.07305933,
0.011496317,
0.016992282,
-0.10859345,
-0.089275,
0.02053902,
0.07540007,
-0.030434899,
-0.057486024,
0.1028371,
-0.011332772,
-0.040277272,
-0.022627348,
-0.029583039,
-0.042487655,
-0.01710431,
-0.028937005,
0.034644134,
-0.015131404,
-0.005402634,
0.0111823045,
-0.024323324,
0.061144948,
-0.0068504023,
0.04550556,
0.017341396,
-0.063010655,
0.033939265,
0.029030005,
-0.07075115,
0.0076140417,
-0.056033216,
-0.01839173,
0.006444027,
-0.10148905,
-0.024238782,
-0.045753844,
0.029873326,
0.03732028,
0.05342056,
0.024428835,
0.03200607,
-0.045322895,
-0.009412481,
0.01895284,
0.026068604,
-0.043451786,
0.017836504,
0.060751975,
0.0770648,
0.037520513,
-0.094844334,
0.018022675,
-0.028010713,
-0.05970307,
0.0042470302,
-0.08537647,
0.0025366507,
0.0059753954,
-0.040670317,
-0.008420785,
0.070101276,
-0.05581281,
0.009997155,
-0.053269707,
0.030278698,
0.034753144,
-0.0069992156,
-0.0018294669,
0.052869115,
-0.047554925,
-0.07009094,
-0.12028551,
-0.016411684,
-0.0558196,
-0.026485136,
-0.07406597,
0.052336086,
-0.07966716,
-0.009600498,
-0.016012779,
-0.04670444,
0.0040856744,
0.13087922,
-1.9130171e-33,
0.04951988,
0.04144521,
-0.030660233,
0.02966906,
-0.0019053655,
0.038034633,
0.053598672,
-0.03873592,
-0.050682254,
0.0163216,
-0.018117629,
0.02705123,
0.014957701,
-0.029251544,
0.010732444,
0.01150037,
0.08527361,
0.000666767,
-0.09031944,
0.007236525,
-0.0394124,
0.032647807,
0.029387591,
0.0696317,
-0.028400488,
-0.019728381,
-0.08580391,
0.050916594,
0.07555233,
0.0013333871,
0.036405865,
0.03485496,
-0.035891958,
-0.03518406,
0.08422707,
-0.07100648,
0.066512264,
0.0566844,
0.005254722,
0.026210023,
0.06271422,
0.07715752,
-0.042685844,
0.029498853,
0.048694577,
0.06829996,
0.05471948,
-0.014717811,
-0.0084376065,
-0.007800526,
-0.033968475,
-0.035792083,
-0.01680357,
0.056615632,
-0.008940466,
-0.044396702,
0.033141203,
-0.020710811,
-0.052891865,
-0.012946567,
0.013425288,
0.045469046,
0.02655372,
-7.159544e-06,
0.033383444,
0.012771919,
0.0050781234,
0.05739414,
-0.05292731,
-0.009027621,
0.019719183,
-0.0046205786,
-0.012921344,
0.021115582,
0.063510135,
0.006540324,
0.008657973,
-0.044172782,
-0.0010352373,
0.025917202,
0.07357742,
0.012915724,
-0.010159995,
0.05862044,
0.0032137444,
0.08368076,
-0.06552963,
0.06294139,
0.004963379,
0.08497223,
-0.030302247,
0.028541481,
0.103464715,
0.03432187,
0.039947473,
-1.757192e-08,
-0.020163277,
-0.12507844,
0.015846072,
0.038265407,
-0.0031526515,
0.01804952,
-0.0817553,
0.030486222,
-0.02073271,
-0.069118954,
0.0252006,
-0.016496325,
-0.018695008,
-0.0063493066,
-0.08448383,
-0.05474651,
0.008191211,
-0.04699509,
0.03820692,
0.019186925,
-0.006977571,
-0.0002934883,
0.030278133,
-0.009153849,
0.030300315,
0.04737054,
0.06026962,
0.09765302,
-0.05529498,
-0.09553832,
0.06008278,
-0.025960611,
0.034287665,
-0.012333093,
-0.07106284,
0.05141244,
0.14179605,
0.04709826,
-0.049292527,
0.014455253,
-0.047851674,
0.011403938,
0.014072481,
0.010494679,
-0.0009859774,
-0.06089218,
-0.07293921,
-0.07961594,
-0.03404924,
-0.10086713,
-0.031331882,
0.0042822976,
-0.0045380252,
0.09583955,
-0.044172354,
-0.034359995,
0.023726532,
0.02167657,
-0.06509328,
0.09268318,
0.055370033,
0.003980954,
-0.0053826002,
-0.07774321
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 10,
"total_tokens": 10
}
}
},
"is_streaming": false
}
}

@@ -0,0 +1,20 @@
{
"test_id": "tests/integration/vector_io/test_openai_vector_stores.py::test_openai_vector_store_file_batch_create_and_retrieve[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-ollama/all-minilm:l6-v2-None-384]",
"request": {
"method": "POST",
"url": "http://localhost:11434/api/ps",
"headers": {},
"body": {},
"endpoint": "/api/ps",
"model": ""
},
"response": {
"body": {
"__type__": "ollama._types.ProcessResponse",
"__data__": {
"models": []
}
},
"is_streaming": false
}
}

@@ -902,3 +902,290 @@ def test_openai_vector_store_search_modes(llama_stack_client, client_with_models
        search_mode=search_mode,
    )
    assert search_response is not None


def test_openai_vector_store_file_batch_create_and_retrieve(compat_client_with_empty_stores, client_with_models):
    """Test creating and retrieving a vector store file batch."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="batch_test_store")

    # Create multiple files
    file_ids = []
    for i in range(2):
        with BytesIO(f"This is batch test file {i}".encode()) as file_buffer:
            file_buffer.name = f"batch_test_{i}.txt"
            file = compat_client.files.create(file=file_buffer, purpose="assistants")
        file_ids.append(file.id)

    # Create a file batch
    batch = compat_client.vector_stores.file_batches.create(
        vector_store_id=vector_store.id,
        file_ids=file_ids,
    )

    assert batch is not None
    assert batch.object == "vector_store.file_batch"
    assert batch.vector_store_id == vector_store.id
    assert batch.status in ["in_progress", "completed"]
    assert batch.file_counts.total == len(file_ids)
    assert hasattr(batch, "id")
    assert hasattr(batch, "created_at")

    # Wait for batch processing to complete
    max_retries = 60  # 60 seconds max wait (increased for file processing delays)
    retries = 0
    retrieved_batch = None
    while retries < max_retries:
        retrieved_batch = compat_client.vector_stores.file_batches.retrieve(
            vector_store_id=vector_store.id,
            batch_id=batch.id,
        )
        if retrieved_batch.status in ["completed", "failed"]:
            break
        time.sleep(1)
        retries += 1

    assert retrieved_batch is not None
    assert retrieved_batch.id == batch.id
    assert retrieved_batch.vector_store_id == vector_store.id
    assert retrieved_batch.object == "vector_store.file_batch"
    assert retrieved_batch.file_counts.total == len(file_ids)
    assert retrieved_batch.status == "completed"  # Should be completed after processing


def test_openai_vector_store_file_batch_list_files(compat_client_with_empty_stores, client_with_models):
    """Test listing files in a vector store file batch."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="batch_list_test_store")

    # Create multiple files
    file_ids = []
    for i in range(2):
        with BytesIO(f"This is batch list test file {i}".encode()) as file_buffer:
            file_buffer.name = f"batch_list_test_{i}.txt"
            file = compat_client.files.create(file=file_buffer, purpose="assistants")
        file_ids.append(file.id)

    # Create a file batch
    batch = compat_client.vector_stores.file_batches.create(
        vector_store_id=vector_store.id,
        file_ids=file_ids,
    )

    # Wait for batch processing to complete
    max_retries = 60  # 60 seconds max wait (increased for file processing delays)
    retries = 0
    while retries < max_retries:
        retrieved_batch = compat_client.vector_stores.file_batches.retrieve(
            vector_store_id=vector_store.id,
            batch_id=batch.id,
        )
        if retrieved_batch.status in ["completed", "failed"]:
            break
        time.sleep(1)
        retries += 1

    # List all files in the batch
    files_response = compat_client.vector_stores.file_batches.list_files(
        vector_store_id=vector_store.id,
        batch_id=batch.id,
    )

    assert files_response is not None
    assert files_response.object == "list"
    assert hasattr(files_response, "data")
    assert len(files_response.data) == len(file_ids)

    # Verify all files are in the response
    response_file_ids = {file.id for file in files_response.data}
    assert response_file_ids == set(file_ids)

    # Test pagination with limit
    limited_response = compat_client.vector_stores.file_batches.list_files(
        vector_store_id=vector_store.id,
        batch_id=batch.id,
        limit=3,
    )
    assert len(limited_response.data) == 2
    assert limited_response.has_more is False

    # Test pagination with after cursor
    first_page = compat_client.vector_stores.file_batches.list_files(
        vector_store_id=vector_store.id,
        batch_id=batch.id,
        limit=2,
    )
    second_page = compat_client.vector_stores.file_batches.list_files(
        vector_store_id=vector_store.id,
        batch_id=batch.id,
        limit=2,
        after=first_page.data[-1].id,
    )

    assert len(first_page.data) == 2
    assert len(second_page.data) <= 3  # Should be <= remaining files

    # Ensure no overlap between pages
    first_page_ids = {file.id for file in first_page.data}
    second_page_ids = {file.id for file in second_page.data}
    assert first_page_ids.isdisjoint(second_page_ids)


def test_openai_vector_store_file_batch_cancel(compat_client_with_empty_stores, client_with_models):
    """Test cancelling a vector store file batch."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="batch_cancel_test_store")

    # Create a batch to test cancellation
    file_ids = []
    for i in range(2):  # Batch size that allows time for cancellation
        with BytesIO(f"This is batch cancel test file {i} with substantial content".encode()) as file_buffer:
            file_buffer.name = f"batch_cancel_test_{i}.txt"
            file = compat_client.files.create(file=file_buffer, purpose="assistants")
        file_ids.append(file.id)

    # Create a file batch
    batch = compat_client.vector_stores.file_batches.create(
        vector_store_id=vector_store.id,
        file_ids=file_ids,
    )

    try:
        # Cancel the batch immediately after creation
        cancelled_batch = compat_client.vector_stores.file_batches.cancel(
            vector_store_id=vector_store.id,
            batch_id=batch.id,
        )

        assert cancelled_batch is not None
        assert cancelled_batch.id == batch.id
        assert cancelled_batch.vector_store_id == vector_store.id
        assert cancelled_batch.status == "cancelled"
        assert cancelled_batch.object == "vector_store.file_batch"
    except Exception:
        # If cancellation fails (e.g., batch completed too quickly),
        # verify the batch reached completion instead
        final_batch = compat_client.vector_stores.file_batches.retrieve(
            vector_store_id=vector_store.id,
            batch_id=batch.id,
        )
        assert final_batch.status in ["completed", "cancelled"]


def test_openai_vector_store_file_batch_retrieve_contents(compat_client_with_empty_stores, client_with_models):
    """Test retrieving file contents after file batch processing."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="batch_contents_test_store")

    # Create multiple files with known content
    file_data = [
        ("test_file_1.txt", b"This is the content of test file 1"),
        ("test_file_2.txt", b"This is the content of test file 2"),
    ]

    file_ids = []
    for filename, content in file_data:
        with BytesIO(content) as file_buffer:
            file_buffer.name = filename
            file = compat_client.files.create(file=file_buffer, purpose="assistants")
        file_ids.append(file.id)

    # Create a file batch
    batch = compat_client.vector_stores.file_batches.create(
        vector_store_id=vector_store.id,
        file_ids=file_ids,
    )

    # Wait for batch processing to complete
    max_retries = 60  # 60 seconds max wait (increased for file processing delays)
    retries = 0
    while retries < max_retries:
        retrieved_batch = compat_client.vector_stores.file_batches.retrieve(
            vector_store_id=vector_store.id,
            batch_id=batch.id,
        )
        if retrieved_batch.status in ["completed", "failed"]:
            break
        time.sleep(1)
        retries += 1

    assert retrieved_batch.status == "completed"

    # Retrieve file contents for each file in the batch
    for i, file_id in enumerate(file_ids):
        file_contents = compat_client.vector_stores.files.content(
            vector_store_id=vector_store.id,
            file_id=file_id,
        )

        assert file_contents is not None
        assert file_contents.filename == file_data[i][0]
        assert len(file_contents.content) > 0

        # Verify the content matches what we uploaded
        content_text = (
            file_contents.content[0].text
            if hasattr(file_contents.content[0], "text")
            else file_contents.content[0]["text"]
        )
        assert file_data[i][1].decode("utf-8") in content_text


def test_openai_vector_store_file_batch_error_handling(compat_client_with_empty_stores, client_with_models):
    """Test error handling for file batch operations."""
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    compat_client = compat_client_with_empty_stores

    # Create a vector store
    vector_store = compat_client.vector_stores.create(name="batch_error_test_store")

    # Test with invalid file IDs (should handle gracefully)
    file_ids = ["invalid_file_id_1", "invalid_file_id_2"]
    batch = compat_client.vector_stores.file_batches.create(
        vector_store_id=vector_store.id,
        file_ids=file_ids,
    )

    assert batch is not None
    assert batch.file_counts.total == len(file_ids)
    # Invalid files should be marked as failed
    assert batch.file_counts.failed >= 0  # Implementation may vary

    # Determine expected errors based on client type
    if isinstance(compat_client, LlamaStackAsLibraryClient):
        errors = ValueError
    else:
        errors = (BadRequestError, OpenAIBadRequestError)

    # Test retrieving non-existent batch
    with pytest.raises(errors):  # Should raise an error for non-existent batch
        compat_client.vector_stores.file_batches.retrieve(
            vector_store_id=vector_store.id,
            batch_id="non_existent_batch_id",
        )

    # Test operations on non-existent vector store
    with pytest.raises(errors):  # Should raise an error for non-existent vector store
        compat_client.vector_stores.file_batches.create(
            vector_store_id="non_existent_vector_store",
            file_ids=["any_file_id"],
        )