# forked from phoenix/litellm-mirror
import asyncio
|
|
import os
|
|
import sys
|
|
import traceback
|
|
|
|
from dotenv import load_dotenv
|
|
|
|
load_dotenv()
|
|
import io
|
|
import os
|
|
|
|
sys.path.insert(
|
|
0, os.path.abspath("../..")
|
|
) # Adds the parent directory to the system path
|
|
from unittest.mock import MagicMock, patch
|
|
|
|
import pytest
|
|
|
|
import litellm
|
|
from litellm import (
|
|
RateLimitError,
|
|
TextCompletionResponse,
|
|
atext_completion,
|
|
completion,
|
|
completion_cost,
|
|
embedding,
|
|
text_completion,
|
|
)
|
|
|
|
litellm.num_retries = 3


# token_prompt: pre-tokenized prompts — a list of two prompts, each given as a
# flat list of integer token IDs — passed as the `prompt` argument to
# text_completion tests (the OpenAI completions API accepts token arrays as
# prompts). NOTE(review): these look like GPT-2/tiktoken-style token IDs —
# confirm against the tokenizer of the model under test before editing values.
token_prompt = [
|
|
[
|
|
32,
|
|
2043,
|
|
32,
|
|
329,
|
|
4585,
|
|
262,
|
|
1644,
|
|
14,
|
|
34,
|
|
3705,
|
|
319,
|
|
616,
|
|
47551,
|
|
30,
|
|
930,
|
|
19219,
|
|
284,
|
|
1949,
|
|
284,
|
|
787,
|
|
428,
|
|
355,
|
|
1790,
|
|
355,
|
|
1744,
|
|
981,
|
|
1390,
|
|
3307,
|
|
2622,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
423,
|
|
587,
|
|
351,
|
|
616,
|
|
41668,
|
|
32682,
|
|
329,
|
|
718,
|
|
812,
|
|
13,
|
|
376,
|
|
666,
|
|
32682,
|
|
468,
|
|
281,
|
|
4697,
|
|
6621,
|
|
11,
|
|
356,
|
|
1183,
|
|
869,
|
|
607,
|
|
25737,
|
|
11,
|
|
508,
|
|
318,
|
|
2579,
|
|
290,
|
|
468,
|
|
257,
|
|
642,
|
|
614,
|
|
1468,
|
|
1200,
|
|
13,
|
|
314,
|
|
373,
|
|
612,
|
|
262,
|
|
1110,
|
|
25737,
|
|
373,
|
|
287,
|
|
4827,
|
|
290,
|
|
14801,
|
|
373,
|
|
4642,
|
|
11,
|
|
673,
|
|
318,
|
|
616,
|
|
41803,
|
|
13,
|
|
2399,
|
|
2104,
|
|
1641,
|
|
468,
|
|
6412,
|
|
284,
|
|
502,
|
|
355,
|
|
465,
|
|
38074,
|
|
494,
|
|
1201,
|
|
1110,
|
|
352,
|
|
13,
|
|
314,
|
|
716,
|
|
407,
|
|
2910,
|
|
475,
|
|
356,
|
|
389,
|
|
1641,
|
|
11,
|
|
673,
|
|
3848,
|
|
502,
|
|
38074,
|
|
494,
|
|
290,
|
|
356,
|
|
423,
|
|
3993,
|
|
13801,
|
|
11,
|
|
26626,
|
|
11864,
|
|
11,
|
|
3503,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
17,
|
|
812,
|
|
2084,
|
|
25737,
|
|
373,
|
|
287,
|
|
14321,
|
|
422,
|
|
2563,
|
|
13230,
|
|
13,
|
|
21051,
|
|
11,
|
|
2356,
|
|
25542,
|
|
11,
|
|
290,
|
|
47482,
|
|
897,
|
|
547,
|
|
607,
|
|
1517,
|
|
13,
|
|
1375,
|
|
550,
|
|
257,
|
|
5110,
|
|
14608,
|
|
290,
|
|
262,
|
|
1641,
|
|
7723,
|
|
1637,
|
|
284,
|
|
3758,
|
|
607,
|
|
284,
|
|
14321,
|
|
290,
|
|
477,
|
|
8389,
|
|
257,
|
|
7269,
|
|
284,
|
|
1011,
|
|
1337,
|
|
286,
|
|
14801,
|
|
13,
|
|
383,
|
|
5156,
|
|
338,
|
|
9955,
|
|
11,
|
|
25737,
|
|
338,
|
|
13850,
|
|
11,
|
|
468,
|
|
257,
|
|
47973,
|
|
14,
|
|
9979,
|
|
2762,
|
|
1693,
|
|
290,
|
|
373,
|
|
503,
|
|
286,
|
|
3240,
|
|
329,
|
|
362,
|
|
1933,
|
|
523,
|
|
339,
|
|
2492,
|
|
470,
|
|
612,
|
|
329,
|
|
477,
|
|
286,
|
|
428,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
3347,
|
|
10667,
|
|
5223,
|
|
503,
|
|
706,
|
|
513,
|
|
1528,
|
|
11,
|
|
23630,
|
|
673,
|
|
373,
|
|
366,
|
|
38125,
|
|
290,
|
|
655,
|
|
2622,
|
|
257,
|
|
3338,
|
|
8399,
|
|
1911,
|
|
314,
|
|
2298,
|
|
607,
|
|
510,
|
|
11,
|
|
1011,
|
|
607,
|
|
284,
|
|
607,
|
|
2156,
|
|
11,
|
|
290,
|
|
673,
|
|
3393,
|
|
2925,
|
|
284,
|
|
7523,
|
|
20349,
|
|
290,
|
|
4144,
|
|
257,
|
|
6099,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
892,
|
|
20349,
|
|
318,
|
|
257,
|
|
2563,
|
|
290,
|
|
716,
|
|
845,
|
|
386,
|
|
12,
|
|
66,
|
|
1236,
|
|
571,
|
|
292,
|
|
3584,
|
|
314,
|
|
836,
|
|
470,
|
|
7523,
|
|
11,
|
|
475,
|
|
326,
|
|
373,
|
|
407,
|
|
5035,
|
|
6402,
|
|
314,
|
|
655,
|
|
6497,
|
|
607,
|
|
510,
|
|
422,
|
|
14321,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
32,
|
|
1285,
|
|
1568,
|
|
673,
|
|
373,
|
|
6294,
|
|
329,
|
|
3013,
|
|
24707,
|
|
287,
|
|
262,
|
|
12436,
|
|
1539,
|
|
819,
|
|
5722,
|
|
329,
|
|
852,
|
|
604,
|
|
1933,
|
|
2739,
|
|
11,
|
|
39398,
|
|
607,
|
|
1097,
|
|
5059,
|
|
981,
|
|
1029,
|
|
290,
|
|
318,
|
|
852,
|
|
16334,
|
|
329,
|
|
720,
|
|
1120,
|
|
74,
|
|
422,
|
|
15228,
|
|
278,
|
|
656,
|
|
257,
|
|
2156,
|
|
11,
|
|
290,
|
|
373,
|
|
12165,
|
|
503,
|
|
286,
|
|
376,
|
|
666,
|
|
32682,
|
|
338,
|
|
584,
|
|
6621,
|
|
338,
|
|
2156,
|
|
329,
|
|
32012,
|
|
262,
|
|
14595,
|
|
373,
|
|
30601,
|
|
510,
|
|
290,
|
|
2491,
|
|
357,
|
|
7091,
|
|
373,
|
|
1029,
|
|
8,
|
|
290,
|
|
262,
|
|
2104,
|
|
34624,
|
|
373,
|
|
46432,
|
|
1268,
|
|
1961,
|
|
422,
|
|
1660,
|
|
2465,
|
|
780,
|
|
8168,
|
|
2073,
|
|
1625,
|
|
1363,
|
|
329,
|
|
807,
|
|
2250,
|
|
13,
|
|
720,
|
|
1238,
|
|
11,
|
|
830,
|
|
286,
|
|
2465,
|
|
290,
|
|
5875,
|
|
5770,
|
|
511,
|
|
2156,
|
|
5096,
|
|
5017,
|
|
340,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
2504,
|
|
373,
|
|
477,
|
|
938,
|
|
614,
|
|
13,
|
|
1119,
|
|
1053,
|
|
587,
|
|
287,
|
|
511,
|
|
649,
|
|
2156,
|
|
319,
|
|
511,
|
|
898,
|
|
329,
|
|
546,
|
|
718,
|
|
1933,
|
|
13,
|
|
554,
|
|
3389,
|
|
673,
|
|
1444,
|
|
34020,
|
|
290,
|
|
531,
|
|
511,
|
|
8744,
|
|
373,
|
|
4423,
|
|
572,
|
|
780,
|
|
673,
|
|
1422,
|
|
470,
|
|
423,
|
|
262,
|
|
1637,
|
|
780,
|
|
41646,
|
|
338,
|
|
37751,
|
|
1392,
|
|
32621,
|
|
510,
|
|
290,
|
|
1422,
|
|
470,
|
|
467,
|
|
832,
|
|
13,
|
|
679,
|
|
3432,
|
|
511,
|
|
2739,
|
|
8744,
|
|
9024,
|
|
492,
|
|
257,
|
|
2472,
|
|
286,
|
|
720,
|
|
4059,
|
|
13,
|
|
314,
|
|
1807,
|
|
340,
|
|
373,
|
|
13678,
|
|
306,
|
|
5789,
|
|
475,
|
|
4030,
|
|
616,
|
|
5422,
|
|
4423,
|
|
13,
|
|
1439,
|
|
468,
|
|
587,
|
|
5897,
|
|
1201,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
7571,
|
|
2745,
|
|
2084,
|
|
11,
|
|
673,
|
|
1965,
|
|
502,
|
|
284,
|
|
8804,
|
|
617,
|
|
1637,
|
|
284,
|
|
651,
|
|
38464,
|
|
329,
|
|
399,
|
|
8535,
|
|
13,
|
|
3226,
|
|
1781,
|
|
314,
|
|
1101,
|
|
407,
|
|
1016,
|
|
284,
|
|
1309,
|
|
616,
|
|
41803,
|
|
393,
|
|
6621,
|
|
467,
|
|
14720,
|
|
11,
|
|
645,
|
|
2300,
|
|
644,
|
|
318,
|
|
1016,
|
|
319,
|
|
4306,
|
|
11,
|
|
523,
|
|
314,
|
|
910,
|
|
314,
|
|
1183,
|
|
307,
|
|
625,
|
|
379,
|
|
642,
|
|
13,
|
|
314,
|
|
1392,
|
|
572,
|
|
670,
|
|
1903,
|
|
290,
|
|
651,
|
|
612,
|
|
379,
|
|
362,
|
|
25,
|
|
2231,
|
|
13,
|
|
314,
|
|
1282,
|
|
287,
|
|
1262,
|
|
616,
|
|
13952,
|
|
1994,
|
|
11,
|
|
2513,
|
|
287,
|
|
11,
|
|
766,
|
|
399,
|
|
8535,
|
|
2712,
|
|
351,
|
|
36062,
|
|
287,
|
|
262,
|
|
5228,
|
|
11,
|
|
25737,
|
|
3804,
|
|
503,
|
|
319,
|
|
262,
|
|
18507,
|
|
11,
|
|
290,
|
|
16914,
|
|
319,
|
|
262,
|
|
6891,
|
|
3084,
|
|
13,
|
|
8989,
|
|
2406,
|
|
422,
|
|
257,
|
|
1641,
|
|
47655,
|
|
351,
|
|
13230,
|
|
11,
|
|
314,
|
|
760,
|
|
644,
|
|
16914,
|
|
3073,
|
|
588,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
760,
|
|
703,
|
|
881,
|
|
340,
|
|
373,
|
|
11,
|
|
475,
|
|
314,
|
|
714,
|
|
423,
|
|
23529,
|
|
276,
|
|
340,
|
|
510,
|
|
290,
|
|
5901,
|
|
616,
|
|
18057,
|
|
351,
|
|
340,
|
|
13,
|
|
314,
|
|
6810,
|
|
19772,
|
|
2024,
|
|
8347,
|
|
287,
|
|
262,
|
|
2166,
|
|
2119,
|
|
290,
|
|
399,
|
|
8535,
|
|
373,
|
|
287,
|
|
3294,
|
|
11685,
|
|
286,
|
|
8242,
|
|
290,
|
|
607,
|
|
7374,
|
|
15224,
|
|
13,
|
|
383,
|
|
4894,
|
|
373,
|
|
572,
|
|
13,
|
|
383,
|
|
2156,
|
|
373,
|
|
3863,
|
|
2319,
|
|
37,
|
|
532,
|
|
340,
|
|
373,
|
|
1542,
|
|
2354,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
1718,
|
|
399,
|
|
8535,
|
|
284,
|
|
616,
|
|
1097,
|
|
11,
|
|
290,
|
|
1444,
|
|
16679,
|
|
329,
|
|
281,
|
|
22536,
|
|
355,
|
|
314,
|
|
373,
|
|
12008,
|
|
25737,
|
|
373,
|
|
14904,
|
|
2752,
|
|
13,
|
|
220,
|
|
314,
|
|
1422,
|
|
470,
|
|
765,
|
|
284,
|
|
10436,
|
|
290,
|
|
22601,
|
|
503,
|
|
399,
|
|
8535,
|
|
523,
|
|
314,
|
|
9658,
|
|
287,
|
|
262,
|
|
1097,
|
|
290,
|
|
1309,
|
|
607,
|
|
711,
|
|
319,
|
|
616,
|
|
3072,
|
|
1566,
|
|
262,
|
|
22536,
|
|
5284,
|
|
13,
|
|
3226,
|
|
1781,
|
|
1644,
|
|
290,
|
|
32084,
|
|
3751,
|
|
510,
|
|
355,
|
|
880,
|
|
13,
|
|
314,
|
|
4893,
|
|
262,
|
|
3074,
|
|
290,
|
|
780,
|
|
399,
|
|
8535,
|
|
338,
|
|
9955,
|
|
318,
|
|
503,
|
|
286,
|
|
3240,
|
|
1762,
|
|
11,
|
|
34020,
|
|
14,
|
|
44,
|
|
4146,
|
|
547,
|
|
1444,
|
|
13,
|
|
1649,
|
|
484,
|
|
5284,
|
|
484,
|
|
547,
|
|
5897,
|
|
290,
|
|
4692,
|
|
11,
|
|
1422,
|
|
470,
|
|
1107,
|
|
1561,
|
|
11,
|
|
1718,
|
|
399,
|
|
8535,
|
|
11,
|
|
290,
|
|
1297,
|
|
502,
|
|
284,
|
|
467,
|
|
1363,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
2025,
|
|
1711,
|
|
1568,
|
|
314,
|
|
651,
|
|
1363,
|
|
290,
|
|
41668,
|
|
32682,
|
|
7893,
|
|
502,
|
|
644,
|
|
314,
|
|
1053,
|
|
1760,
|
|
13,
|
|
314,
|
|
4893,
|
|
2279,
|
|
284,
|
|
683,
|
|
290,
|
|
477,
|
|
339,
|
|
550,
|
|
373,
|
|
8993,
|
|
329,
|
|
502,
|
|
13,
|
|
18626,
|
|
262,
|
|
2104,
|
|
1641,
|
|
1541,
|
|
2993,
|
|
290,
|
|
547,
|
|
28674,
|
|
379,
|
|
502,
|
|
329,
|
|
644,
|
|
314,
|
|
550,
|
|
1760,
|
|
13,
|
|
18626,
|
|
314,
|
|
373,
|
|
366,
|
|
448,
|
|
286,
|
|
1627,
|
|
290,
|
|
8531,
|
|
1,
|
|
780,
|
|
314,
|
|
1444,
|
|
16679,
|
|
878,
|
|
4379,
|
|
611,
|
|
673,
|
|
373,
|
|
1682,
|
|
31245,
|
|
6,
|
|
278,
|
|
780,
|
|
340,
|
|
2900,
|
|
503,
|
|
673,
|
|
373,
|
|
655,
|
|
47583,
|
|
503,
|
|
422,
|
|
262,
|
|
16914,
|
|
13,
|
|
775,
|
|
8350,
|
|
329,
|
|
2250,
|
|
290,
|
|
314,
|
|
1364,
|
|
290,
|
|
3377,
|
|
262,
|
|
1755,
|
|
379,
|
|
616,
|
|
1266,
|
|
1545,
|
|
338,
|
|
2156,
|
|
290,
|
|
16896,
|
|
477,
|
|
1755,
|
|
13,
|
|
314,
|
|
3521,
|
|
470,
|
|
5412,
|
|
340,
|
|
477,
|
|
523,
|
|
314,
|
|
2900,
|
|
616,
|
|
3072,
|
|
572,
|
|
290,
|
|
3088,
|
|
284,
|
|
8960,
|
|
290,
|
|
655,
|
|
9480,
|
|
866,
|
|
13,
|
|
2011,
|
|
1266,
|
|
1545,
|
|
373,
|
|
510,
|
|
477,
|
|
1755,
|
|
351,
|
|
502,
|
|
11,
|
|
5149,
|
|
502,
|
|
314,
|
|
750,
|
|
2147,
|
|
2642,
|
|
11,
|
|
290,
|
|
314,
|
|
1101,
|
|
8788,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
1210,
|
|
616,
|
|
3072,
|
|
319,
|
|
290,
|
|
314,
|
|
550,
|
|
6135,
|
|
13399,
|
|
14,
|
|
37348,
|
|
1095,
|
|
13,
|
|
31515,
|
|
11,
|
|
34020,
|
|
11,
|
|
47551,
|
|
11,
|
|
41668,
|
|
32682,
|
|
11,
|
|
290,
|
|
511,
|
|
7083,
|
|
1641,
|
|
1866,
|
|
24630,
|
|
502,
|
|
13,
|
|
1119,
|
|
389,
|
|
2282,
|
|
314,
|
|
20484,
|
|
607,
|
|
1204,
|
|
11,
|
|
20484,
|
|
399,
|
|
8535,
|
|
338,
|
|
1204,
|
|
11,
|
|
925,
|
|
2279,
|
|
517,
|
|
8253,
|
|
621,
|
|
340,
|
|
2622,
|
|
284,
|
|
307,
|
|
11,
|
|
925,
|
|
340,
|
|
1171,
|
|
618,
|
|
340,
|
|
373,
|
|
257,
|
|
366,
|
|
17989,
|
|
14669,
|
|
1600,
|
|
290,
|
|
20484,
|
|
25737,
|
|
338,
|
|
8395,
|
|
286,
|
|
1683,
|
|
1972,
|
|
20750,
|
|
393,
|
|
1719,
|
|
10804,
|
|
286,
|
|
607,
|
|
1200,
|
|
757,
|
|
11,
|
|
4844,
|
|
286,
|
|
606,
|
|
1683,
|
|
765,
|
|
284,
|
|
766,
|
|
502,
|
|
757,
|
|
290,
|
|
314,
|
|
481,
|
|
1239,
|
|
766,
|
|
616,
|
|
41803,
|
|
757,
|
|
11,
|
|
290,
|
|
484,
|
|
765,
|
|
502,
|
|
284,
|
|
1414,
|
|
329,
|
|
25737,
|
|
338,
|
|
7356,
|
|
6314,
|
|
290,
|
|
20889,
|
|
502,
|
|
329,
|
|
262,
|
|
32084,
|
|
1339,
|
|
290,
|
|
7016,
|
|
12616,
|
|
13,
|
|
198,
|
|
198,
|
|
40,
|
|
716,
|
|
635,
|
|
783,
|
|
2060,
|
|
13,
|
|
1406,
|
|
319,
|
|
1353,
|
|
286,
|
|
6078,
|
|
616,
|
|
1266,
|
|
1545,
|
|
286,
|
|
838,
|
|
812,
|
|
357,
|
|
69,
|
|
666,
|
|
32682,
|
|
828,
|
|
314,
|
|
481,
|
|
4425,
|
|
616,
|
|
7962,
|
|
314,
|
|
550,
|
|
351,
|
|
683,
|
|
11,
|
|
644,
|
|
314,
|
|
3177,
|
|
616,
|
|
1641,
|
|
11,
|
|
290,
|
|
616,
|
|
399,
|
|
8535,
|
|
13,
|
|
198,
|
|
198,
|
|
40,
|
|
4988,
|
|
1254,
|
|
12361,
|
|
13,
|
|
314,
|
|
423,
|
|
12361,
|
|
9751,
|
|
284,
|
|
262,
|
|
966,
|
|
810,
|
|
314,
|
|
1101,
|
|
7960,
|
|
2130,
|
|
318,
|
|
1016,
|
|
284,
|
|
1282,
|
|
651,
|
|
366,
|
|
260,
|
|
18674,
|
|
1,
|
|
319,
|
|
502,
|
|
329,
|
|
644,
|
|
314,
|
|
750,
|
|
13,
|
|
314,
|
|
460,
|
|
470,
|
|
4483,
|
|
13,
|
|
314,
|
|
423,
|
|
2626,
|
|
767,
|
|
8059,
|
|
422,
|
|
340,
|
|
13,
|
|
314,
|
|
1101,
|
|
407,
|
|
11029,
|
|
329,
|
|
7510,
|
|
13,
|
|
314,
|
|
423,
|
|
11668,
|
|
739,
|
|
616,
|
|
2951,
|
|
13,
|
|
314,
|
|
1053,
|
|
550,
|
|
807,
|
|
50082,
|
|
12,
|
|
12545,
|
|
287,
|
|
734,
|
|
2745,
|
|
13,
|
|
1629,
|
|
717,
|
|
314,
|
|
2936,
|
|
523,
|
|
6563,
|
|
287,
|
|
616,
|
|
2551,
|
|
475,
|
|
355,
|
|
262,
|
|
1528,
|
|
467,
|
|
416,
|
|
314,
|
|
1101,
|
|
3612,
|
|
3863,
|
|
484,
|
|
547,
|
|
826,
|
|
290,
|
|
314,
|
|
815,
|
|
423,
|
|
10667,
|
|
319,
|
|
607,
|
|
878,
|
|
4585,
|
|
16679,
|
|
290,
|
|
852,
|
|
5306,
|
|
3019,
|
|
992,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
1337,
|
|
546,
|
|
25737,
|
|
7471,
|
|
11,
|
|
475,
|
|
314,
|
|
750,
|
|
18344,
|
|
257,
|
|
642,
|
|
614,
|
|
1468,
|
|
1200,
|
|
1497,
|
|
422,
|
|
607,
|
|
3397,
|
|
290,
|
|
314,
|
|
1254,
|
|
12361,
|
|
546,
|
|
340,
|
|
13,
|
|
314,
|
|
760,
|
|
2130,
|
|
287,
|
|
262,
|
|
1641,
|
|
481,
|
|
1011,
|
|
607,
|
|
287,
|
|
11,
|
|
475,
|
|
340,
|
|
338,
|
|
1239,
|
|
588,
|
|
852,
|
|
351,
|
|
534,
|
|
3397,
|
|
13,
|
|
1375,
|
|
481,
|
|
1663,
|
|
510,
|
|
20315,
|
|
278,
|
|
502,
|
|
329,
|
|
340,
|
|
290,
|
|
477,
|
|
314,
|
|
1053,
|
|
1683,
|
|
1760,
|
|
318,
|
|
1842,
|
|
607,
|
|
355,
|
|
616,
|
|
898,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
22367,
|
|
11,
|
|
317,
|
|
2043,
|
|
32,
|
|
30,
|
|
4222,
|
|
1037,
|
|
502,
|
|
13,
|
|
383,
|
|
14934,
|
|
318,
|
|
6600,
|
|
502,
|
|
6776,
|
|
13,
|
|
220,
|
|
198,
|
|
24361,
|
|
25,
|
|
1148,
|
|
428,
|
|
2642,
|
|
30,
|
|
198,
|
|
33706,
|
|
25,
|
|
645,
|
|
],
|
|
[
|
|
32,
|
|
2043,
|
|
32,
|
|
329,
|
|
4585,
|
|
262,
|
|
1644,
|
|
14,
|
|
34,
|
|
3705,
|
|
319,
|
|
616,
|
|
47551,
|
|
30,
|
|
930,
|
|
19219,
|
|
284,
|
|
1949,
|
|
284,
|
|
787,
|
|
428,
|
|
355,
|
|
1790,
|
|
355,
|
|
1744,
|
|
981,
|
|
1390,
|
|
3307,
|
|
2622,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
423,
|
|
587,
|
|
351,
|
|
616,
|
|
41668,
|
|
32682,
|
|
329,
|
|
718,
|
|
812,
|
|
13,
|
|
376,
|
|
666,
|
|
32682,
|
|
468,
|
|
281,
|
|
4697,
|
|
6621,
|
|
11,
|
|
356,
|
|
1183,
|
|
869,
|
|
607,
|
|
25737,
|
|
11,
|
|
508,
|
|
318,
|
|
2579,
|
|
290,
|
|
468,
|
|
257,
|
|
642,
|
|
614,
|
|
1468,
|
|
1200,
|
|
13,
|
|
314,
|
|
373,
|
|
612,
|
|
262,
|
|
1110,
|
|
25737,
|
|
373,
|
|
287,
|
|
4827,
|
|
290,
|
|
14801,
|
|
373,
|
|
4642,
|
|
11,
|
|
673,
|
|
318,
|
|
616,
|
|
41803,
|
|
13,
|
|
2399,
|
|
2104,
|
|
1641,
|
|
468,
|
|
6412,
|
|
284,
|
|
502,
|
|
355,
|
|
465,
|
|
38074,
|
|
494,
|
|
1201,
|
|
1110,
|
|
352,
|
|
13,
|
|
314,
|
|
716,
|
|
407,
|
|
2910,
|
|
475,
|
|
356,
|
|
389,
|
|
1641,
|
|
11,
|
|
673,
|
|
3848,
|
|
502,
|
|
38074,
|
|
494,
|
|
290,
|
|
356,
|
|
423,
|
|
3993,
|
|
13801,
|
|
11,
|
|
26626,
|
|
11864,
|
|
11,
|
|
3503,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
17,
|
|
812,
|
|
2084,
|
|
25737,
|
|
373,
|
|
287,
|
|
14321,
|
|
422,
|
|
2563,
|
|
13230,
|
|
13,
|
|
21051,
|
|
11,
|
|
2356,
|
|
25542,
|
|
11,
|
|
290,
|
|
47482,
|
|
897,
|
|
547,
|
|
607,
|
|
1517,
|
|
13,
|
|
1375,
|
|
550,
|
|
257,
|
|
5110,
|
|
14608,
|
|
290,
|
|
262,
|
|
1641,
|
|
7723,
|
|
1637,
|
|
284,
|
|
3758,
|
|
607,
|
|
284,
|
|
14321,
|
|
290,
|
|
477,
|
|
8389,
|
|
257,
|
|
7269,
|
|
284,
|
|
1011,
|
|
1337,
|
|
286,
|
|
14801,
|
|
13,
|
|
383,
|
|
5156,
|
|
338,
|
|
9955,
|
|
11,
|
|
25737,
|
|
338,
|
|
13850,
|
|
11,
|
|
468,
|
|
257,
|
|
47973,
|
|
14,
|
|
9979,
|
|
2762,
|
|
1693,
|
|
290,
|
|
373,
|
|
503,
|
|
286,
|
|
3240,
|
|
329,
|
|
362,
|
|
1933,
|
|
523,
|
|
339,
|
|
2492,
|
|
470,
|
|
612,
|
|
329,
|
|
477,
|
|
286,
|
|
428,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
3347,
|
|
10667,
|
|
5223,
|
|
503,
|
|
706,
|
|
513,
|
|
1528,
|
|
11,
|
|
23630,
|
|
673,
|
|
373,
|
|
366,
|
|
38125,
|
|
290,
|
|
655,
|
|
2622,
|
|
257,
|
|
3338,
|
|
8399,
|
|
1911,
|
|
314,
|
|
2298,
|
|
607,
|
|
510,
|
|
11,
|
|
1011,
|
|
607,
|
|
284,
|
|
607,
|
|
2156,
|
|
11,
|
|
290,
|
|
673,
|
|
3393,
|
|
2925,
|
|
284,
|
|
7523,
|
|
20349,
|
|
290,
|
|
4144,
|
|
257,
|
|
6099,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
892,
|
|
20349,
|
|
318,
|
|
257,
|
|
2563,
|
|
290,
|
|
716,
|
|
845,
|
|
386,
|
|
12,
|
|
66,
|
|
1236,
|
|
571,
|
|
292,
|
|
3584,
|
|
314,
|
|
836,
|
|
470,
|
|
7523,
|
|
11,
|
|
475,
|
|
326,
|
|
373,
|
|
407,
|
|
5035,
|
|
6402,
|
|
314,
|
|
655,
|
|
6497,
|
|
607,
|
|
510,
|
|
422,
|
|
14321,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
32,
|
|
1285,
|
|
1568,
|
|
673,
|
|
373,
|
|
6294,
|
|
329,
|
|
3013,
|
|
24707,
|
|
287,
|
|
262,
|
|
12436,
|
|
1539,
|
|
819,
|
|
5722,
|
|
329,
|
|
852,
|
|
604,
|
|
1933,
|
|
2739,
|
|
11,
|
|
39398,
|
|
607,
|
|
1097,
|
|
5059,
|
|
981,
|
|
1029,
|
|
290,
|
|
318,
|
|
852,
|
|
16334,
|
|
329,
|
|
720,
|
|
1120,
|
|
74,
|
|
422,
|
|
15228,
|
|
278,
|
|
656,
|
|
257,
|
|
2156,
|
|
11,
|
|
290,
|
|
373,
|
|
12165,
|
|
503,
|
|
286,
|
|
376,
|
|
666,
|
|
32682,
|
|
338,
|
|
584,
|
|
6621,
|
|
338,
|
|
2156,
|
|
329,
|
|
32012,
|
|
262,
|
|
14595,
|
|
373,
|
|
30601,
|
|
510,
|
|
290,
|
|
2491,
|
|
357,
|
|
7091,
|
|
373,
|
|
1029,
|
|
8,
|
|
290,
|
|
262,
|
|
2104,
|
|
34624,
|
|
373,
|
|
46432,
|
|
1268,
|
|
1961,
|
|
422,
|
|
1660,
|
|
2465,
|
|
780,
|
|
8168,
|
|
2073,
|
|
1625,
|
|
1363,
|
|
329,
|
|
807,
|
|
2250,
|
|
13,
|
|
720,
|
|
1238,
|
|
11,
|
|
830,
|
|
286,
|
|
2465,
|
|
290,
|
|
5875,
|
|
5770,
|
|
511,
|
|
2156,
|
|
5096,
|
|
5017,
|
|
340,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
2504,
|
|
373,
|
|
477,
|
|
938,
|
|
614,
|
|
13,
|
|
1119,
|
|
1053,
|
|
587,
|
|
287,
|
|
511,
|
|
649,
|
|
2156,
|
|
319,
|
|
511,
|
|
898,
|
|
329,
|
|
546,
|
|
718,
|
|
1933,
|
|
13,
|
|
554,
|
|
3389,
|
|
673,
|
|
1444,
|
|
34020,
|
|
290,
|
|
531,
|
|
511,
|
|
8744,
|
|
373,
|
|
4423,
|
|
572,
|
|
780,
|
|
673,
|
|
1422,
|
|
470,
|
|
423,
|
|
262,
|
|
1637,
|
|
780,
|
|
41646,
|
|
338,
|
|
37751,
|
|
1392,
|
|
32621,
|
|
510,
|
|
290,
|
|
1422,
|
|
470,
|
|
467,
|
|
832,
|
|
13,
|
|
679,
|
|
3432,
|
|
511,
|
|
2739,
|
|
8744,
|
|
9024,
|
|
492,
|
|
257,
|
|
2472,
|
|
286,
|
|
720,
|
|
4059,
|
|
13,
|
|
314,
|
|
1807,
|
|
340,
|
|
373,
|
|
13678,
|
|
306,
|
|
5789,
|
|
475,
|
|
4030,
|
|
616,
|
|
5422,
|
|
4423,
|
|
13,
|
|
1439,
|
|
468,
|
|
587,
|
|
5897,
|
|
1201,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
7571,
|
|
2745,
|
|
2084,
|
|
11,
|
|
673,
|
|
1965,
|
|
502,
|
|
284,
|
|
8804,
|
|
617,
|
|
1637,
|
|
284,
|
|
651,
|
|
38464,
|
|
329,
|
|
399,
|
|
8535,
|
|
13,
|
|
3226,
|
|
1781,
|
|
314,
|
|
1101,
|
|
407,
|
|
1016,
|
|
284,
|
|
1309,
|
|
616,
|
|
41803,
|
|
393,
|
|
6621,
|
|
467,
|
|
14720,
|
|
11,
|
|
645,
|
|
2300,
|
|
644,
|
|
318,
|
|
1016,
|
|
319,
|
|
4306,
|
|
11,
|
|
523,
|
|
314,
|
|
910,
|
|
314,
|
|
1183,
|
|
307,
|
|
625,
|
|
379,
|
|
642,
|
|
13,
|
|
314,
|
|
1392,
|
|
572,
|
|
670,
|
|
1903,
|
|
290,
|
|
651,
|
|
612,
|
|
379,
|
|
362,
|
|
25,
|
|
2231,
|
|
13,
|
|
314,
|
|
1282,
|
|
287,
|
|
1262,
|
|
616,
|
|
13952,
|
|
1994,
|
|
11,
|
|
2513,
|
|
287,
|
|
11,
|
|
766,
|
|
399,
|
|
8535,
|
|
2712,
|
|
351,
|
|
36062,
|
|
287,
|
|
262,
|
|
5228,
|
|
11,
|
|
25737,
|
|
3804,
|
|
503,
|
|
319,
|
|
262,
|
|
18507,
|
|
11,
|
|
290,
|
|
16914,
|
|
319,
|
|
262,
|
|
6891,
|
|
3084,
|
|
13,
|
|
8989,
|
|
2406,
|
|
422,
|
|
257,
|
|
1641,
|
|
47655,
|
|
351,
|
|
13230,
|
|
11,
|
|
314,
|
|
760,
|
|
644,
|
|
16914,
|
|
3073,
|
|
588,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
760,
|
|
703,
|
|
881,
|
|
340,
|
|
373,
|
|
11,
|
|
475,
|
|
314,
|
|
714,
|
|
423,
|
|
23529,
|
|
276,
|
|
340,
|
|
510,
|
|
290,
|
|
5901,
|
|
616,
|
|
18057,
|
|
351,
|
|
340,
|
|
13,
|
|
314,
|
|
6810,
|
|
19772,
|
|
2024,
|
|
8347,
|
|
287,
|
|
262,
|
|
2166,
|
|
2119,
|
|
290,
|
|
399,
|
|
8535,
|
|
373,
|
|
287,
|
|
3294,
|
|
11685,
|
|
286,
|
|
8242,
|
|
290,
|
|
607,
|
|
7374,
|
|
15224,
|
|
13,
|
|
383,
|
|
4894,
|
|
373,
|
|
572,
|
|
13,
|
|
383,
|
|
2156,
|
|
373,
|
|
3863,
|
|
2319,
|
|
37,
|
|
532,
|
|
340,
|
|
373,
|
|
1542,
|
|
2354,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
1718,
|
|
399,
|
|
8535,
|
|
284,
|
|
616,
|
|
1097,
|
|
11,
|
|
290,
|
|
1444,
|
|
16679,
|
|
329,
|
|
281,
|
|
22536,
|
|
355,
|
|
314,
|
|
373,
|
|
12008,
|
|
25737,
|
|
373,
|
|
14904,
|
|
2752,
|
|
13,
|
|
220,
|
|
314,
|
|
1422,
|
|
470,
|
|
765,
|
|
284,
|
|
10436,
|
|
290,
|
|
22601,
|
|
503,
|
|
399,
|
|
8535,
|
|
523,
|
|
314,
|
|
9658,
|
|
287,
|
|
262,
|
|
1097,
|
|
290,
|
|
1309,
|
|
607,
|
|
711,
|
|
319,
|
|
616,
|
|
3072,
|
|
1566,
|
|
262,
|
|
22536,
|
|
5284,
|
|
13,
|
|
3226,
|
|
1781,
|
|
1644,
|
|
290,
|
|
32084,
|
|
3751,
|
|
510,
|
|
355,
|
|
880,
|
|
13,
|
|
314,
|
|
4893,
|
|
262,
|
|
3074,
|
|
290,
|
|
780,
|
|
399,
|
|
8535,
|
|
338,
|
|
9955,
|
|
318,
|
|
503,
|
|
286,
|
|
3240,
|
|
1762,
|
|
11,
|
|
34020,
|
|
14,
|
|
44,
|
|
4146,
|
|
547,
|
|
1444,
|
|
13,
|
|
1649,
|
|
484,
|
|
5284,
|
|
484,
|
|
547,
|
|
5897,
|
|
290,
|
|
4692,
|
|
11,
|
|
1422,
|
|
470,
|
|
1107,
|
|
1561,
|
|
11,
|
|
1718,
|
|
399,
|
|
8535,
|
|
11,
|
|
290,
|
|
1297,
|
|
502,
|
|
284,
|
|
467,
|
|
1363,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
2025,
|
|
1711,
|
|
1568,
|
|
314,
|
|
651,
|
|
1363,
|
|
290,
|
|
41668,
|
|
32682,
|
|
7893,
|
|
502,
|
|
644,
|
|
314,
|
|
1053,
|
|
1760,
|
|
13,
|
|
314,
|
|
4893,
|
|
2279,
|
|
284,
|
|
683,
|
|
290,
|
|
477,
|
|
339,
|
|
550,
|
|
373,
|
|
8993,
|
|
329,
|
|
502,
|
|
13,
|
|
18626,
|
|
262,
|
|
2104,
|
|
1641,
|
|
1541,
|
|
2993,
|
|
290,
|
|
547,
|
|
28674,
|
|
379,
|
|
502,
|
|
329,
|
|
644,
|
|
314,
|
|
550,
|
|
1760,
|
|
13,
|
|
18626,
|
|
314,
|
|
373,
|
|
366,
|
|
448,
|
|
286,
|
|
1627,
|
|
290,
|
|
8531,
|
|
1,
|
|
780,
|
|
314,
|
|
1444,
|
|
16679,
|
|
878,
|
|
4379,
|
|
611,
|
|
673,
|
|
373,
|
|
1682,
|
|
31245,
|
|
6,
|
|
278,
|
|
780,
|
|
340,
|
|
2900,
|
|
503,
|
|
673,
|
|
373,
|
|
655,
|
|
47583,
|
|
503,
|
|
422,
|
|
262,
|
|
16914,
|
|
13,
|
|
775,
|
|
8350,
|
|
329,
|
|
2250,
|
|
290,
|
|
314,
|
|
1364,
|
|
290,
|
|
3377,
|
|
262,
|
|
1755,
|
|
379,
|
|
616,
|
|
1266,
|
|
1545,
|
|
338,
|
|
2156,
|
|
290,
|
|
16896,
|
|
477,
|
|
1755,
|
|
13,
|
|
314,
|
|
3521,
|
|
470,
|
|
5412,
|
|
340,
|
|
477,
|
|
523,
|
|
314,
|
|
2900,
|
|
616,
|
|
3072,
|
|
572,
|
|
290,
|
|
3088,
|
|
284,
|
|
8960,
|
|
290,
|
|
655,
|
|
9480,
|
|
866,
|
|
13,
|
|
2011,
|
|
1266,
|
|
1545,
|
|
373,
|
|
510,
|
|
477,
|
|
1755,
|
|
351,
|
|
502,
|
|
11,
|
|
5149,
|
|
502,
|
|
314,
|
|
750,
|
|
2147,
|
|
2642,
|
|
11,
|
|
290,
|
|
314,
|
|
1101,
|
|
8788,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
40,
|
|
1210,
|
|
616,
|
|
3072,
|
|
319,
|
|
290,
|
|
314,
|
|
550,
|
|
6135,
|
|
13399,
|
|
14,
|
|
37348,
|
|
1095,
|
|
13,
|
|
31515,
|
|
11,
|
|
34020,
|
|
11,
|
|
47551,
|
|
11,
|
|
41668,
|
|
32682,
|
|
11,
|
|
290,
|
|
511,
|
|
7083,
|
|
1641,
|
|
1866,
|
|
24630,
|
|
502,
|
|
13,
|
|
1119,
|
|
389,
|
|
2282,
|
|
314,
|
|
20484,
|
|
607,
|
|
1204,
|
|
11,
|
|
20484,
|
|
399,
|
|
8535,
|
|
338,
|
|
1204,
|
|
11,
|
|
925,
|
|
2279,
|
|
517,
|
|
8253,
|
|
621,
|
|
340,
|
|
2622,
|
|
284,
|
|
307,
|
|
11,
|
|
925,
|
|
340,
|
|
1171,
|
|
618,
|
|
340,
|
|
373,
|
|
257,
|
|
366,
|
|
17989,
|
|
14669,
|
|
1600,
|
|
290,
|
|
20484,
|
|
25737,
|
|
338,
|
|
8395,
|
|
286,
|
|
1683,
|
|
1972,
|
|
20750,
|
|
393,
|
|
1719,
|
|
10804,
|
|
286,
|
|
607,
|
|
1200,
|
|
757,
|
|
11,
|
|
4844,
|
|
286,
|
|
606,
|
|
1683,
|
|
765,
|
|
284,
|
|
766,
|
|
502,
|
|
757,
|
|
290,
|
|
314,
|
|
481,
|
|
1239,
|
|
766,
|
|
616,
|
|
41803,
|
|
757,
|
|
11,
|
|
290,
|
|
484,
|
|
765,
|
|
502,
|
|
284,
|
|
1414,
|
|
329,
|
|
25737,
|
|
338,
|
|
7356,
|
|
6314,
|
|
290,
|
|
20889,
|
|
502,
|
|
329,
|
|
262,
|
|
32084,
|
|
1339,
|
|
290,
|
|
7016,
|
|
12616,
|
|
13,
|
|
198,
|
|
198,
|
|
40,
|
|
716,
|
|
635,
|
|
783,
|
|
2060,
|
|
13,
|
|
1406,
|
|
319,
|
|
1353,
|
|
286,
|
|
6078,
|
|
616,
|
|
1266,
|
|
1545,
|
|
286,
|
|
838,
|
|
812,
|
|
357,
|
|
69,
|
|
666,
|
|
32682,
|
|
828,
|
|
314,
|
|
481,
|
|
4425,
|
|
616,
|
|
7962,
|
|
314,
|
|
550,
|
|
351,
|
|
683,
|
|
11,
|
|
644,
|
|
314,
|
|
3177,
|
|
616,
|
|
1641,
|
|
11,
|
|
290,
|
|
616,
|
|
399,
|
|
8535,
|
|
13,
|
|
198,
|
|
198,
|
|
40,
|
|
4988,
|
|
1254,
|
|
12361,
|
|
13,
|
|
314,
|
|
423,
|
|
12361,
|
|
9751,
|
|
284,
|
|
262,
|
|
966,
|
|
810,
|
|
314,
|
|
1101,
|
|
7960,
|
|
2130,
|
|
318,
|
|
1016,
|
|
284,
|
|
1282,
|
|
651,
|
|
366,
|
|
260,
|
|
18674,
|
|
1,
|
|
319,
|
|
502,
|
|
329,
|
|
644,
|
|
314,
|
|
750,
|
|
13,
|
|
314,
|
|
460,
|
|
470,
|
|
4483,
|
|
13,
|
|
314,
|
|
423,
|
|
2626,
|
|
767,
|
|
8059,
|
|
422,
|
|
340,
|
|
13,
|
|
314,
|
|
1101,
|
|
407,
|
|
11029,
|
|
329,
|
|
7510,
|
|
13,
|
|
314,
|
|
423,
|
|
11668,
|
|
739,
|
|
616,
|
|
2951,
|
|
13,
|
|
314,
|
|
1053,
|
|
550,
|
|
807,
|
|
50082,
|
|
12,
|
|
12545,
|
|
287,
|
|
734,
|
|
2745,
|
|
13,
|
|
1629,
|
|
717,
|
|
314,
|
|
2936,
|
|
523,
|
|
6563,
|
|
287,
|
|
616,
|
|
2551,
|
|
475,
|
|
355,
|
|
262,
|
|
1528,
|
|
467,
|
|
416,
|
|
314,
|
|
1101,
|
|
3612,
|
|
3863,
|
|
484,
|
|
547,
|
|
826,
|
|
290,
|
|
314,
|
|
815,
|
|
423,
|
|
10667,
|
|
319,
|
|
607,
|
|
878,
|
|
4585,
|
|
16679,
|
|
290,
|
|
852,
|
|
5306,
|
|
3019,
|
|
992,
|
|
13,
|
|
314,
|
|
836,
|
|
470,
|
|
1337,
|
|
546,
|
|
25737,
|
|
7471,
|
|
11,
|
|
475,
|
|
314,
|
|
750,
|
|
18344,
|
|
257,
|
|
642,
|
|
614,
|
|
1468,
|
|
1200,
|
|
1497,
|
|
422,
|
|
607,
|
|
3397,
|
|
290,
|
|
314,
|
|
1254,
|
|
12361,
|
|
546,
|
|
340,
|
|
13,
|
|
314,
|
|
760,
|
|
2130,
|
|
287,
|
|
262,
|
|
1641,
|
|
481,
|
|
1011,
|
|
607,
|
|
287,
|
|
11,
|
|
475,
|
|
340,
|
|
338,
|
|
1239,
|
|
588,
|
|
852,
|
|
351,
|
|
534,
|
|
3397,
|
|
13,
|
|
1375,
|
|
481,
|
|
1663,
|
|
510,
|
|
20315,
|
|
278,
|
|
502,
|
|
329,
|
|
340,
|
|
290,
|
|
477,
|
|
314,
|
|
1053,
|
|
1683,
|
|
1760,
|
|
318,
|
|
1842,
|
|
607,
|
|
355,
|
|
616,
|
|
898,
|
|
13,
|
|
220,
|
|
198,
|
|
198,
|
|
22367,
|
|
11,
|
|
317,
|
|
2043,
|
|
32,
|
|
30,
|
|
4222,
|
|
1037,
|
|
502,
|
|
13,
|
|
383,
|
|
14934,
|
|
318,
|
|
6600,
|
|
502,
|
|
6776,
|
|
13,
|
|
220,
|
|
198,
|
|
24361,
|
|
25,
|
|
1148,
|
|
428,
|
|
2642,
|
|
30,
|
|
198,
|
|
33706,
|
|
25,
|
|
3763,
|
|
],
|
|
]
|
|
|
|
|
|
def test_unit_test_text_completion_object():
    """TextCompletionResponse should round-trip a raw OpenAI /completions payload.

    The fixture reproduces a real 54-choice response.  Each row in
    ``choice_rows`` is ``(text_offset, token_logprob, token, top_logprobs)``;
    the choice index is the row position, the emitted ``text`` equals the
    single sampled token, and every choice finished with reason ``"length"``.
    """
    choice_rows = [
        (101, -0.00023488728, "0", {"0": -0.00023488728, "1": -8.375235, "zero": -14.101797, "__": -14.554922, "00": -14.98461}),
        (116, -0.013745008, "0", {"0": -0.013745008, "1": -4.294995, "00": -12.287183, "2": -12.771558, "3": -14.013745}),
        (108, -3.655073e-5, "0", {"0": -3.655073e-5, "1": -10.656286, "__": -11.789099, "false": -12.984411, "00": -14.039099}),
        (106, -0.1345946, "0", {"0": -0.1345946, "1": -2.0720947, "2": -12.798657, "false": -13.970532, "00": -14.27522}),
        (95, -0.10491652, "0", {"0": -0.10491652, "1": -2.3236666, "2": -7.0111666, "3": -7.987729, "4": -9.050229}),
        (121, -0.00026300468, "0", {"0": -0.00026300468, "1": -8.250263, "zero": -14.976826, " ": -15.461201, "000": -15.773701}),
        (146, -5.085517e-5, "0", {"0": -5.085517e-5, "1": -9.937551, "000": -13.929738, "__": -14.968801, "zero": -15.070363}),
        (100, -0.13875218, "1", {"1": -0.13875218, "0": -2.0450022, "2": -9.7559395, "3": -11.1465645, "4": -11.5528145}),
        (143, -0.0005573204, "0", {"0": -0.0005573204, "1": -7.6099324, "3": -10.070869, "2": -11.617744, " ": -12.859932}),
        (143, -0.0018747397, "0", {"0": -0.0018747397, "1": -6.29875, "3": -11.2675, "4": -11.634687, "2": -11.822187}),
        (110, -0.003476763, "0", {"0": -0.003476763, "1": -5.6909766, "__": -10.526915, "None": -10.925352, "False": -11.88629}),
        (106, -0.00032962486, "0", {"0": -0.00032962486, "1": -8.03158, "__": -13.445642, "2": -13.828455, "zero": -15.453455}),
        (143, -9.984788e-5, "0", {"0": -9.984788e-5, "1": -9.21885, " ": -14.836038, "zero": -16.265724, "00": -16.578224}),
        (106, -0.0010039895, "0", {"0": -0.0010039895, "1": -6.907254, "2": -13.743192, "false": -15.227567, "3": -15.297879}),
        (106, -0.0005681643, "0", {"0": -0.0005681643, "1": -7.5005684, "__": -11.836506, "zero": -13.242756, "file": -13.445881}),
        (146, -3.9769227e-5, "0", {"0": -3.9769227e-5, "1": -10.15629, "000": -15.078165, "00": -15.664103, "zero": -16.015665}),
        (143, -0.0006509595, "0", {"0": -0.0006509595, "1": -7.344401, "2": -13.352214, " ": -13.852214, "3": -14.680339}),
        (103, -0.0093299495, "0", {"0": -0.0093299495, "1": -4.681205, "2": -11.173392, "3": -13.439017, "00": -14.673392}),
        (130, -0.00024382756, "0", {"0": -0.00024382756, "1": -8.328369, " ": -13.640869, "zero": -14.859619, "null": -16.51587}),
        (107, -0.0006452414, "0", {"0": -0.0006452414, "1": -7.36002, "00": -12.328771, "000": -12.961583, "2": -14.211583}),
        (143, -0.0012751155, "0", {"0": -0.0012751155, "1": -6.67315, "__": -11.970025, "<|endoftext|>": -14.907525, "3": -14.930963}),
        (107, -7.1954215e-5, "0", {"0": -7.1954215e-5, "1": -9.640697, "00": -13.500072, "000": -13.523509, "__": -13.945384}),
        (108, -0.0032367748, "0", {"0": -0.0032367748, "1": -5.737612, "<|endoftext|>": -13.940737, "2": -14.167299, "00": -14.292299}),
        (117, -0.00018673266, "0", {"0": -0.00018673266, "1": -8.593937, "zero": -15.179874, "null": -15.515812, "None": -15.851749}),
        (104, -0.0010223285, "0", {"0": -0.0010223285, "1": -6.8916473, "__": -13.05571, "00": -14.071335, "zero": -14.235397}),
        (108, -0.0038979414, "0", {"0": -0.0038979414, "1": -5.550773, "2": -13.160148, "00": -14.144523, "3": -14.41796}),
        (143, -0.00074721366, "0", {"0": -0.00074721366, "1": -7.219497, "3": -11.430435, "2": -13.367935, " ": -13.735123}),
        (146, -8.566264e-5, "0", {"0": -8.566264e-5, "1": -9.375086, "000": -15.359461, "__": -15.671961, "00": -15.679773}),
        (119, -0.000274683, "0", {"0": -0.000274683, "1": -8.2034, "00": -14.898712, "2": -15.633087, "__": -16.844025}),
        (143, -0.014869375, "0", {"0": -0.014869375, "1": -4.217994, "2": -11.63987, "3": -11.944557, "5": -12.26487}),
        (110, -0.010907865, "0", {"0": -0.010907865, "1": -4.5265326, "2": -11.440596, "<|endoftext|>": -12.456221, "file": -13.049971}),
        (143, -0.00070528337, "0", {"0": -0.00070528337, "1": -7.2663302, "6": -13.141331, "2": -13.797581, "3": -13.836643}),
        (143, -0.0004983439, "0", {"0": -0.0004983439, "1": -7.6098733, "3": -14.211436, "2": -14.336436, " ": -15.117686}),
        (110, -3.6908343e-5, "0", {"0": -3.6908343e-5, "1": -10.250037, "00": -14.2266, "__": -14.7266, "000": -16.164099}),
        (104, -0.003917157, "0", {"0": -0.003917157, "1": -5.550792, "2": -11.355479, "00": -12.777354, "3": -13.652354}),
        (146, -5.0139948e-5, "0", {"0": -5.0139948e-5, "1": -9.921926, "000": -14.851613, "00": -15.414113, "zero": -15.687551}),
        (143, -0.0005143099, "0", {"0": -0.0005143099, "1": -7.5786395, " ": -14.406764, "00": -14.570827, "999": -14.633327}),
        (103, -0.00013691289, "0", {"0": -0.00013691289, "1": -8.968887, "__": -12.547012, "zero": -13.57045, "00": -13.8517}),
        (103, -0.00032569113, "0", {"0": -0.00032569113, "1": -8.047201, "2": -13.570639, "zero": -14.023764, "false": -14.726889}),
        (113, -3.7146747e-5, "0", {"0": -3.7146747e-5, "1": -10.203162, "zero": -18.437536, "2": -20.117224, " zero": -20.210974}),
        (110, -7.4695905e-5, "0", {"0": -7.4695905e-5, "1": -9.515699, "00": -14.836012, "__": -16.093824, "file": -16.468824}),
        (111, -0.02289473, "0", {"0": -0.02289473, "1": -3.7885196, "2": -12.499457, "3": -14.546332, "00": -15.66352}),
        (108, -0.0011367622, "0", {"0": -0.0011367622, "1": -6.782387, "2": -13.493324, "00": -15.071449, "zero": -15.727699}),
        (115, -0.0006384541, "0", {"0": -0.0006384541, "1": -7.3600135, "00": -14.0397005, "2": -14.4303255, "000": -15.563138}),
        (143, -0.0007382771, "0", {"0": -0.0007382771, "1": -7.219488, "4": -13.516363, "2": -13.555426, "3": -13.602301}),
        (143, -0.0014242834, "0", {"0": -0.0014242834, "1": -6.5639243, "2": -12.493611, "__": -12.712361, "3": -12.884236}),
        (111, -0.00017088225, "0", {"0": -0.00017088225, "1": -8.765796, "zero": -12.695483, "__": -12.804858, "time": -12.882983}),
        (146, -0.000107238506, "0", {"0": -0.000107238506, "1": -9.171982, "000": -13.648544, "__": -14.531357, "zero": -14.586044}),
        (106, -0.0028172398, "0", {"0": -0.0028172398, "1": -5.877817, "00": -12.16688, "2": -12.487192, "000": -14.182505}),
        (104, -0.00043460296, "0", {"0": -0.00043460296, "1": -7.7816844, "00": -13.570747, "2": -13.60981, "__": -13.789497}),
        (143, -0.0046973573, "0", {"0": -0.0046973573, "1": -5.3640723, "null": -14.082823, " ": -14.707823, "2": -14.746885}),
        (100, -0.2487161, "0", {"0": -0.2487161, "1": -1.5143411, "2": -9.037779, "3": -10.100279, "4": -10.756529}),
        (108, -0.0011751055, "0", {"0": -0.0011751055, "1": -6.751175, " ": -13.73555, "2": -15.258987, "3": -15.399612}),
        (143, -0.0012339224, "0", {"0": -0.0012339224, "1": -6.719984, "6": -11.430922, "3": -12.165297, "2": -12.696547}),
    ]

    openai_object = {
        "id": "cmpl-99y7B2svVoRWe1xd7UFRmeGjZrFSh",
        "choices": [
            {
                "finish_reason": "length",
                "index": idx,
                "logprobs": {
                    "text_offset": [offset],
                    "token_logprobs": [logprob],
                    "tokens": [token],
                    "top_logprobs": [top],
                },
                "text": token,
            }
            for idx, (offset, logprob, token, top) in enumerate(choice_rows)
        ],
        "created": 1712163061,
        "model": "ft:babbage-002:ai-r-d-zapai:v3-fields-used:84jb9rtr",
        "object": "text_completion",
        "system_fingerprint": None,
        "usage": {"completion_tokens": 54, "prompt_tokens": 1877, "total_tokens": 1931},
    }

    text_completion_obj = TextCompletionResponse(**openai_object)

    # Top-level fields round-trip unchanged.
    assert text_completion_obj.id == "cmpl-99y7B2svVoRWe1xd7UFRmeGjZrFSh"
    assert text_completion_obj.object == "text_completion"
    assert text_completion_obj.created == 1712163061
    assert (
        text_completion_obj.model
        == "ft:babbage-002:ai-r-d-zapai:v3-fields-used:84jb9rtr"
    )
    assert text_completion_obj.system_fingerprint is None
    assert len(text_completion_obj.choices) == len(openai_object["choices"])

    def _check_choice(choice, raw):
        # Compare each parsed choice against its OWN raw payload entry
        # (the previous version compared every top_logprobs length against
        # choices[0]'s, which only passed by coincidence).
        assert choice.index == raw["index"]
        assert choice.finish_reason == raw["finish_reason"]
        assert choice.text == raw["text"]
        assert choice.logprobs.text_offset == raw["logprobs"]["text_offset"]
        assert choice.logprobs.tokens == raw["logprobs"]["tokens"]
        assert choice.logprobs.token_logprobs == raw["logprobs"]["token_logprobs"]
        assert len(choice.logprobs.top_logprobs) == len(
            raw["logprobs"]["top_logprobs"]
        )

    # Validate every choice (covers the first/second/last spot-checks the
    # original test performed, and all the rest as well).
    for choice, raw in zip(text_completion_obj.choices, openai_object["choices"]):
        _check_choice(choice, raw)

    assert text_completion_obj.usage.completion_tokens == 54
    assert text_completion_obj.usage.prompt_tokens == 1877
    assert text_completion_obj.usage.total_tokens == 1931
|
|
|
|
|
|
def test_completion_openai_prompt():
    """A list prompt should fan out into one completion choice per prompt."""
    try:
        print("\n text 003 test\n")
        prompts = ["What's the weather in SF?", "How is Manchester?"]
        result = text_completion(model="gpt-3.5-turbo-instruct", prompt=prompts)
        print(result)
        assert len(result.choices) == 2
        first_text = result["choices"][0]["text"]
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_openai_prompt()
|
|
|
|
|
|
def test_completion_openai_engine_and_model():
    """Passing both `model` and `engine` should still produce a completion."""
    try:
        print("\n text 003 test\n")
        litellm.set_verbose = True
        call_kwargs = {
            "model": "gpt-3.5-turbo-instruct",
            "engine": "anything",
            "prompt": "What's the weather in SF?",
            "max_tokens": 5,
        }
        result = text_completion(**call_kwargs)
        print(result)
        first_text = result["choices"][0]["text"]
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_openai_engine_and_model()
|
|
|
|
|
|
def test_completion_openai_engine():
    """Calling with only `engine=` (azure-style kwarg) should be accepted."""
    try:
        print("\n text 003 test\n")
        litellm.set_verbose = True
        result = text_completion(
            engine="gpt-3.5-turbo-instruct",
            prompt="What's the weather in SF?",
            max_tokens=5,
        )
        print(result)
        first_text = result["choices"][0]["text"]
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_openai_engine()
|
|
|
|
|
|
def test_completion_chatgpt_prompt():
    """text_completion against a chat model should be adapted transparently."""
    try:
        print("\n gpt3.5 test\n")
        result = text_completion(
            model="openai/gpt-3.5-turbo", prompt="What's the weather in SF?"
        )
        print(result)
        first_text = result["choices"][0]["text"]
        print("\n", result.choices)
        print("\n", result.choices[0])
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_chatgpt_prompt()
|
|
|
|
|
|
def test_completion_gpt_instruct():
    """Pin the provider explicitly while using an instruct-series model."""
    try:
        result = text_completion(
            model="gpt-3.5-turbo-instruct-0914",
            prompt="What's the weather in SF?",
            custom_llm_provider="openai",
        )
        print(result)
        first_text = result["choices"][0]["text"]
        print("\n", result.choices)
        print("\n", result.choices[0])
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_chatgpt_prompt()
|
|
|
|
|
|
def test_text_completion_basic():
    """Basic completion with logprobs requested."""
    try:
        print("\n test 003 with logprobs \n")
        litellm.set_verbose = False
        result = text_completion(
            model="gpt-3.5-turbo-instruct",
            prompt="good morning",
            max_tokens=10,
            logprobs=10,
        )
        print(result)
        print(result.choices)
        print(result.choices[0])
        first_text = result["choices"][0]["text"]
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_text_completion_basic()
|
|
|
|
|
|
def test_completion_text_003_prompt_array():
    """A 2-d token-id prompt (list of token lists) should be accepted."""
    try:
        litellm.set_verbose = False
        result = text_completion(
            model="gpt-3.5-turbo-instruct",
            prompt=token_prompt,  # 2d list of token ids
        )
        print("\n\n response")
        print(result)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_text_003_prompt_array()
|
|
|
|
|
|
# not including this in our ci cd pipeline, since we don't want to fail tests due to an unstable replit
|
|
# def test_text_completion_with_proxy():
|
|
# try:
|
|
# litellm.set_verbose=True
|
|
# response = text_completion(
|
|
# model="facebook/opt-125m",
|
|
# prompt='Write a tagline for a traditional bavarian tavern',
|
|
# api_base="https://openai-proxy.berriai.repl.co/v1",
|
|
# custom_llm_provider="openai",
|
|
# temperature=0,
|
|
# max_tokens=10,
|
|
# )
|
|
# print("\n\n response")
|
|
|
|
# print(response)
|
|
# except Exception as e:
|
|
# pytest.fail(f"Error occurred: {e}")
|
|
# test_text_completion_with_proxy()
|
|
|
|
|
|
##### hugging face tests
|
|
def test_completion_hf_prompt_array():
    """HF inference with a 2-d token prompt; tolerate transient HF outages."""
    try:
        litellm.set_verbose = True
        print("\n testing hf mistral\n")
        result = text_completion(
            model="huggingface/mistralai/Mistral-7B-v0.1",
            prompt=token_prompt,  # 2d list of token ids
            max_tokens=0,
            temperature=0.0,
            # echo=True, # hugging face inference api is currently raising errors for this, looks like they have a regression on their side
        )
        print("\n\n response")
        print(result)
        print(result.choices)
        assert len(result.choices) == 2
    except litellm.RateLimitError:
        print("got rate limit error from hugging face... passsing")
        return
    except Exception as e:
        message = str(e)
        print(message)
        # model cold-start / transient outage -> treat as a pass
        if "is currently loading" in message or "Service Unavailable" in message:
            return
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
# test_completion_hf_prompt_array()
|
|
|
|
|
|
def test_text_completion_stream():
    """Streaming completion via the HF provider should yield chunks cleanly."""
    try:
        stream = text_completion(
            model="huggingface/mistralai/Mistral-7B-v0.1",
            prompt="good morning",
            stream=True,
            max_tokens=10,
        )
        for chunk in stream:
            print(f"chunk: {chunk}")
    except Exception as e:
        pytest.fail(f"GOT exception for HF In streaming{e}")
|
|
|
|
|
|
# test_text_completion_stream()
|
|
|
|
# async def test_text_completion_async_stream():
|
|
# try:
|
|
# response = await atext_completion(
|
|
# model="text-completion-openai/gpt-3.5-turbo-instruct",
|
|
# prompt="good morning",
|
|
# stream=True,
|
|
# max_tokens=10,
|
|
# )
|
|
# async for chunk in response:
|
|
# print(f"chunk: {chunk}")
|
|
# except Exception as e:
|
|
# pytest.fail(f"GOT exception for HF In streaming{e}")
|
|
|
|
# asyncio.run(test_text_completion_async_stream())
|
|
|
|
|
|
def test_async_text_completion():
    """Smoke test atext_completion; timeouts and errors are only logged."""
    litellm.set_verbose = True
    print("test_async_text_completion")

    async def _attempt():
        try:
            result = await litellm.atext_completion(
                model="gpt-3.5-turbo-instruct",
                prompt="good morning",
                stream=False,
                max_tokens=10,
            )
            print(f"response: {result}")
        except Exception as e:
            # litellm.Timeout and any other failure receive the same
            # handling (print only), matching the original branches.
            print(e)

    asyncio.run(_attempt())
|
|
|
|
|
|
@pytest.mark.flaky(retries=6, delay=1)
def test_async_text_completion_together_ai():
    """Smoke test atext_completion against Together AI.

    Rate limits and timeouts are tolerated (logged only); any other
    exception fails the test with the underlying error attached so CI
    failures are actually debuggable.
    """
    litellm.set_verbose = True
    print("test_async_text_completion")

    async def test_get_response():
        try:
            response = await litellm.atext_completion(
                model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
                prompt="good morning",
                max_tokens=10,
            )
            print(f"response: {response}")
        except litellm.RateLimitError as e:
            print(e)
        except litellm.Timeout as e:
            print(e)
        except Exception as e:
            # Previously the message discarded `e`, hiding the real failure.
            pytest.fail(f"An unexpected error occurred - {str(e)}")

    asyncio.run(test_get_response())
|
|
|
|
|
|
# test_async_text_completion()
|
|
|
|
|
|
def test_async_text_completion_stream():
    """atext_completion + streaming — exactly one finish_reason must arrive."""
    litellm.set_verbose = False
    print("test_async_text_completion with stream")

    async def _consume():
        try:
            stream = await litellm.atext_completion(
                model="gpt-3.5-turbo-instruct",
                prompt="good morning",
                stream=True,
            )
            print(f"response: {stream}")

            finish_count = 0
            async for part in stream:
                print(part)
                reason = part["choices"][0].get("finish_reason")
                if reason is not None:
                    finish_count += 1
                    print("finish_reason", reason)

            assert (
                finish_count == 1
            ), f"expected only one finish reason. Got {finish_count}"
        except Exception as e:
            pytest.fail(f"GOT exception for gpt-3.5 instruct In streaming{e}")

    asyncio.run(_consume())
|
|
|
|
|
|
# test_async_text_completion_stream()
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_async_text_completion_chat_model_stream():
    """Stream a chat model through atext_completion: one finish_reason,
    and the rebuilt response must carry a positive cost."""
    try:
        stream = await litellm.atext_completion(
            model="gpt-3.5-turbo",
            prompt="good morning",
            stream=True,
            max_tokens=10,
        )

        finish_count = 0
        collected = []
        async for part in stream:
            print(part)
            collected.append(part)
            if part["choices"][0].get("finish_reason") is not None:
                finish_count += 1

        assert (
            finish_count == 1
        ), f"expected only one finish reason. Got {finish_count}"
        rebuilt = litellm.stream_chunk_builder(chunks=collected)
        cost = litellm.completion_cost(completion_response=rebuilt)
        assert cost > 0
    except Exception as e:
        pytest.fail(f"GOT exception for gpt-3.5 In streaming{e}")
|
|
|
|
|
|
# asyncio.run(test_async_text_completion_chat_model_stream())
|
|
|
|
|
|
@pytest.mark.parametrize("model", ["vertex_ai/codestral@2405"])  #
@pytest.mark.asyncio
async def test_completion_codestral_fim_api(model):
    """FIM (fill-in-the-middle) completion via the codestral API."""
    try:
        if model == "vertex_ai/codestral@2405":
            # vertex requires local credentials to be loaded before the call
            from test_amazing_vertex_completion import (
                load_vertex_ai_credentials,
            )

            load_vertex_ai_credentials()

        litellm.set_verbose = True
        import logging

        from litellm._logging import verbose_logger

        verbose_logger.setLevel(level=logging.DEBUG)
        result = await litellm.atext_completion(
            model=model,
            prompt="def is_odd(n): \n return n % 2 == 1 \ndef test_is_odd():",
            suffix="return True",
            temperature=0,
            top_p=1,
            max_tokens=10,
            min_tokens=10,
            seed=10,
            stop=["return"],
        )
        print(result)

        # the completed text must be present
        assert result.choices[0].text is not None

        # cost = litellm.completion_cost(completion_response=result)
        # print("cost to make mistral completion=", cost)
        # assert cost > 0.0
    except litellm.ServiceUnavailableError:
        print("got ServiceUnavailableError")
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
@pytest.mark.parametrize(
    "model",
    ["vertex_ai/codestral@2405"],
)
@pytest.mark.asyncio
async def test_completion_codestral_fim_api_stream(model):
    """Streaming FIM completion; connection/availability errors are tolerated."""
    try:
        if model == "vertex_ai/codestral@2405":
            from test_amazing_vertex_completion import (
                load_vertex_ai_credentials,
            )

            load_vertex_ai_credentials()
        import logging

        from litellm._logging import verbose_logger

        litellm.set_verbose = False

        # verbose_logger.setLevel(level=logging.DEBUG)
        stream = await litellm.atext_completion(
            model=model,
            prompt="def is_odd(n): \n return n % 2 == 1 \ndef test_is_odd():",
            suffix="return True",
            temperature=0,
            top_p=1,
            stream=True,
            seed=10,
            stop=["return"],
        )

        collected = ""
        async for part in stream:
            print(part)
            collected += part.get("choices")[0].get("text") or ""

        print("full_response", collected)
        # cost = litellm.completion_cost(completion_response=stream)
        # print("cost to make mistral completion=", cost)
        # assert cost > 0.0
    except (litellm.APIConnectionError, litellm.ServiceUnavailableError) as e:
        # transient provider-side problems: log and pass
        print(e)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
|
def mock_post(*args, **kwargs):
    """Stand-in for the raw OpenAI completions ``create`` call.

    Returns a MagicMock shaped like a raw HTTP response whose
    ``parse().model_dump()`` chain yields a canned vLLM-style
    text-completion payload.
    """
    canned_payload = {
        "id": "cmpl-7a59383dd4234092b9e5d652a7ab8143",
        "object": "text_completion",
        "created": 1718824735,
        "model": "Sao10K/L3-70B-Euryale-v2.1",
        "choices": [
            {
                "index": 0,
                "text": ") might be faster than then answering, and the added time it takes for the",
                "logprobs": None,
                "finish_reason": "length",
                "stop_reason": None,
            }
        ],
        "usage": {"prompt_tokens": 2, "total_tokens": 18, "completion_tokens": 16},
    }

    fake_response = MagicMock()
    fake_response.status_code = 200
    fake_response.headers = {"Content-Type": "application/json"}
    # ``with_raw_response`` callers do response.parse().model_dump().
    fake_response.parse.return_value.model_dump.return_value = canned_payload
    return fake_response
|
|
|
|
|
|
@pytest.mark.parametrize("provider", ["openai", "hosted_vllm"])
def test_completion_vllm(provider):
    """
    Asserts a text completion call for vllm actually goes to the text completion endpoint
    """
    from openai import OpenAI

    client = OpenAI(api_key="my-fake-key")

    with patch.object(
        client.completions.with_raw_response, "create", side_effect=mock_post
    ) as mock_call:
        response = text_completion(
            model=f"{provider}/gemini-1.5-flash",
            prompt="ping",
            client=client,
            hello="world",
        )
        print("raw response", response)

        # Usage numbers come straight from the mocked payload.
        assert response.usage.prompt_tokens == 2

        # The raw completions endpoint must have been hit exactly once.
        mock_call.assert_called_once()

        # Unknown kwargs ("hello") must be forwarded via extra_body.
        assert "hello" in mock_call.call_args.kwargs["extra_body"]
|
|
|
|
|
|
def test_completion_fireworks_ai_multiple_choices():
    """A batched prompt list should yield one choice per prompt."""
    litellm.set_verbose = True
    batched_prompts = ["halo", "hi", "halo", "hi"]
    response = litellm.text_completion(
        model="fireworks_ai/llama-v3p1-8b-instruct",
        prompt=batched_prompts,
    )
    print(response.choices)

    # Four prompts in -> four choices out.
    assert len(response.choices) == 4
|
|
|
|
|
|
@pytest.mark.parametrize("stream", [True, False])
def test_text_completion_with_echo(stream):
    """echo=True should return the prompt alongside the completion,
    in both streaming and non-streaming modes."""
    litellm.set_verbose = True
    response = litellm.text_completion(
        model="davinci-002",
        prompt="hello",
        max_tokens=1,  # only see the first token
        stop="\n",  # stop at the first newline
        logprobs=1,  # return log prob
        echo=True,  # if True, return the prompt as well
        stream=stream,
    )
    print(response)

    if not stream:
        assert isinstance(response, TextCompletionResponse)
    else:
        for chunk in response:
            print(chunk)
|