From b76372cee0bf7c46e7e84e270f8758296dfe6a55 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Thu, 21 Sep 2023 10:45:00 -0700
Subject: [PATCH] add metadata logging to promptlayer

---
 .../observability/promptlayer_integration.md  | 32 ++++++++++++++-
 litellm/__pycache__/main.cpython-311.pyc      | Bin 47209 -> 47407 bytes
 litellm/__pycache__/utils.cpython-311.pyc     | Bin 116979 -> 117011 bytes
 litellm/integrations/prompt_layer.py          | 19 ++++++++-
 litellm/main.py                               |  5 ++-
 litellm/tests/test_promptlayer_integration.py | 38 +++++++++++++++---
 litellm/utils.py                              | 24 ++++++-----
 pyproject.toml                                |  2 +-
 8 files changed, 100 insertions(+), 20 deletions(-)

diff --git a/docs/my-website/docs/observability/promptlayer_integration.md b/docs/my-website/docs/observability/promptlayer_integration.md
index 55a8bbb7b5..676c0107fc 100644
--- a/docs/my-website/docs/observability/promptlayer_integration.md
+++ b/docs/my-website/docs/observability/promptlayer_integration.md
@@ -25,7 +25,7 @@ Complete code
 from litellm import completion
 
 ## set env variables
-os.environ["PROMPTLAYER_API_KEY"] = "your"
+os.environ["PROMPTLAYER_API_KEY"] = "your-promptlayer-key"
 
 os.environ["OPENAI_API_KEY"], os.environ["COHERE_API_KEY"] = "", ""
 
@@ -38,3 +38,33 @@ response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content
 #cohere call
 response = completion(model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}])
 ```
+
+### Logging Metadata
+
+You can also log completion call metadata to PromptLayer.
+
+You can add metadata to a completion call through the `metadata` param:
+```python
+completion(model, messages, metadata={"model": "ai21"})
+```
+
+**Complete Code**
+```python
+from litellm import completion
+
+## set env variables
+os.environ["PROMPTLAYER_API_KEY"] = "your-promptlayer-key"
+
+os.environ["OPENAI_API_KEY"], os.environ["COHERE_API_KEY"] = "", ""
+
+# set callbacks
+litellm.success_callback = ["promptlayer"]
+
+#openai call - log llm provider is openai
+response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}], metadata={"provider": "openai"})
+
+#cohere call - log llm provider is cohere
+response = completion(model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}], metadata={"provider": "cohere"})
+```
+
+Credits to [Nick Bradford](https://github.com/nsbradford), from [Vim-GPT](https://github.com/nsbradford/VimGPT), for the suggestion.
\ No newline at end of file
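As a usage note on the docs change above: the "Complete Code" snippet assumes `os` and `litellm` are already imported. A self-contained variant might look like the sketch below; the extra metadata keys (`prompt_version`, `environment`) are purely illustrative, not fields PromptLayer requires.

```python
import os

import litellm
from litellm import completion

## set env variables
os.environ["PROMPTLAYER_API_KEY"] = "your-promptlayer-key"
os.environ["OPENAI_API_KEY"] = "your-openai-key"

# route successful completions to PromptLayer
litellm.success_callback = ["promptlayer"]

# metadata is a dict of tags attached to this call in PromptLayer
response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}],
    metadata={"provider": "openai", "prompt_version": "v1", "environment": "dev"},
)
```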
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 24ef31d5f0186628449fb413f709e8db26ec4af3..9a91ccf3bcb67c40b10999900a2004d48a64b896 100644
GIT binary patch
[compiled __pycache__ bytecode deltas (6952 / 6441 bytes) omitted]

diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
GIT binary patch
[compiled __pycache__ bytecode delta omitted]

zeEFt{I+H&bH@El)gS(q~nX3D!72Lgmjw|e1VNi8ux>H?b%56k^BlrVsbqo}X6}Uz5MZx{XKKsYZgv@`TW^PMUo~1tzTf zFV(#*nsxcl{P7sWLL>Frm$Imkedq=MhS}E=D$MIT7E5o7n6x~pb7>eF+abLtY1d79 z+oi08ew~ZUMZK<9qv=^mE0P|vuf4U#vYe@YshnyUYgC&S2P35Z=dq+_6Ff*8%XhkERU2hGr{3#Fi`)fVHrs`_ms^TjQtajt?U_&|62HpkY zkegey0g~N-Ujx2|7+05Q->_mEn%f!NT{~@4wKrQKQgQQw>a>PvMgYn1vMV43kP3*m z#xSa7*Ypp*?UQc7uz+q3E>JuST%g!1_4Mx!Bs5b@>IDn%{?azR^4x zr``2#-=~L{{A-_e+$@XbVDhw^XCzvW##^~@G%Eo8z+v9D=V~cK&vHdkhyFFgN`|6zD?PpeyCuOdoMe zHd9%JQC%4M5GfO#dZTDgi&dvHU{B-}Q5(^2`L2z6rLQ*)sk~c_?mq{tkTG`3CYN;E zDND=ac^r&PwG)5r?w0Rp)Sb%Z=NjFnxmogFWSoOmkv53wk0NhI(StVUV5Y?oz)CqKoLVrZ~0OJb}J4dz!-Trm3}h+-};5&s<0eWp{X{}Bv2)Q$$%*UwK3J&bU14_t(QTrT)j;` zkVYa-lyF6pF`|yqOE@g0f;HWy^EJC?-1# zlBw)bOINc<$*BwGg#7-TdVgcBaLJScnn>&A>;lS8oefjaHN)lwRV3FI&|+$pR|{wp z&6FF5QvuDFe;H0=ohrDhSnev5N6;OK9R^h2^|ECI^>?7k7RTkT5oFLR`SX!f>r{1y ziu)CLc_ig#s?gvpHtaQZ)ge&|SJVkNMUE__q22ChBZhNfC=~DnL*7tRP;^y}%6khb zKhcIEl;kC?I^zut={jg2@2HbA|3s1ILcH~sRexvDp#TM0|0pn}T)zM$w+d z1_z=p=<_V_i)$;vZzU>USF=$gD&TEKum$p1`JB?>^b!mUcp)-+F8!XUhrIX`a`oJb zHR=-d@c|j~Eb-Pi`9#NJ9Fjxtr2JUu2*B}3^_^t5IxZptU&@9m8rB1u!|@#nmc$@l zP}ekGs-ofaiM&`vTY12TJT!sYG|FT{xvG>bNs0D1C>MKrvZ8meQI6>R%CTW7QK zn?-ahb&rg{n{K0tDo##hTh5}Ozj0KK5nt-^6K7TR~@b}E)K|J z^)wULQUf*7ALJ7abU0hZ>jlh!|9DFRU`V_OI3U+F(riaRgt-b~&Xd+mFpH$I&GKFET#s!StbiQPj8@=E&1F@m491Iy%j|hB7qF|q1pebZ-s&>EKUCaO=S#DfNAEq=P@QI9GNkyn|HNP&;HmMhP@FXRrO`Z8X>gTm)-$8+vWBa@=#4A>Zf!t=2lfa6v0n*n5!2Q zye^o}C{^gef0h>WEoFfDma-BG>UwAestVQKnOkk($-?fR?9@2Afb5c%N+=U1x;+f#P6u zWA!au;CZnz_((}vONF#omapaP`9N-2OWo@pg9ncTeg$ZsxH(MG^3M>c zV5lh^NAvFhb$Gdv5;p-hf(JMo$zDfeh&IZ**3(-fck}dkefPD`at^eoG4Q@R!TLsA zgTMN2kMIQpyr+2zkSEGJAEc4mDV86IJn|s@ny6Wh*+7GmaXK3gerl{zae~kQV1Lr*z$$=YbaCUnU#pmb*)&1H_>lk#el*>0#zm>Qr#RO*X zbz24c|5`?=eCCzC@!Ib6k>U{lz(^QdT~6ly`U1Rj3_>_{r7(LwsK{OvW%bB|VhNsHxQ?hXjxmKd5P@(+o=BsapG%$#apVTe%K* zehLK@$V2kzQimjh#Emaxf>o}+Z96SOML z$|dcfX|b_j0OrWKJLrpHQ0ClJDr&BXu1dfgN}dFC1k}l8J85aYi=A|n`IH~eY*byA z>-CXx0nivp+eLfHsTeB4XXWwV(3)5q%qfU0+D#v64mGYzPTNaU7T6(dcZ8?(W6$^- zORduYxT=h*e<-Np6M+&4iVB0 zX^_qLTNu~5AD@cXpMg>6x5O0VKLG>E%F5Y#N1=ptGu`?bsCWiLYvO-@34ug2x|$o# zGr!rxp8`ubj|A5&S6?7q8^XiB6WO$%GHGfu_u~m;;ED<++%SgFDsXERCI@n*Bvgm^ z$ra*e)|^~1nST}di5~L5U!;Otdb07!74C<{r|`&8!7C=Nq!+0NvCbAG$`@_F{G5^;%Ut5PA6k@gv^?dG#e~rpt2KA86E=&td6# z82KD<7)|w7qFzYOqp99L)Cmve#Fax;1Gsx1xzK0^;HC<_2j1>$}1@<8Fc zTHu4)yzGZtIV;Uil<8n%1E_BTv%Xi=wkn&|wSZ$>;9wT&n8Gn3u;)cb!m6#Qwwc;3 z*c^s^zTaOjke32k^5YvP&2lArG^bTly>G0zmBs9|CAcQC{1s}o53aHk|Nr*i;|0b4 sy0dhW9PlSPn%0)ple)KAehiZ%8L!h)>N?#@ac!1PxG3_}8?@N^KbrL}5C8xG diff --git a/litellm/integrations/prompt_layer.py b/litellm/integrations/prompt_layer.py index c70fcb3334..cf0b7fd57c 100644 --- a/litellm/integrations/prompt_layer.py +++ b/litellm/integrations/prompt_layer.py @@ -41,8 +41,25 @@ class PromptLayerLogger: }, ) print_verbose( - f"Prompt Layer Logging: success - final response object: {request_response}" + f"Prompt Layer Logging: success - final response object: {request_response.text}" ) + response_json = request_response.json() + if "success" not in request_response.json(): + raise Exception("Promptlayer did not successfully log the response!") + + if "request_id" in response_json: + print(kwargs["litellm_params"]["metadata"]) + if kwargs["litellm_params"]["metadata"] is not None: + response = requests.post( + "https://api.promptlayer.com/rest/track-metadata", + json={ + "request_id": response_json["request_id"], + "api_key": self.key, + "metadata": kwargs["litellm_params"]["metadata"] + }, + ) + print_verbose(f"Prompt Layer Logging: success - metadata post response object: 
diff --git a/litellm/main.py b/litellm/main.py
index 546d8a7345..538c60b8db 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -171,6 +171,7 @@ def completion(
     litellm_logging_obj=None,
     use_client=False,
     id=None, # this is an optional param to tag individual completion calls
+    metadata: Optional[dict]=None,
     # model specific optional params
     top_k=40,# used by text-bison only
     task: Optional[str]="text-generation-inference", # used by huggingface inference endpoints
@@ -201,6 +202,7 @@ def completion(
         frequency_penalty: It is used to penalize new tokens based on their frequency in the text so far.
         logit_bias (dict, optional): Used to modify the probability of specific tokens appearing in the completion.
         user (str, optional): A unique identifier representing your end-user. This can help the LLM provider to monitor and detect abuse.
+        metadata (dict, optional): Pass in additional metadata to tag your completion calls - e.g. prompt version, details, etc.
 
         LITELLM Specific Params
         mock_response (str, optional): If provided, return a mock completion response for testing or debugging purposes (default is None).
@@ -276,7 +278,8 @@ def completion(
         api_base=api_base,
         litellm_call_id=litellm_call_id,
         model_alias_map=litellm.model_alias_map,
-        completion_call_id=id
+        completion_call_id=id,
+        metadata=metadata
     )
     logging.update_environment_variables(model=model, user=user, optional_params=optional_params, litellm_params=litellm_params)
     if custom_llm_provider == "azure":
diff --git a/litellm/tests/test_promptlayer_integration.py b/litellm/tests/test_promptlayer_integration.py
index 975434764d..6d3bbe105c 100644
--- a/litellm/tests/test_promptlayer_integration.py
+++ b/litellm/tests/test_promptlayer_integration.py
@@ -13,18 +13,46 @@ import time
 
 
-def test_promptlayer_logging():
+# def test_promptlayer_logging():
+#     try:
+#         # Redirect stdout
+#         old_stdout = sys.stdout
+#         sys.stdout = new_stdout = io.StringIO()
+
+
+#         response = completion(model="claude-instant-1.2",
+#                         messages=[{
+#                             "role": "user",
+#                             "content": "Hi 👋 - i'm claude"
+#                         }])
+
+#         # Restore stdout
+#         time.sleep(1)
+#         sys.stdout = old_stdout
+#         output = new_stdout.getvalue().strip()
+#         print(output)
+#         if "LiteLLM: Prompt Layer Logging: success" not in output:
+#             raise Exception("Required log message not found!")
+
+#     except Exception as e:
+#         print(e)
+
+# test_promptlayer_logging()
+
+
+def test_promptlayer_logging_with_metadata():
     try:
         # Redirect stdout
         old_stdout = sys.stdout
         sys.stdout = new_stdout = io.StringIO()
 
 
-        response = completion(model="claude-instant-1.2",
+        response = completion(model="j2-light",
                         messages=[{
                             "role": "user",
-                            "content": "Hi 👋 - i'm openai"
-                        }])
+                            "content": "Hi 👋 - i'm ai21"
+                        }],
+                        metadata={"model": "ai21"})
 
         # Restore stdout
         time.sleep(1)
         sys.stdout = old_stdout
@@ -37,7 +65,7 @@
 
     except Exception as e:
         print(e)
 
-test_promptlayer_logging()
+# test_promptlayer_logging_with_metadata()
 
 
diff --git a/litellm/utils.py b/litellm/utils.py
index 137e4f9742..ead50964fe 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -177,7 +177,7 @@ class CallTypes(Enum):
 
 # Logging function -> log the exact model details + what's being sent | Non-Blocking
 class Logging:
-    global supabaseClient, liteDebuggerClient
+    global supabaseClient, liteDebuggerClient, promptLayerLogger
 
     def __init__(self, model, messages, stream, call_type, start_time, litellm_call_id, function_id):
        if call_type not in [item.value for item in CallTypes]:
@@ -395,6 +395,15 @@ class Logging:
                     self.litellm_params["stream_response"][litellm_call_id] = new_model_response
                     #print("adding to cache for", litellm_call_id)
                     litellm.cache.add_cache(self.litellm_params["stream_response"][litellm_call_id], **self.model_call_details)
+                if callback == "promptlayer":
+                    print_verbose("reaches promptlayer for logging!")
+                    promptLayerLogger.log_event(
+                        kwargs=self.model_call_details,
+                        response_obj=result,
+                        start_time=start_time,
+                        end_time=end_time,
+                        print_verbose=print_verbose,
+                    )
             except Exception as e:
                 print_verbose(
@@ -827,7 +836,8 @@ def get_litellm_params(
     api_base=None,
     litellm_call_id=None,
     model_alias_map=None,
-    completion_call_id=None
+    completion_call_id=None,
+    metadata=None
 ):
     litellm_params = {
         "return_async": return_async,
@@ -840,6 +850,7 @@ def get_litellm_params(
         "litellm_call_id": litellm_call_id,
         "model_alias_map": model_alias_map,
         "completion_call_id": completion_call_id,
+        "metadata": metadata,
         "stream_response": {} # litellm_call_id: ModelResponse Dict
     }
 
@@ -1630,15 +1641,6 @@ def handle_success(args, kwargs, result, start_time, end_time):
                     run_id=kwargs["litellm_call_id"],
                     print_verbose=print_verbose,
                 )
-            elif callback == "promptlayer":
-                print_verbose("reaches promptlayer for logging!")
-                promptLayerLogger.log_event(
-                    kwargs=kwargs,
-                    response_obj=result,
-                    start_time=start_time,
-                    end_time=end_time,
-                    print_verbose=print_verbose,
-                )
             elif callback == "langfuse":
                 print_verbose("reaches langfuse for logging!")
                 langFuseLogger.log_event(
diff --git a/pyproject.toml b/pyproject.toml
index 8cb6f889eb..73b6beea35 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.721"
+version = "0.1.722"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
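Tying the pieces together: `completion()` now stores `metadata` inside `litellm_params` (via `get_litellm_params`), and `PromptLayerLogger.log_event` reads it back from `kwargs["litellm_params"]["metadata"]` before posting it. Below is a tiny self-contained sketch of that round trip, using simplified stand-ins rather than litellm's full code paths.

```python
from typing import Optional


def get_litellm_params_sketch(metadata: Optional[dict] = None) -> dict:
    # Simplified stand-in: the real helper also records api_base, call ids,
    # model alias maps, etc. alongside the new "metadata" field.
    return {"metadata": metadata, "stream_response": {}}


def read_metadata_like_promptlayer(kwargs: dict) -> Optional[dict]:
    # Mirrors how the integration looks the value up before calling track-metadata.
    return kwargs["litellm_params"]["metadata"]


model_call_details = {
    "litellm_params": get_litellm_params_sketch(metadata={"provider": "openai"})
}
assert read_metadata_like_promptlayer(model_call_details) == {"provider": "openai"}
```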