From e80457013b51c5e9689cf3ef5e8d0e226c28dff6 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 12 Sep 2023 12:16:44 -0700
Subject: [PATCH] doc cleanup for custom prompt templates

---
 docs/my-website/docs/providers/huggingface.md | 15 +++----
 docs/my-website/docs/providers/togetherai.md  | 42 +++++++++++++-----
 docs/my-website/docs/providers/vllm.md        |  2 +-
 litellm/__pycache__/main.cpython-311.pyc      | Bin 32824 -> 32862 bytes
 litellm/__pycache__/utils.cpython-311.pyc     | Bin 99751 -> 100211 bytes
 litellm/utils.py                              |  2 +-
 6 files changed, 40 insertions(+), 21 deletions(-)

diff --git a/docs/my-website/docs/providers/huggingface.md b/docs/my-website/docs/providers/huggingface.md
index 9dda860a5..e232cb586 100644
--- a/docs/my-website/docs/providers/huggingface.md
+++ b/docs/my-website/docs/providers/huggingface.md
@@ -42,24 +42,21 @@ def default_pt(messages):
 ```python
 # Create your own custom prompt template works
 litellm.register_prompt_template(
-    model="togethercomputer/LLaMA-2-7B-32K",
-    role_dict={
+    model="togethercomputer/LLaMA-2-7B-32K",
+    roles={
         "system": {
             "pre_message": "[INST] <<SYS>>\n",
             "post_message": "\n<</SYS>>\n [/INST]\n"
         },
-        "user": {
+        "user": {
             "pre_message": "[INST] ",
             "post_message": " [/INST]\n"
         },
         "assistant": {
-            "pre_message": "\n",
-            "post_message": "\n",
+            "post_message": "\n"
         }
-    } # tell LiteLLM how you want to map the openai messages to this model
-    pre_message_sep= "\n",
-    post_message_sep= "\n"
-)
+    }
+)
 
 def test_huggingface_custom_model():
     model = "huggingface/togethercomputer/LLaMA-2-7B-32K"
diff --git a/docs/my-website/docs/providers/togetherai.md b/docs/my-website/docs/providers/togetherai.md
index 8ceb196be..dd1b37989 100644
--- a/docs/my-website/docs/providers/togetherai.md
+++ b/docs/my-website/docs/providers/togetherai.md
@@ -75,11 +75,22 @@ Let's register our custom prompt template: [Implementation Code](https://github.
 import litellm
 
 litellm.register_prompt_template(
-    model="OpenAssistant/llama2-70b-oasst-sft-v10",
-    roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model
-    pre_message_sep= "\n",
-    post_message_sep= "\n"
-)
+    model="OpenAssistant/llama2-70b-oasst-sft-v10",
+    roles={
+        "system": {
+            "pre_message": "[<|im_start|>system",
+            "post_message": "\n"
+        },
+        "user": {
+            "pre_message": "<|im_start|>user",
+            "post_message": "\n"
+        },
+        "assistant": {
+            "pre_message": "<|im_start|>assistant",
+            "post_message": "\n"
+        }
+    }
+)
 ```
 
 Let's use it!
@@ -105,11 +116,22 @@ from litellm import completion
 os.environ["TOGETHERAI_API_KEY"] = ""
 
 litellm.register_prompt_template(
-    model="OpenAssistant/llama2-70b-oasst-sft-v10",
-    roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model
-    pre_message_sep= "\n",
-    post_message_sep= "\n"
-)
+    model="OpenAssistant/llama2-70b-oasst-sft-v10",
+    roles={
+        "system": {
+            "pre_message": "[<|im_start|>system",
+            "post_message": "\n"
+        },
+        "user": {
+            "pre_message": "<|im_start|>user",
+            "post_message": "\n"
+        },
+        "assistant": {
+            "pre_message": "<|im_start|>assistant",
+            "post_message": "\n"
+        }
+    }
+)
 
 messages=[{"role":"user", "content": "Write me a poem about the blue sky"}]
 
diff --git a/docs/my-website/docs/providers/vllm.md b/docs/my-website/docs/providers/vllm.md
index d97f09eae..5a916217e 100644
--- a/docs/my-website/docs/providers/vllm.md
+++ b/docs/my-website/docs/providers/vllm.md
@@ -95,7 +95,7 @@ def default_pt(messages):
 # Create your own custom prompt template works
 litellm.register_prompt_template(
     model="togethercomputer/LLaMA-2-7B-32K",
-    role_dict={
+    roles={
         "system": {
             "pre_message": "[INST] <<SYS>>\n",
             "post_message": "\n<</SYS>>\n [/INST]\n"
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 518b9d26e69c0bb97e3eb6e529cc7d32ef29c489..a7c59fbcb796b7fd9e9705e7315135d0311e46b9 100644
GIT binary patch
delta 4119
[binary delta data omitted]

delta 3967
[binary delta data omitted]

diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index 6e7724d5f9e6a18005cc957167751031d7a6ac7f..a12a9697700203e04ee6663b0541d45c9e185120 100644
GIT binary patch
delta 695
[binary delta data omitted]
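For reference, a minimal sketch of how the `roles`-based `register_prompt_template` call documented in this patch might be exercised end to end. The template arguments mirror the patched togetherai.md; the `together_ai/` model prefix and the final `completion()` call are assumptions for illustration, not part of the diff.

```python
import os
import litellm
from litellm import completion

os.environ["TOGETHERAI_API_KEY"] = ""  # set your Together AI key here

# Map OpenAI-style roles to the model's chat markup, per the patched docs
litellm.register_prompt_template(
    model="OpenAssistant/llama2-70b-oasst-sft-v10",
    roles={
        "system": {"pre_message": "<|im_start|>system", "post_message": "\n"},
        "user": {"pre_message": "<|im_start|>user", "post_message": "\n"},
        "assistant": {"pre_message": "<|im_start|>assistant", "post_message": "\n"},
    },
)

messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]

# Assumed provider prefix for Together AI; adjust to match the deployed docs
response = completion(
    model="together_ai/OpenAssistant/llama2-70b-oasst-sft-v10",
    messages=messages,
)
print(response)
```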