forked from phoenix/litellm-mirror
(fix) test_tokenizers
This commit is contained in:
parent
44b8270bf6
commit
ff815e93a8
1 changed file with 1 addition and 1 deletion
|
@@ -53,7 +53,7 @@ def test_tokenizers():
|
|||
try:
|
||||
### test the openai, claude, cohere and llama2 tokenizers.
|
||||
### The tokenizer value should be different for all
|
||||
sample_text = "Hellö World, this is my input string!"
|
||||
sample_text = "Hellö World, this is my input string! My name is ishaan CTO"
|
||||
|
||||
# openai tokenizer
|
||||
openai_tokens = token_counter(model="gpt-3.5-turbo", text=sample_text)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue