Update app.py
app.py CHANGED
@@ -25,16 +25,16 @@ def tokenize(input_text):
     command_r_tokens = len(command_r_tokenizer(input_text, add_special_tokens=True)["input_ids"])
 
     results = {
-        "LLaMa": llama_tokens,
-        "LLaMa-3": llama3_tokens,
-        "Mistral": mistral_tokens,
-        "GPT-2/GPT-J": gpt2_tokens,
-        "GPT-NeoX": gpt_neox_tokens,
-        "Falcon": falcon_tokens,
-        "Phi-2": phi2_tokens,
-        "T5": t5_tokens,
-        "Gemma": gemma_tokens,
-        "Command-R": command_r_tokens
+        "LLaMa-1/LLaMa-2": llama_tokens,
+        " LLaMa-3": llama3_tokens,
+        " Mistral": mistral_tokens,
+        " GPT-2/GPT-J": gpt2_tokens,
+        " GPT-NeoX": gpt_neox_tokens,
+        " Falcon": falcon_tokens,
+        " Phi-2": phi2_tokens,
+        " T5": t5_tokens,
+        " Gemma": gemma_tokens,
+        " Command-R": command_r_tokens
     }
 
     # Sort the results in descending order based on token length
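
The hunk ends just before the sorting step announced by the final comment, which is unchanged by this commit. For context, a minimal sketch of what a descending sort over the results dictionary presumably looks like; the token counts below are hypothetical placeholders for illustration, not output from the app.

    # Sketch only: stand-in token counts, keyed like the updated results dict
    results = {
        "LLaMa-1/LLaMa-2": 12,
        " LLaMa-3": 9,
        " Mistral": 11,
        " GPT-2/GPT-J": 14,
    }

    # Sort the results in descending order based on token length
    sorted_results = dict(
        sorted(results.items(), key=lambda item: item[1], reverse=True)
    )

    print(sorted_results)
    # {' GPT-2/GPT-J': 14, 'LLaMa-1/LLaMa-2': 12, ' Mistral': 11, ' LLaMa-3': 9}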