Update app.py
app.py CHANGED
@@ -18,6 +18,7 @@ def tokenize(input_text):
     )
     phi2_tokens = len(phi2_tokenizer(input_text, add_special_tokens=True)["input_ids"])
     t5_tokens = len(t5_tokenizer(input_text, add_special_tokens=True)["input_ids"])
+    gemma_tokens = len(gemma_tokenizer(input_text, add_special_tokens=True)["input_ids"])
 
     results = {
         "LLaMa": llama_tokens,
@@ -27,6 +28,7 @@ def tokenize(input_text):
         "Falcon": falcon_tokens,
         "Phi-2": phi2_tokens,
         "T5": t5_tokens,
+        "Gemma": gemma_tokens
     }
 
     # Sort the results in descending order based on token length
@@ -43,6 +45,7 @@ if __name__ == "__main__":
     falcon_tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b")
     phi2_tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2")
     t5_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-xxl")
+    gemma_tokenizer = AutoTokenizer.from_pretrained("alpindale/gemma-2b")
 
-    iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=
+    iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=8), outputs="text")
     iface.launch()
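For context, a minimal runnable sketch of the token-counting flow this diff is editing, assuming the sorting step referenced by the comment uses a plain sorted() over the results dict (that code falls outside the hunks shown). The gemma_tokenizer repo name comes from the diff; the count_tokens helper and the rest of the scaffolding are illustrative, not the Space's actual code:

from transformers import AutoTokenizer

def count_tokens(input_text, tokenizers):
    # Token count per model, with add_special_tokens=True as in the diff above.
    results = {
        name: len(tok(input_text, add_special_tokens=True)["input_ids"])
        for name, tok in tokenizers.items()
    }
    # "Sort the results in descending order based on token length" -- the
    # comment from the diff; this sorted() call is an assumed implementation.
    return sorted(results.items(), key=lambda item: item[1], reverse=True)

if __name__ == "__main__":
    # Only Gemma is loaded here to keep the sketch small; the Space loads
    # LLaMa, Falcon, Phi-2, and T5 tokenizers the same way.
    tokenizers = {"Gemma": AutoTokenizer.from_pretrained("alpindale/gemma-2b")}
    for name, count in count_tokens("Hello, world!", tokenizers):
        print(f"{name}: {count} tokens")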