wassemgtk committed
Commit 48a45d9
1 parent: d6d5720

Update app.py

Files changed (1)
app.py +24 -10
app.py CHANGED
@@ -1,17 +1,31 @@
 from transformers import AutoTokenizer
 import gradio as gr
+import os
 
-
-tokenizer = AutoTokenizer.from_pretrained("kiranr/gpt2-tokenizer")
+# Retrieve the Hugging Face token from secrets
+huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
 
 def tokenize(input_text):
-    tokens = tokenizer(input_text)["input_ids"]
-    return f"Number of tokens: {len(tokens)}"
+    palmyra_x_003_tokens = len(palmyra_x_003_tokenizer(input_text, add_special_tokens=True)["input_ids"])
+    gpt2_tokens = len(gpt2_tokenizer(input_text, add_special_tokens=True)["input_ids"])
+    palmyra_x_004_tokens = len(palmyra_x_004_tokenizer(input_text, add_special_tokens=True)["input_ids"])
+
+    results = {
+        "Palmyra-X-004": palmyra_x_004_tokens,
+        "Palmyra-Fin & Med": palmyra_x_003_tokens,
+        "Palmyra-X-003": gpt2_tokens
+    }
+
+    # Sort the results in descending order based on token length
+    sorted_results = sorted(results.items(), key=lambda x: x[1], reverse=True)
+
+    return "\n".join([f"{model}: {tokens}" for model, tokens in sorted_results])
+
 
+if __name__ == "__main__":
+    palmyra_x_003_tokenizer = AutoTokenizer.from_pretrained("wassemgtk/palmyra-x-003-tokenizer", token=huggingface_token)
+    gpt2_tokenizer = AutoTokenizer.from_pretrained("gpt2")
+    palmyra_x_004_tokenizer = AutoTokenizer.from_pretrained("wassemgtk/palmyra-x-004-tokenizer", token=huggingface_token)
 
-demo = gr.Interface(
-    fn=tokenize,
-    inputs=gr.Textbox(lines=7),
-    outputs="text",
-)
-demo.launch()
+    iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(label="Input Text", lines=19), outputs="text")
+    iface.launch()
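For reference, a minimal local smoke test of the token-count comparison introduced in this commit. This is a sketch under assumptions, not part of the committed app.py: it expects HUGGINGFACE_TOKEN to be exported in the shell (in a Space it would come from the repository secrets), uses an illustrative sample sentence, and treats the two wassemgtk/palmyra-* tokenizer repos as possibly gated, since app.py passes the token when loading them.

# Sketch: reproduce the comparison tokenize() performs, skipping any
# tokenizer repo that fails to load (e.g. a gated repo without a token).
import os
from transformers import AutoTokenizer

token = os.getenv("HUGGINGFACE_TOKEN")

# Same repo-to-label mapping as app.py.
repos = {
    "Palmyra-X-004": "wassemgtk/palmyra-x-004-tokenizer",
    "Palmyra-Fin & Med": "wassemgtk/palmyra-x-003-tokenizer",
    "Palmyra-X-003": "gpt2",
}

text = "Token counts differ between the Palmyra and GPT-2 vocabularies."  # illustrative
counts = {}
for label, repo in repos.items():
    try:
        tok = AutoTokenizer.from_pretrained(repo, token=token)
        counts[label] = len(tok(text, add_special_tokens=True)["input_ids"])
    except Exception as err:
        print(f"{label}: could not load {repo} ({err})")

# Same descending sort and output format as tokenize() in app.py.
for label, n in sorted(counts.items(), key=lambda kv: kv[1], reverse=True):
    print(f"{label}: {n}")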