truongghieu committed
Commit e544d42
1 parent: bcbabe9

Update app.py

Files changed (1)
  1. app.py +21 -5
app.py CHANGED
@@ -1,14 +1,30 @@
  import gradio as gr
- from transformers import AutoTokenizer, AutoModelForCausalLM
+ from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig
+
+
+ generation_config = GenerationConfig(
+     penalty_alpha=0.6,
+     do_sample=True,
+     top_k=5,
+     temperature=0.5,
+     repetition_penalty=1.2,
+     max_new_tokens=100,
+     pad_token_id=tokenizer.eos_token_id
+ )
+
  tokenizer = AutoTokenizer.from_pretrained("truongghieu/deci-finetuned", trust_remote_code=True)
  model = AutoModelForCausalLM.from_pretrained("truongghieu/deci-finetuned", trust_remote_code=True)

+
+
+
  # Define a function that takes a text input and generates a text output
  def generate_text(text):
-     input_ids = tokenizer.encode(text, return_tensors="pt")
-     output_ids = model.generate(input_ids, max_length=200)
-     output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
-     return output_text
+     input_text = f"###Instruction: {text}\n###Response:"
+     input_ids = tokenizer.encode(input_text, return_tensors="pt")
+     output_ids = model.generate(input_ids, generation_config=generation_config)
+     output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+     return output_text

  iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")
  iface.launch()
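
Note: in the file as committed, generation_config is constructed before the tokenizer is loaded, so the reference to tokenizer.eos_token_id would raise a NameError as soon as app.py runs. Below is a minimal sketch of the same app with the load order swapped, keeping the same model ID, prompt template, and generation settings; the reordering is an assumption of this sketch and is not part of commit e544d42.

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig

# Load the fine-tuned tokenizer and model first so that
# tokenizer.eos_token_id exists when the GenerationConfig is built.
tokenizer = AutoTokenizer.from_pretrained("truongghieu/deci-finetuned", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("truongghieu/deci-finetuned", trust_remote_code=True)

# Same sampling settings as the commit, constructed after the tokenizer.
generation_config = GenerationConfig(
    penalty_alpha=0.6,
    do_sample=True,
    top_k=5,
    temperature=0.5,
    repetition_penalty=1.2,
    max_new_tokens=100,
    pad_token_id=tokenizer.eos_token_id,
)

def generate_text(text):
    # Wrap the user prompt in the ###Instruction / ###Response template
    # that this commit introduces, then generate and decode the reply.
    input_text = f"###Instruction: {text}\n###Response:"
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    output_ids = model.generate(input_ids, generation_config=generation_config)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")
iface.launch()

Building generation_config once at module load keeps generate_text itself stateless; individual calls can still override settings by passing extra keyword arguments to model.generate.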