explorewithai committed on
Commit bdff1e2
1 Parent(s): b29c9a1

Update app.py

Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -4,14 +4,13 @@ import torch
 
 device = 0 if torch.cuda.is_available() else -1
 
-def generate_response(user_input, history, temperature=0.75, do_sample=True, repetition_penalty=1.2):
+def generate_response(user_input, history):
     pipe = pipeline("text-generation", model="frameai/ChatFrame-Instruct-Persian-Small", device=device)
 
     messages = [
-        {"role": "system", "content": "Your name is ChatFrame and you are helpful."},
         {"role": "user", "content": user_input},
     ]
-    response = pipe(messages, max_length=8000, temperature=temperature, do_sample=do_sample, top_p=0.95, repetition_penalty=repetition_penalty)
+    response = pipe(messages, max_length=8000)
     return response[0]['generated_text'][1]["content"]
 
 iface = gr.ChatInterface(
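
For reference, a minimal sketch of what app.py plausibly looks like after this change. The imports, the arguments to gr.ChatInterface, and the launch call are assumptions, since they fall outside the diff hunk shown above; only the hunk itself is confirmed by the commit.

import gradio as gr
import torch
from transformers import pipeline

device = 0 if torch.cuda.is_available() else -1

def generate_response(user_input, history):
    # The pipeline is rebuilt on every call, as in the committed code.
    pipe = pipeline("text-generation", model="frameai/ChatFrame-Instruct-Persian-Small", device=device)

    messages = [
        {"role": "user", "content": user_input},
    ]
    # Sampling arguments were dropped in this commit, so the model's default
    # generation settings apply; only the length cap is passed explicitly.
    response = pipe(messages, max_length=8000)
    # generated_text holds the chat history; index 1 is the assistant's reply.
    return response[0]['generated_text'][1]["content"]

# Assumed: the ChatInterface arguments and the launch call are not visible in the diff.
iface = gr.ChatInterface(fn=generate_response)
iface.launch()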