Rupnil committed on
Commit bae20de
1 Parent(s): 45f375c

Update app.py

Files changed (1):
  1. app.py  +5 -5
app.py CHANGED
@@ -3,7 +3,8 @@ import gradio as gr
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-def format_prompt(message, history, prompt):
+def format_prompt(message, history):
+    prompt = "You are an helpful AI assistant, You are made by Rupnil Mondal."
     for user_prompt, bot_response in history:
         prompt += f"[INST] {user_prompt} [/INST]"
         prompt += f" {bot_response}</s> "
@@ -11,7 +12,7 @@ def format_prompt(message, history, prompt):
     return prompt
 
 def generate(
-    prompt, history, system_prompt="You are AmeuChat", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -27,7 +28,7 @@ def generate(
         seed=42,
     )
 
-    formatted_prompt = format_prompt(prompt, history, system_prompt)
+    formatted_prompt = format_prompt(prompt, history)
 
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
@@ -38,7 +39,6 @@ def generate(
     return output
 
 additional_inputs=[
-    gr.Textbox("You are AmeuChat", label="system_prompt"),
     gr.Slider(
         label="temperature",
         value=0.9,
@@ -70,4 +70,4 @@ demo = gr.ChatInterface(fn=generate,
     undo_btn=None
 )
 
-demo.queue().launch(show_api=True)
+demo.queue().launch()
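
Taken together, the commit removes the user-configurable system_prompt (and its gr.Textbox entry in additional_inputs), hard-codes the system message inside format_prompt, and drops show_api=True from launch(). For orientation, a minimal sketch of how the prompt builder reads after this commit is shown below; the line that appends the incoming message is not part of the diff, so the final [INST] {message} [/INST] step is an assumption based on the surrounding pattern.

def format_prompt(message, history):
    # System message is now fixed in code rather than supplied via a gr.Textbox input.
    prompt = "You are an helpful AI assistant, You are made by Rupnil Mondal."
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    # Assumed final step (not visible in the diff): append the new user message.
    prompt += f"[INST] {message} [/INST]"
    return prompt

# generate() now builds the prompt without a system_prompt argument:
# formatted_prompt = format_prompt(prompt, history)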