Sakalti committed
Commit
cd8414e
1 Parent(s): 6e2a71c

Update app.py

Files changed (1)
  1. app.py +5 -2
app.py CHANGED
@@ -1,3 +1,5 @@
+!pip install torch transformers gradio
+
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 import gradio as gr
@@ -45,16 +47,17 @@ def respond(message, history, max_tokens, temperature, top_p):
 with gr.Blocks() as demo:
     gr.Markdown("## AIチャット")
     chatbot = gr.Chatbot()
-    msg = gr.Textbox(label="あなたのメッセージ")
+    msg = gr.Textbox(label="あなたのメッセージ", placeholder="ここにメッセージを入力...")
     max_tokens = gr.Slider(1, 2048, value=512, step=1, label="Max new tokens")
     temperature = gr.Slider(0.1, 4.0, value=0.7, step=0.1, label="Temperature")
     top_p = gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
+    send_button = gr.Button("送信")
     clear = gr.Button("クリア")
 
     def clear_history():
         return [], []
 
-    msg.submit(respond, inputs=[msg, chatbot, max_tokens, temperature, top_p], outputs=[chatbot, chatbot])
+    send_button.click(respond, inputs=[msg, chatbot, max_tokens, temperature, top_p], outputs=[chatbot, chatbot])
     clear.click(clear_history, outputs=[chatbot])
 
 demo.launch()
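
Two details in this change are worth flagging. The added line !pip install torch transformers gradio is IPython/Jupyter shell syntax; in a plain app.py it is a SyntaxError, so the script will not start at all. On a Gradio Space the usual approach (assuming this repo relies on the standard dependency mechanism) is to drop that line and list the packages in a requirements.txt next to app.py:

torch
transformers
gradio

Separately, clear_history returns two values while clear.click(clear_history, outputs=[chatbot]) declares only one output component, so the return shape and the outputs list do not match. A minimal sketch of a consistent pair, assuming the intent is to reset both the chat history and the message textbox (msg is the existing Textbox in this file):

def clear_history():
    # Empty the chat history and blank out the message textbox.
    return [], ""

clear.click(clear_history, outputs=[chatbot, msg])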