from transformers import AutoTokenizer, AutoModel
import gradio as gr
from gradio.components import Textbox

# Load the int4-quantized ChatGLM-6B checkpoint; .float() allows CPU-only inference.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b-int4", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b-int4", trust_remote_code=True).float()
model.eval()

# Conversation history, updated after each turn so the model keeps context.
history = []


def ChatGLM_bot(input):
    """Send one user message to ChatGLM and return the model's reply."""
    global history
    if not input:
        return ""
    response, history = model.chat(tokenizer, input, history=history)
    return response

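# Optional sanity check (a minimal sketch, not part of the UI wiring): run one
# example prompt through ChatGLM_bot to confirm the model loaded correctly.
# Remove this line to skip the extra inference at startup.
print(ChatGLM_bot("Hello, please introduce yourself."))
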
# Input and output text boxes for the web UI.
inputs = Textbox(lines=7, label="Please enter your question")
outputs = Textbox(lines=7, label="The all-purpose AI's answer")

# Build the Gradio interface and launch it with a public share link.
gr.Interface(fn=ChatGLM_bot, inputs=inputs, outputs=outputs,
             title="The All-Purpose AI Assistant",
             description="I am an all-purpose AI assistant; you can ask me any question.",
             theme=gr.themes.Default()).launch(share=True)