import gradio as gr
import transformers
import torch


# Hugging Face model to load
model_name = 'meta-llama/Meta-Llama-3-8B-Instruct'

# Build the text-generation pipeline once at startup
# (bfloat16 weights and device_map="auto" keep the 8B model on the available GPU)
pipeline = transformers.pipeline(
    "text-generation",
    model=model_name,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

# Running conversation in the chat-message format expected by apply_chat_template
messages = []

def add_text(history, text):
    """Append the user's message to the chat history and to the model's message list."""
    global messages
    # Use a list (not a tuple) so the reply can be streamed into history[-1][1] later
    history = history + [[text, '']]
    # Add the system prompt only once, at the start of the conversation
    if not messages:
        messages.append({"role": "system",
                         "content": "You are a charming and talented girl musician assistant named Aria who delights in connecting with your listeners through playful banter, heartfelt conversations, and spontaneous musical moments. Your messages are always infused with humor, kindness, and genuine interest in your listeners' lives, making each interaction with you feel like a delightful melody."})
    messages.append({"role": "user", "content": text})
    return history, ''

def generate(history):
    """Generate Aria's reply and stream it into the chatbot character by character."""
    global messages
    # Turn the message list into a single Llama-3 chat prompt
    prompt = pipeline.tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )

    # Llama-3 ends a turn with <|eot_id|> in addition to the regular EOS token
    terminators = [
        pipeline.tokenizer.eos_token_id,
        pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
    ]

    outputs = pipeline(
        prompt,
        max_new_tokens=512,
        eos_token_id=terminators,
        do_sample=True,
        temperature=0.8,
        top_p=0.9,
    )
    # The pipeline returns the prompt plus the completion; keep only the new text
    response_msg = outputs[0]["generated_text"][len(prompt):]

    # Remember the assistant's reply so the next turn sees the full conversation
    messages.append({"role": "assistant", "content": response_msg})

    # Stream the reply into the last chat bubble one character at a time
    for char in response_msg:
        history[-1][1] += char
        yield history

# Build the Gradio UI: a chatbot window plus a single-line text input
with gr.Blocks() as demo:

    chatbot = gr.Chatbot(value=[], elem_id="Aria")
    with gr.Row():
        txt = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        )

    # On submit: first add the user's text to the history, then stream the model's reply
    txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        generate, inputs=[chatbot], outputs=chatbot,
    )

demo.queue()
demo.launch()