VLegio committed
Commit
7c797b7
1 Parent(s): 61f41e9

Update app.py

Files changed (1)
  1. app.py +14 -31
app.py CHANGED

@@ -5,38 +5,22 @@ from rwkv.model import RWKV
 from rwkv.utils import PIPELINE, PIPELINE_ARGS
 
 ctx_limit = 2048
-title = "RWKV-5-World-0.4B-v2-20231113-ctx4096.pth"
+title = "ru_rwkv5_extract_qa_04B_65536_ctx8192_L24_D1024.pth"
 
-model_path = hf_hub_download(repo_id="BlinkDL/rwkv-5-world", filename=f"{title}")
+model_path = hf_hub_download(repo_id="Sigma-AI/ru_rwkv5_extract_qa", filename=f"{title}")
 model = RWKV(model=model_path, strategy='cpu bf16')
 pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
 
-def generate_prompt(instruction, input=None):
+def generate_prompt(context, question):
     instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n').replace('\n\n','\n')
     input = input.strip().replace('\r\n','\n').replace('\n\n','\n').replace('\n\n','\n')
-    if input and len(input) > 0:
-        return f"""Instruction: {instruction}
-
-Input: {input}
-
-Response:"""
-    else:
-        return f"""User: hi
-
-Assistant: Hi. I am your assistant and I will provide expert full response in full details. Please feel free to ask any question and I will always answer it.
-
-User: {instruction}
-
-Assistant:"""
+    return f"""CONTEXT:{context}
+QUESTION:{question}
+ANSWER:"""
+
 
 examples = [
-    ["東京で訪れるべき素晴らしい場所とその紹介をいくつか挙げてください。", "", 300, 1.2, 0.5, 0.4, 0.4],
-    ["Écrivez un programme Python pour miner 1 Bitcoin, avec des commentaires.", "", 300, 1.2, 0.5, 0.4, 0.4],
-    ["Write a song about ravens.", "", 300, 1.2, 0.5, 0.4, 0.4],
-    ["Explain the following metaphor: Life is like cats.", "", 300, 1.2, 0.5, 0.4, 0.4],
-    ["Write a story using the following information", "A man named Alex chops a tree down", 300, 1.2, 0.5, 0.4, 0.4],
-    ["Generate a list of adjectives that describe a person as brave.", "", 300, 1.2, 0.5, 0.4, 0.4],
-    ["You have $100, and your goal is to turn that into as much money as possible with AI and Machine Learning. Please respond with detailed plan.", "", 300, 1.2, 0.5, 0.4, 0.4],
+    ["Вторая мировая война (началась 01.09.1939 и закончилась 02.09.1945) — война двух мировых военно-политических коалиций, ставшая крупнейшим вооружённым конфликтом в истории человечества.В ней участвовали 62 государства из 74 существовавших на тот момент (80 % населения Земного шара).\nБоевые действия велись на территории Европы, Азии и Африки и в водах всех океанов. Это единственный конфликт, в котором было применено ядерное оружие. В результате войны погибло более 70 миллионов человек, из которых большинство — мирные жители.Число участвовавших стран менялось в течение войны. Некоторые из них вели активные военные действия, другие помогали Союзникам поставками продовольствия, а многие участвовали в войне только номинально.", "Было ли применено ядерное оружие?", 300, 1, 0.5, 0.4, 0.4],
 ]
 
 def evaluate(

@@ -108,12 +92,11 @@ def alternative(chatbot, history):
 
 with gr.Blocks(title=title) as demo:
     gr.HTML(f"<div style=\"text-align: center;\">\n<h1>🌍World - {title}</h1>\n</div>")
-    with gr.Tab("Instruct mode"):
-        gr.Markdown(f"100% RNN RWKV-LM **trained on 100+ world languages**. Demo limited to ctxlen {ctx_limit}. Finetuned on alpaca, gpt4all, codealpaca and more. For best results, ** keep you prompt short and clear **.</b>.") # <b>UPDATE: now with Chat (see above, as a tab) ==> turn off as of now due to VRAM leak caused by buggy code.
-        with gr.Row():
+    with gr.Tab("Extract QA"):
+        gr.Markdown(f"100% RNN RWKV-LM **trained on 100+ world languages**. Demo limited to ctxlen {ctx_limit}")
             with gr.Column():
-                instruction = gr.Textbox(lines=2, label="Instruction", value='東京で訪れるべき素晴らしい場所とその紹介をいくつか挙げてください。')
-                input = gr.Textbox(lines=2, label="Input", placeholder="")
+                context = gr.Textbox(lines=2, label="Context", value='')
+                question = gr.Textbox(lines=2, label="Question", placeholder="")
                 token_count = gr.Slider(10, 300, label="Max Tokens", step=10, value=300)
                 temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=1.2)
                 top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.5)

@@ -124,10 +107,10 @@ with gr.Blocks(title=title) as demo:
                     submit = gr.Button("Submit", variant="primary")
                     clear = gr.Button("Clear", variant="secondary")
                 output = gr.Textbox(label="Output", lines=5)
-                data = gr.Dataset(components=[instruction, input, token_count, temperature, top_p, presence_penalty, count_penalty], samples=examples, label="Example Instructions", headers=["Instruction", "Input", "Max Tokens", "Temperature", "Top P", "Presence Penalty", "Count Penalty"])
-        submit.click(evaluate, [instruction, input, token_count, temperature, top_p, presence_penalty, count_penalty], [output])
+                data = gr.Dataset(components=[context, question, token_count, temperature, top_p, presence_penalty, count_penalty], samples=examples, label="Example", headers=["Context", "Question", "Max Tokens", "Temperature", "Top P", "Presence Penalty", "Count Penalty"])
+        submit.click(evaluate, [context, question, token_count, temperature, top_p, presence_penalty, count_penalty], [output])
         clear.click(lambda: None, [], [output])
-        data.click(lambda x: x, [data], [instruction, input, token_count, temperature, top_p, presence_penalty, count_penalty])
+        data.click(lambda x: x, [data], [context, question, token_count, temperature, top_p, presence_penalty, count_penalty])
 
 demo.queue(max_size=10)
 demo.launch(share=False)
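
For reference, a minimal sketch of how the new CONTEXT/QUESTION/ANSWER prompt contract can be exercised outside Gradio. The body of evaluate() is not part of the hunks above, so the mapping of the sliders onto PIPELINE_ARGS below, and the choice to normalize context/question (the committed generate_prompt still strips the old instruction/input names), are assumptions rather than the Space's actual code.

# Hedged sketch, not the Space's evaluate(): slider-to-args mapping is assumed.
from huggingface_hub import hf_hub_download
from rwkv.model import RWKV
from rwkv.utils import PIPELINE, PIPELINE_ARGS

title = "ru_rwkv5_extract_qa_04B_65536_ctx8192_L24_D1024.pth"
model_path = hf_hub_download(repo_id="Sigma-AI/ru_rwkv5_extract_qa", filename=title)
model = RWKV(model=model_path, strategy='cpu bf16')
pipeline = PIPELINE(model, "rwkv_vocab_v20230424")

def generate_prompt(context, question):
    # Normalize line endings and collapse doubled blank lines before templating
    # (applied to context/question here by assumption).
    context = context.strip().replace('\r\n', '\n').replace('\n\n', '\n')
    question = question.strip().replace('\r\n', '\n').replace('\n\n', '\n')
    return f"""CONTEXT:{context}
QUESTION:{question}
ANSWER:"""

def answer(context, question, token_count=300, temperature=1.0, top_p=0.5,
           presence_penalty=0.4, count_penalty=0.4):
    # Assumed mapping of the Gradio sliders onto rwkv's sampling arguments.
    args = PIPELINE_ARGS(
        temperature=temperature,
        top_p=top_p,
        alpha_presence=presence_penalty,  # "Presence Penalty" slider
        alpha_frequency=count_penalty,    # "Count Penalty" slider
        token_ban=[],
        token_stop=[0],                   # stop on the end-of-text token
        chunk_len=256,
    )
    prompt = generate_prompt(context, question)
    # pipeline.generate returns the accumulated completion as a string.
    return pipeline.generate(prompt, token_count=token_count, args=args)

print(answer("Вторая мировая война ...", "Было ли применено ядерное оружие?"))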