suprimedev committed on
Commit
797fae6
1 Parent(s): 99c5a04

Update app.py

Files changed (1)
  1. app.py +6 -141
app.py CHANGED
@@ -1,143 +1,8 @@
- import gradio as gr
- import cohere
- import os
- import re
- import uuid
- import secrets
+ # Use a pipeline as a high-level helper
+ from transformers import pipeline
 
-
-
- cohere_api_key = os.getenv("COHERE_API_KEY")
- co = cohere.Client(cohere_api_key, client_name="huggingface-rp")
-
-
- def trigger_example(example):
-     chat, updated_history = generate_response(example)
-     return chat, updated_history
-
- def generate_response(user_message, cid, token, history=None):
-
-     if not token:
-         raise gr.Error("Error loading.")
-
-     if history is None:
-         history = []
-     if cid == "" or None:
-         cid = str(uuid.uuid4())
-
-     print(f"cid: {cid} prompt:{user_message}")
-
-     history.append(user_message)
-
-     stream = co.chat_stream(message=user_message, conversation_id=cid, model='command-r-plus', connectors=[], temperature=0.3)
-
-     output = ""
-
-     for idx, response in enumerate(stream):
-         if response.event_type == "text-generation":
-             output += response.text
-             if idx == 0:
-                 history.append(" " + output)
-             else:
-                 history[-1] = output
-             chat = [
-                 (history[i].strip(), history[i + 1].strip())
-                 for i in range(0, len(history) - 1, 2)
-             ]
-             yield chat, history, cid
-
-     return chat, history, cid
-
-
- def clear_chat():
-     return [], [], str(uuid.uuid4())
-
-
- examples = [
-     "What are 8 good questions to get to know a stranger?",
-     "Create a list of 10 unusual excuses people might use to get out of a work meeting",
-     "Write a python code to reverse a string",
-     "Explain the relativity theory in French",
-     "Como sair de um helicóptero que caiu na água?",
-     "Formally introduce the transformer architecture with notation.",
-     "¿Cómo le explicarías el aprendizaje automático a un extraterrestre?",
-     "Summarize recent news about the North American tech job market",
-     "Explain gravity to a chicken.",
-     "Is the world discrete or analog?",
-     "What is the memory cost in a typical implementation of an all-gather operation?",
-     "Give me a brief history of the golden era of Cantopop.",
-     "Descrivi il processo di creazione di un capolavoro, come se fossi un artista del Rinascimento a Firenze.",
-     "Explique-moi le sens de la vie selon un grand auteur littéraire.",
-     "Give me an example of an endangered species and let me know what I can do to help preserve it"
+ messages = [
+     {"role": "user", "content": "سلام خوبی؟"},
  ]
-
- custom_css = """
- #logo-img {
- border: none !important;
- }
- #chat-message {
- font-size: 14px;
- min-height: 300px;
- }
- """
-
- with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
-     cid = gr.State("")
-     token = gr.State(value=None)
-
-     with gr.Row():
-         with gr.Column(scale=1):
-             gr.Image("logoplus.png", elem_id="logo-img", show_label=False, show_share_button=False, show_download_button=False)
-         with gr.Column(scale=3):
-             gr.Markdown("""C4AI Command R+ is a research open weights release of a 104B billion parameter with highly advanced Retrieval Augmented Generation (RAG) capabilities, tool Use to automate sophisticated tasks, and is multilingual in 10 languages: English, French, Spanish, Italian, German, Portuguese, Japanese, Korean, Arabic, and Chinese. Command R+ is optimized for a variety of use cases including reasoning, summarization, and question answering.
- <br/><br/>
- **Model**: [c4ai-command-r-plus](https://huggingface.co/CohereForAI/c4ai-command-r-plus)
- <br/>
- **Developed by**: [Cohere](https://cohere.com/) and [Cohere for AI](https://cohere.com/research)
- <br/>
- **License**: [CC-BY-NC](https://cohere.com/c4ai-cc-by-nc-license), requires also adhering to [C4AI's Acceptable Use Policy](https://docs.cohere.com/docs/c4ai-acceptable-use-policy)
- """
-             )
-
-     with gr.Column():
-         with gr.Row():
-             chatbot = gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True)
-
-         with gr.Row():
-             user_message = gr.Textbox(lines=1, placeholder="Ask anything ...", label="Input", show_label=False)
-
-
-         with gr.Row():
-             submit_button = gr.Button("Submit")
-             clear_button = gr.Button("Clear chat")
-
-
-         history = gr.State([])
-
-         user_message.submit(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
-         submit_button.click(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
-
-         clear_button.click(fn=clear_chat, inputs=None, outputs=[chatbot, history, cid], concurrency_limit=32)
-
-         user_message.submit(lambda x: gr.update(value=""), None, [user_message], queue=False)
-         submit_button.click(lambda x: gr.update(value=""), None, [user_message], queue=False)
-         clear_button.click(lambda x: gr.update(value=""), None, [user_message], queue=False)
-
-         with gr.Row():
-             gr.Examples(
-                 examples=examples,
-                 inputs=user_message,
-                 cache_examples=False,
-                 fn=trigger_example,
-                 outputs=[chatbot],
-                 examples_per_page=100
-             )
-
-     demo.load(lambda: secrets.token_hex(16), None, token)
-
- if __name__ == "__main__":
-     # demo.launch(debug=True)
-     try:
-         demo.queue(api_open=False, max_size=40).launch(show_api=False)
-     except Exception as e:
-         print(f"Error: {e}")
+ pipe = pipeline("text-generation", model="CohereForAI/aya-23-8B")
+ pipe(messages)
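
As committed, the new file builds the pipeline and passes the chat messages through it but does not print the result. Below is a minimal sketch of running the snippet end to end; the dtype, device_map, and max_new_tokens settings are assumptions rather than part of the commit, and it presumes a transformers release new enough for the text-generation pipeline to accept chat-style message lists, plus enough memory to load the 8B checkpoint. The Persian prompt means "Hi, how are you?".

# Minimal sketch, not part of the commit: load the model and print the assistant's reply.
import torch
from transformers import pipeline

pipe = pipeline(
    "text-generation",
    model="CohereForAI/aya-23-8B",
    torch_dtype=torch.bfloat16,  # assumed; omit to use the default dtype
    device_map="auto",           # assumed; requires the accelerate package
)

messages = [
    {"role": "user", "content": "سلام خوبی؟"},  # Persian: "Hi, how are you?"
]

# With chat-style input the pipeline applies the model's chat template and
# returns the conversation with the assistant's reply appended at the end.
outputs = pipe(messages, max_new_tokens=256)
print(outputs[0]["generated_text"][-1]["content"])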