prithivMLmods committed
Commit edb2d41 • Parent: f41cfa8
Update app.py
app.py
CHANGED
@@ -1,10 +1,12 @@
 import gradio as gr
 from openai import OpenAI
 import os
-from
-from
-import
+from io import BytesIO
+from reportlab.lib.pagesizes import letter
+from reportlab.pdfgen import canvas
+from docx import Document
 
+# Custom CSS
 css = '''
 .gradio-container{max-width: 1000px !important}
 h1{text-align:center}
@@ -13,6 +15,7 @@ footer {
 }
 '''
 
+# Set up OpenAI client
ACCESS_TOKEN = os.getenv("HF_TOKEN")
 
 client = OpenAI(
@@ -20,12 +23,17 @@ client = OpenAI(
     api_key=ACCESS_TOKEN,
 )
 
-
-
-
-
+# Function to handle chat responses
+def respond(
+    message,
+    history: list[tuple[str, str]],
+    system_message,
+    max_tokens,
+    temperature,
+    top_p,
+):
     messages = [{"role": "system", "content": system_message}]
-
+
     for val in history:
         if val[0]:
             messages.append({"role": "user", "content": val[0]})
@@ -45,81 +53,120 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
         messages=messages,
     ):
         token = message.choices[0].delta.content
-
         response += token
         yield response
-
-    history.append((message, response))
-    return history
 
-
-
-
-
-
-
-
-
-        pdf.set_font("Arial", size=12)
-        pdf.multi_cell(0, 10, f"User Query: {input_text}\n\nResponse: {output_text}")
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf").name
-        pdf.output(file_name)
-    elif conversion_type == "DOCX":
-        doc = Document()
-        doc.add_heading('Conversation', 0)
-        doc.add_paragraph(f"User Query: {input_text}\n\nResponse: {output_text}")
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".docx").name
-        doc.save(file_name)
-    elif conversion_type == "TXT":
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".txt").name
-        with open(file_name, "w") as f:
-            f.write(f"User Query: {input_text}\n\nResponse: {output_text}")
-
-
-
-
-
-
-
-
-
-
-demo = gr.Blocks(css=css)
-
-with demo:
-    history_state = gr.State([])  # Initialize an empty list to store the conversation history
-
-    with gr.Row():
-        system_message = gr.Textbox(value="", label="System message")
-        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
-        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
-        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")
-
-
-
-    chat_output = gr.Textbox(label="Response", interactive=False)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+# Function to save chat history to a text file
+def save_as_txt(history):
+    with open("chat_history.txt", "w") as f:
+        for user_message, assistant_message in history:
+            f.write(f"User: {user_message}\n")
+            f.write(f"Assistant: {assistant_message}\n")
+    return "chat_history.txt"
+
+# Function to save chat history to a DOCX file
+def save_as_docx(history):
+    doc = Document()
+    doc.add_heading('Chat History', 0)
+
+    for user_message, assistant_message in history:
+        doc.add_paragraph(f"User: {user_message}")
+        doc.add_paragraph(f"Assistant: {assistant_message}")
+
+    doc.save("chat_history.docx")
+    return "chat_history.docx"
+
+# Function to save chat history to a PDF file
+def save_as_pdf(history):
+    buffer = BytesIO()
+    c = canvas.Canvas(buffer, pagesize=letter)
+    width, height = letter
+    y = height - 40
+
+    c.drawString(30, y, "Chat History")
+    y -= 30
+
+    for user_message, assistant_message in history:
+        c.drawString(30, y, f"User: {user_message}")
+        y -= 20
+        c.drawString(30, y, f"Assistant: {assistant_message}")
+        y -= 30
+
+        if y < 40:
+            c.showPage()
+            y = height - 40
+
+    c.save()
+    buffer.seek(0)
+
+    with open("chat_history.pdf", "wb") as f:
+        f.write(buffer.read())
+
+    return "chat_history.pdf"
+
+# Gradio interface
+def handle_file_save(history, file_format):
+    if file_format == "txt":
+        return save_as_txt(history)
+    elif file_format == "docx":
+        return save_as_docx(history)
+    elif file_format == "pdf":
+        return save_as_pdf(history)
+
+demo = gr.ChatInterface(
+    respond,
+    additional_inputs=[
+        gr.Textbox(value="", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-P",
+        ),
+        gr.Dropdown(
+            choices=["txt", "docx", "pdf"],
+            label="Save as",
+        ),
+    ],
+    outputs=[
+        gr.File(label="Download Chat History"),
+    ],
+    css=css,
+    theme="allenai/gradio-theme",
+)
+
+def save_handler(message, history, system_message, max_tokens, temperature, top_p, file_format):
+    response = respond(message, history, system_message, max_tokens, temperature, top_p)
+    saved_file = handle_file_save(history, file_format)
+    return saved_file
+
+demo = gr.Interface(
+    fn=save_handler,
+    inputs=[
+        gr.Textbox(value="", label="Message"),
+        gr.State(),
+        gr.Textbox(value="", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-P",
+        ),
+        gr.Dropdown(
+            choices=["txt", "docx", "pdf"],
+            label="Save as",
+        ),
+    ],
+    outputs=gr.File(label="Download Chat History"),
+    css=css,
+    theme="allenai/gradio-theme",
+)
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
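Note that, as committed, the second `demo = gr.Interface(...)` assignment rebinds `demo`, so the `gr.ChatInterface` built just above it is never launched; and `save_handler` stores the return value of `respond(...)` (a generator) without ever iterating it, so `response` is unused and the newest exchange never reaches the saved file. A minimal sketch of one way to drain the stream before saving, reusing the file's own `respond` and `handle_file_save` (not part of the commit):

# Sketch only, not part of the commit: drain respond()'s stream so the
# newest exchange is included in the history that gets saved.
def save_handler(message, history, system_message, max_tokens, temperature, top_p, file_format):
    history = history or []
    response = ""
    # respond() yields the accumulated reply, so the last value is the full text.
    for partial in respond(message, history, system_message, max_tokens, temperature, top_p):
        response = partial
    history = history + [(message, response)]
    return handle_file_save(history, file_format)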
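One caveat in `save_as_pdf` as committed: reportlab's `canvas.drawString` draws a single unwrapped line, so a long message will run past the right edge of the page. A hedged sketch of a wrapping helper built on the standard library's `textwrap` (the helper name and the 90-character width are illustrative choices, not from the commit):

import textwrap

# Illustrative helper, not part of the commit: wrap text before drawing,
# since canvas.drawString() does not wrap long lines itself.
def draw_wrapped(c, text, y, height, x=30, line_height=15, width=90):
    for line in textwrap.wrap(text, width=width):
        if y < 40:  # start a new page before running off the bottom margin
            c.showPage()
            y = height - 40
        c.drawString(x, y, line)
        y -= line_height
    return y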