shubham5027 committed on
Commit f432114 · verified · 1 Parent(s): 233f340

Upload 23 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ img/assistant.gif filter=lfs diff=lfs merge=lfs -text
.streamlit/config.toml ADDED
@@ -0,0 +1,2 @@
+ [theme]
+ base = "dark"
exifa.py ADDED
@@ -0,0 +1,1176 @@
1
+ """
2
+
3
+ All code contributed to Exifa.net is © 2024 by Sahir Maharaj.
4
+ The content is licensed under the Creative Commons Attribution 4.0 International License.
5
+ This allows for sharing and adaptation, provided appropriate credit is given, and any changes made are indicated.
6
+
7
+ When using the code from Exifa.net, please credit as follows: "Code sourced from Exifa.net, authored by Sahir Maharaj, 2024."
8
+
9
+ For reporting bugs, requesting features, or further inquiries, please reach out to Sahir Maharaj at sahir@sahirmaharaj.com.
10
+
11
+ Connect with Sahir Maharaj on LinkedIn for updates and potential collaborations: https://www.linkedin.com/in/sahir-maharaj/
12
+
13
+ Hire Sahir Maharaj: https://topmate.io/sahirmaharaj/362667
14
+ """
15
+
16
+ import streamlit as st
17
+ import replicate
18
+ import os
19
+ import pdfplumber
20
+ from docx import Document
21
+ import pandas as pd
22
+ from io import BytesIO
23
+ from transformers import AutoTokenizer
24
+ import exifread
25
+ import requests
26
+ from PIL import Image
27
+ import numpy as np
28
+ import plotly.express as px
29
+ import matplotlib.colors as mcolors
30
+ import plotly.graph_objs as go
31
+ import streamlit.components.v1 as components
32
+ import random
33
+
34
+ config = {
35
+ "toImageButtonOptions": {
36
+ "format": "png",
37
+ "filename": "custom_image",
38
+ "height": 720,
39
+ "width": 480,
40
+ "scale": 6,
41
+ }
42
+ }
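+ # Plotly toolbar settings: passed as `config=config` to st.plotly_chart below so the
+ # "download plot as png" button exports a 480x720 image at 6x scale.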
43
+
44
+ icons = {
45
+ "assistant": "https://raw.githubusercontent.com/sahirmaharaj/exifa/2f685de7dffb583f2b2a89cb8ee8bc27bf5b1a40/img/assistant-done.svg",
46
+ "user": "https://raw.githubusercontent.com/sahirmaharaj/exifa/2f685de7dffb583f2b2a89cb8ee8bc27bf5b1a40/img/user-done.svg",
47
+ }
48
+
49
+ particles_js = """<!DOCTYPE html>
50
+ <html lang="en">
51
+ <head>
52
+ <meta charset="UTF-8">
53
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
54
+ <title>Particles.js</title>
55
+ <style>
56
+ #particles-js {
57
+ position: fixed;
58
+ width: 100vw;
59
+ height: 100vh;
60
+ top: 0;
61
+ left: 0;
62
+ z-index: -1; /* Send the animation to the back */
63
+ }
64
+ .content {
65
+ position: relative;
66
+ z-index: 1;
67
+ color: white;
68
+ }
69
+
70
+ </style>
71
+ </head>
72
+ <body>
73
+ <div id="particles-js"></div>
74
+ <div class="content">
75
+ <!-- Placeholder for Streamlit content -->
76
+ </div>
77
+ <script src="https://cdn.jsdelivr.net/particles.js/2.0.0/particles.min.js"></script>
78
+ <script>
79
+ particlesJS("particles-js", {
80
+ "particles": {
81
+ "number": {
82
+ "value": 300,
83
+ "density": {
84
+ "enable": true,
85
+ "value_area": 800
86
+ }
87
+ },
88
+ "color": {
89
+ "value": "#ffffff"
90
+ },
91
+ "shape": {
92
+ "type": "circle",
93
+ "stroke": {
94
+ "width": 0,
95
+ "color": "#000000"
96
+ },
97
+ "polygon": {
98
+ "nb_sides": 5
99
+ },
100
+ "image": {
101
+ "src": "img/github.svg",
102
+ "width": 100,
103
+ "height": 100
104
+ }
105
+ },
106
+ "opacity": {
107
+ "value": 0.5,
108
+ "random": false,
109
+ "anim": {
110
+ "enable": false,
111
+ "speed": 1,
112
+ "opacity_min": 0.2,
113
+ "sync": false
114
+ }
115
+ },
116
+ "size": {
117
+ "value": 2,
118
+ "random": true,
119
+ "anim": {
120
+ "enable": false,
121
+ "speed": 40,
122
+ "size_min": 0.1,
123
+ "sync": false
124
+ }
125
+ },
126
+ "line_linked": {
127
+ "enable": true,
128
+ "distance": 100,
129
+ "color": "#ffffff",
130
+ "opacity": 0.22,
131
+ "width": 1
132
+ },
133
+ "move": {
134
+ "enable": true,
135
+ "speed": 0.2,
136
+ "direction": "none",
137
+ "random": false,
138
+ "straight": false,
139
+ "out_mode": "out",
140
+ "bounce": true,
141
+ "attract": {
142
+ "enable": false,
143
+ "rotateX": 600,
144
+ "rotateY": 1200
145
+ }
146
+ }
147
+ },
148
+ "interactivity": {
149
+ "detect_on": "canvas",
150
+ "events": {
151
+ "onhover": {
152
+ "enable": true,
153
+ "mode": "grab"
154
+ },
155
+ "onclick": {
156
+ "enable": true,
157
+ "mode": "repulse"
158
+ },
159
+ "resize": true
160
+ },
161
+ "modes": {
162
+ "grab": {
163
+ "distance": 100,
164
+ "line_linked": {
165
+ "opacity": 1
166
+ }
167
+ },
168
+ "bubble": {
169
+ "distance": 400,
170
+ "size": 2,
171
+ "duration": 2,
172
+ "opacity": 0.5,
173
+ "speed": 1
174
+ },
175
+ "repulse": {
176
+ "distance": 200,
177
+ "duration": 0.4
178
+ },
179
+ "push": {
180
+ "particles_nb": 2
181
+ },
182
+ "remove": {
183
+ "particles_nb": 3
184
+ }
185
+ }
186
+ },
187
+ "retina_detect": true
188
+ });
189
+ </script>
190
+ </body>
191
+ </html>
192
+ """
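+ # Self-contained particles.js page; rendered at the end of the script with
+ # streamlit.components.v1.html() to draw the animated background.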
193
+
194
+ st.set_page_config(page_title="Exifa.net", page_icon="✨", layout="wide")
195
+
196
+ welcome_messages = [
197
+ "Hello! I'm Exifa, an AI assistant designed to make image metadata meaningful. Ask me anything!",
198
+ "Hi! I'm Exifa, an AI-powered assistant for extracting and explaining EXIF data. How can I help you today?",
199
+ "Hey! I'm Exifa, your AI-powered guide to understanding the metadata in your images. What would you like to explore?",
200
+ "Hi there! I'm Exifa, an AI-powered tool built to help you make sense of your image metadata. How can I help you today?",
201
+ "Hello! I'm Exifa, an AI-driven tool designed to help you understand your images' metadata. What can I do for you?",
202
+ "Hi! I'm Exifa, an AI-driven assistant designed to make EXIF data easy to understand. How can I help you today?",
203
+ "Welcome! I'm Exifa, an intelligent AI-powered tool for extracting and explaining EXIF data. How can I assist you today?",
204
+ "Hello! I'm Exifa, your AI-powered guide for understanding image metadata. Ask me anything!",
205
+ "Hi! I'm Exifa, an intelligent AI assistant ready to help you understand your images' metadata. What would you like to explore?",
206
+ "Hey! I'm Exifa, an AI assistant for extracting and explaining EXIF data. How can I help you today?",
207
+ ]
208
+
209
+ message = random.choice(welcome_messages)
210
+
211
+ if "messages" not in st.session_state:
212
+ st.session_state["messages"] = [{"role": "assistant", "content": message}]
213
+ if "exif_df" not in st.session_state:
214
+ st.session_state["exif_df"] = pd.DataFrame()
215
+ if "url_exif_df" not in st.session_state:
216
+ st.session_state["url_exif_df"] = pd.DataFrame()
217
+ if "show_expanders" not in st.session_state:
218
+ st.session_state.show_expanders = True
219
+ if "reset_trigger" not in st.session_state:
220
+ st.session_state.reset_trigger = False
221
+ if "uploaded_files" not in st.session_state:
222
+ st.session_state["uploaded_files"] = None
223
+ if "image_url" not in st.session_state:
224
+ st.session_state["image_url"] = ""
225
+ if "follow_up" not in st.session_state:
226
+ st.session_state.follow_up = False
227
+ if "show_animation" not in st.session_state:
228
+ st.session_state.show_animation = True
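+ # Streamlit reruns this script on every interaction, so defaults are seeded in
+ # st.session_state only when their keys are missing.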
229
+
230
+
231
+ def clear_url():
232
+ st.session_state["image_url"] = ""
233
+
234
+
235
+ def clear_files():
236
+ st.session_state["uploaded_files"] = None
237
+ st.session_state["file_uploader_key"] = not st.session_state.get(
238
+ "file_uploader_key", False
239
+ )
240
+
241
+
242
+ def download_image(data):
243
+ st.download_button(
244
+ label="⇩ Download Image",
245
+ data=data,
246
+ file_name="image_no_exif.jpg",
247
+ mime="image/jpeg",
248
+ )
249
+
250
+
251
+ def clear_chat_history():
252
+
253
+ st.session_state.reset_trigger = not st.session_state.reset_trigger
254
+ st.session_state.show_expanders = True
255
+
256
+ st.session_state.show_animation = True
257
+
258
+ st.session_state.messages = [{"role": "assistant", "content": message}]
259
+
260
+ st.session_state["exif_df"] = pd.DataFrame()
261
+ st.session_state["url_exif_df"] = pd.DataFrame()
262
+ uploaded_files = ""
263
+
264
+ if "uploaded_files" in st.session_state:
265
+ del st.session_state["uploaded_files"]
266
+ if "image_url" in st.session_state:
267
+ st.session_state["image_url"] = ""
268
+ st.cache_data.clear()
269
+
270
+ st.success("Chat History Cleared!")
271
+
272
+
273
+ def clear_exif_data(image_input):
+ if isinstance(image_input, BytesIO):
+ image_input.seek(0)
+ image = Image.open(image_input)
+ elif isinstance(image_input, Image.Image):
+ image = image_input
+ else:
+ raise ValueError("Unsupported image input type")
+ # EXIF lives outside the pixel data, so copying only the pixels into a fresh image drops all metadata.
+ # Convert to RGB first because the result is saved as JPEG, which cannot store an alpha channel.
+ image = image.convert("RGB")
+ data = list(image.getdata())
+ image_without_exif = Image.new(image.mode, image.size)
+ image_without_exif.putdata(data)
+
+ buffered = BytesIO()
+ image_without_exif.save(buffered, format="JPEG", quality=100, optimize=True)
+ buffered.seek(0)
+ return buffered.getvalue()
289
+
290
+
291
+ with st.sidebar:
292
+
293
+ image_url = (
294
+ "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/Exifa.gif"
295
+ )
296
+
297
+ st.markdown(
298
+ f"""
299
+ <div style='display: flex; align-items: center;'>
300
+ <img src='{image_url}' style='width: 50px; height: 50px; margin-right: 30px;'>
301
+ <h1 style='margin: 0;'>Exifa.net</h1>
302
+ </div>
303
+ """,
304
+ unsafe_allow_html=True,
305
+ )
306
+
307
+ expander = st.expander("🗀 File Input")
308
+ with expander:
309
+
310
+ image_url = st.text_input(
311
+ "Enter image URL for EXIF analysis:",
312
+ key="image_url",
313
+ on_change=clear_files,
314
+ value=st.session_state.image_url,
315
+ )
316
+
317
+ file_uploader_key = "file_uploader_{}".format(
318
+ st.session_state.get("file_uploader_key", False)
319
+ )
320
+
321
+ uploaded_files = st.file_uploader(
322
+ "Upload local files:",
323
+ type=["txt", "pdf", "docx", "csv", "jpg", "png", "jpeg"],
324
+ key=file_uploader_key,
325
+ on_change=clear_url,
326
+ accept_multiple_files=True,
327
+ )
328
+
329
+ if uploaded_files is not None:
330
+ st.session_state["uploaded_files"] = uploaded_files
331
+ expander = st.expander("⚒ Model Configuration")
332
+ with expander:
333
+
334
+ if "REPLICATE_API_TOKEN" in st.secrets:
335
+ replicate_api = st.secrets["REPLICATE_API_TOKEN"]
336
+ else:
337
+ replicate_api = st.text_input("Enter Replicate API token:", type="password")
338
+ if not (replicate_api.startswith("r8_") and len(replicate_api) == 40):
339
+ st.warning("Please enter your Replicate API token.", icon="⚠️")
340
+ st.markdown(
341
+ "**Don't have an API token?** Head over to [Replicate](https://replicate.com/account/api-tokens) to sign up for one."
342
+ )
343
+ os.environ["REPLICATE_API_TOKEN"] = replicate_api
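+ # The replicate client reads its credentials from the REPLICATE_API_TOKEN
+ # environment variable, which is why the token is exported here.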
344
+ st.subheader("Adjust model parameters")
345
+ temperature = st.slider(
346
+ "Temperature", min_value=0.01, max_value=5.0, value=0.3, step=0.01
347
+ )
348
+ top_p = st.slider("Top P", min_value=0.01, max_value=1.0, value=0.2, step=0.01)
349
+ max_new_tokens = st.number_input(
350
+ "Max New Tokens", min_value=1, max_value=1024, value=512
351
+ )
352
+ min_new_tokens = st.number_input(
353
+ "Min New Tokens", min_value=0, max_value=512, value=0
354
+ )
355
+ presence_penalty = st.slider(
356
+ "Presence Penalty", min_value=0.0, max_value=2.0, value=1.15, step=0.05
357
+ )
358
+ frequency_penalty = st.slider(
359
+ "Frequency Penalty", min_value=0.0, max_value=2.0, value=0.2, step=0.05
360
+ )
361
+ stop_sequences = st.text_area("Stop Sequences", value="<|im_end|>", height=100)
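+ # These sidebar values are forwarded unchanged as model inputs in the
+ # replicate.stream() calls further down.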
362
+ if uploaded_files and not st.session_state["exif_df"].empty:
363
+ with st.expander("🗏 EXIF Details"):
364
+ st.dataframe(st.session_state["exif_df"])
365
+ if image_url and not st.session_state["url_exif_df"].empty:
366
+ with st.expander("🗏 EXIF Details"):
367
+ st.dataframe(st.session_state["url_exif_df"])
368
+ base_prompt = """
+
+ You are an expert EXIF analyst. The user will provide an image file and you will explain its EXIF data in verbose detail.
+
+ Pay careful attention to the image's EXIF data and create a profile of the user who took this image.
+
+ 1. Make inferences about things like location, budget, experience, etc. (2 paragraphs)
+ 2. Make as many inferences as possible about the EXIF data in the next 3 paragraphs.
+ 3. Follow this format, style, pacing and structure.
+ 4. In addition to the content above, provide 1 more paragraph about the user's financial standing based on the equipment they are using, and estimate their experience.
+
+ DO NOT skip any steps.
+
+ FORMAT THE RESULT IN MULTIPLE PARAGRAPHS.
+
+ Do not keep talking and rambling on - get to the point.
+
+ """
387
+
388
+ if uploaded_files:
389
+ for uploaded_file in uploaded_files:
390
+ if uploaded_file.type == "application/pdf":
391
+ with pdfplumber.open(uploaded_file) as pdf:
392
+ pages = [page.extract_text() for page in pdf.pages]
393
+ file_text = "\n".join(pages) if pages else ""
394
+ elif uploaded_file.type == "text/plain":
395
+ file_text = str(uploaded_file.read(), "utf-8")
396
+ elif (
397
+ uploaded_file.type
398
+ == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
399
+ ):
400
+ doc = Document(uploaded_file)
401
+ file_text = "\n".join([para.text for para in doc.paragraphs])
402
+ elif uploaded_file.type == "text/csv":
403
+ df = pd.read_csv(uploaded_file)
404
+ file_text = df.to_string(index=False)
405
+ elif uploaded_file.type in ["image/jpeg", "image/png", "image/jpg"]:
406
+ import tempfile
407
+
408
+ with tempfile.NamedTemporaryFile(delete=False) as temp:
409
+ temp.write(uploaded_file.read())
410
+ temp.flush()
411
+ temp.close()
412
+ with open(temp.name, "rb") as file:
413
+ tags = exifread.process_file(file)
414
+ exif_data = {}
415
+ for tag in tags.keys():
416
+ if tag not in [
417
+ "JPEGThumbnail",
418
+ "TIFFThumbnail",
419
+ "Filename",
420
+ "EXIF MakerNote",
421
+ ]:
422
+ exif_data[tag] = str(tags[tag])
423
+ df = pd.DataFrame(exif_data, index=[0])
424
+ df.insert(loc=0, column="Image Feature", value=["Value"] * len(df))
425
+ df = df.transpose()
426
+ df.columns = df.iloc[0]
427
+ df = df.iloc[1:]
428
+
429
+ st.session_state["exif_df"] = df
430
+
431
+ file_text = "\n".join(
432
+ [
433
+ f"{tag}: {tags[tag]}"
434
+ for tag in tags.keys()
435
+ if tag
436
+ not in (
437
+ "JPEGThumbnail",
438
+ "TIFFThumbnail",
439
+ "Filename",
440
+ "EXIF MakerNote",
441
+ )
442
+ ]
443
+ )
444
+ os.unlink(temp.name)
445
+ base_prompt += "\n" + file_text
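+ # Whatever was extracted from each uploaded file (document text or EXIF tags)
+ # is appended to base_prompt so the model receives it as context.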
446
+ if image_url:
447
+ try:
448
+ response = requests.head(image_url)
449
+ if response.headers["Content-Type"] in [
450
+ "image/jpeg",
451
+ "image/png",
452
+ "image/jpg",
453
+ ]:
454
+ response = requests.get(image_url)
455
+ response.raise_for_status()
456
+ image_data = BytesIO(response.content)
457
+ image = Image.open(image_data)
458
+ image.load()
459
+
460
+ tags = exifread.process_file(image_data)
461
+
462
+ exif_data = {}
463
+ for tag in tags.keys():
464
+ if tag not in [
465
+ "JPEGThumbnail",
466
+ "TIFFThumbnail",
467
+ "Filename",
468
+ "EXIF MakerNote",
469
+ ]:
470
+ exif_data[tag] = str(tags[tag])
471
+ df = pd.DataFrame(exif_data, index=[0])
472
+ df.insert(loc=0, column="Image Feature", value=["Value"] * len(df))
473
+ df = df.transpose()
474
+ df.columns = df.iloc[0]
475
+ df = df.iloc[1:]
476
+
477
+ st.session_state["url_exif_df"] = df
478
+
479
+ file_text = "\n".join(
480
+ [
481
+ f"{tag}: {tags[tag]}"
482
+ for tag in tags.keys()
483
+ if tag
484
+ not in (
485
+ "JPEGThumbnail",
486
+ "TIFFThumbnail",
487
+ "Filename",
488
+ "EXIF MakerNote",
489
+ )
490
+ ]
491
+ )
492
+ base_prompt += "\n" + file_text
493
+ else:
494
+
495
+ pass
496
+ except requests.RequestException:
497
+
498
+ pass
499
+
500
+ def load_image(file):
501
+ if isinstance(file, str):
502
+ response = requests.get(file)
503
+ response.raise_for_status()
504
+ return Image.open(BytesIO(response.content))
505
+ elif isinstance(file, bytes):
506
+ return Image.open(BytesIO(file))
507
+ else:
508
+ return Image.open(file)
509
+
510
+ uploaded_file = image
511
+
512
+ with st.expander("⛆ RGB Channel"):
513
+
514
+ def get_channel_image(image, channels):
515
+
516
+ data = np.array(image)
517
+
518
+ channel_data = np.zeros_like(data)
519
+
520
+ for channel in channels:
521
+ channel_data[:, :, channel] = data[:, :, channel]
522
+ return Image.fromarray(channel_data)
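+ # Keeps only the selected channels: the other channels stay zeroed, so they
+ # render as black in the combined image.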
523
+
524
+ channels = st.multiselect(
525
+ "Select channels:",
526
+ ["Red", "Green", "Blue"],
527
+ default=["Red", "Green", "Blue"],
528
+ )
529
+
530
+ if channels:
531
+ channel_indices = [
532
+ 0 if channel == "Red" else 1 if channel == "Green" else 2
533
+ for channel in channels
534
+ ]
535
+ combined_image = get_channel_image(image, channel_indices)
536
+ st.image(combined_image, use_column_width=True)
537
+ else:
538
+ st.image(image, use_column_width=True)
539
+ with st.expander("〽 HSV Distribution"):
540
+
541
+ def get_hsv_histogram(image):
542
+
543
+ hsv_image = image.convert("HSV")
544
+ data = np.array(hsv_image)
545
+
546
+ hue_hist, _ = np.histogram(data[:, :, 0], bins=256, range=(0, 256))
547
+ saturation_hist, _ = np.histogram(
548
+ data[:, :, 1], bins=256, range=(0, 256)
549
+ )
550
+ value_hist, _ = np.histogram(data[:, :, 2], bins=256, range=(0, 256))
551
+
552
+ histogram_df = pd.DataFrame(
553
+ {
554
+ "Hue": hue_hist,
555
+ "Saturation": saturation_hist,
556
+ "Value": value_hist,
557
+ }
558
+ )
559
+
560
+ return histogram_df
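+ # 256-bin histograms of the H, S and V channels, returned as a DataFrame for
+ # st.line_chart.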
561
+
562
+ hsv_histogram_df = get_hsv_histogram(image)
563
+
564
+ st.line_chart(hsv_histogram_df)
565
+ with st.expander("☄ Color Distribution"):
566
+ if image_url:
567
+ image = load_image(image_url)
568
+ if image:
569
+
570
+ def color_distribution_sunburst(data):
571
+ data = np.array(data)
572
+ red, green, blue = data[:, :, 0], data[:, :, 1], data[:, :, 2]
573
+ color_intensity = {"color": [], "intensity": [], "count": []}
574
+ for name, channel in zip(
575
+ ["Red", "Green", "Blue"], [red, green, blue]
576
+ ):
577
+ unique, counts = np.unique(channel, return_counts=True)
578
+ color_intensity["color"].extend([name] * len(unique))
579
+ color_intensity["intensity"].extend(unique)
580
+ color_intensity["count"].extend(counts)
581
+ df = pd.DataFrame(color_intensity)
582
+ fig = px.sunburst(
583
+ df,
584
+ path=["color", "intensity"],
585
+ values="count",
586
+ color="color",
587
+ color_discrete_map={
588
+ "Red": "#ff6666",
589
+ "Green": "#85e085",
590
+ "Blue": "#6666ff",
591
+ },
592
+ )
593
+ return fig
594
+
595
+ fig = color_distribution_sunburst(image)
596
+ st.plotly_chart(fig, use_container_width=True)
597
+ with st.expander("🕸 3D Color Space"):
598
+
599
+ def plot_3d_color_space(data, skip_factor):
600
+ sample = data[::skip_factor, ::skip_factor].reshape(-1, 3)
601
+
602
+ normalized_colors = sample / 255.0
603
+
604
+ trace = go.Scatter3d(
605
+ x=sample[:, 0],
606
+ y=sample[:, 1],
607
+ z=sample[:, 2],
608
+ mode="markers",
609
+ marker=dict(
610
+ size=5,
611
+ color=["rgb({},{},{})".format(r, g, b) for r, g, b in sample],
612
+ opacity=0.8,
613
+ ),
614
+ )
615
+ layout = go.Layout(
616
+ scene=dict(
617
+ xaxis=dict(title="Red"),
618
+ yaxis=dict(title="Green"),
619
+ zaxis=dict(title="Blue"),
620
+ camera=dict(eye=dict(x=1.25, y=1.25, z=1.25)),
621
+ ),
622
+ margin=dict(l=0, r=0, b=0, t=30),
623
+ )
624
+ fig = go.Figure(data=[trace], layout=layout)
625
+ return fig
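+ # The image is downsampled by skip_factor before plotting so the Scatter3d
+ # trace stays at a manageable number of points. (normalized_colors is computed
+ # but unused; marker colors come straight from the sampled RGB values.)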
626
+
627
+ skip_factor = 8
628
+
629
+ if isinstance(uploaded_file, Image.Image):
630
+ data = np.array(uploaded_file)
631
+ else:
632
+ data = np.array(Image.open(uploaded_file))
633
+ fig = plot_3d_color_space(data, skip_factor)
634
+ st.plotly_chart(fig, use_container_width=True, config=config)
635
+ with st.expander("‏𖦹 Pixel Density Polar"):
636
+
637
+ def pixel_density_polar_plot(image):
638
+ image_data = np.array(image)
639
+ hsv_data = mcolors.rgb_to_hsv(image_data / 255.0)
640
+ hue = hsv_data[:, :, 0].flatten()
641
+
642
+ hist, bins = np.histogram(hue, bins=360, range=(0, 1))
643
+ theta = np.linspace(0, 360, len(hist), endpoint=False)
644
+
645
+ fig = px.bar_polar(
646
+ r=hist,
647
+ theta=theta,
648
+ template="seaborn",
649
+ color_discrete_sequence=["red"],
650
+ )
651
+ fig.update_traces(marker=dict(line=dict(color="red", width=1)))
652
+ fig.update_layout()
653
+
654
+ return fig
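+ # Hue histogram over 360 bins, drawn as a polar bar chart to show how pixel
+ # hues are distributed around the color wheel.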
655
+
656
+ if uploaded_file is not None:
657
+ if isinstance(uploaded_file, Image.Image):
658
+ image = uploaded_file
659
+ else:
660
+ image = Image.open(uploaded_file)
661
+ fig = pixel_density_polar_plot(image)
662
+ st.plotly_chart(fig, use_container_width=True, config=config)
663
+ with st.expander("ᨒ 3D Surface (Color Intensities)"):
664
+
665
+ def surface_plot_image_intensity(data):
666
+ intensity = np.mean(data, axis=2)
667
+ sample_size = int(intensity.shape[0] * 0.35)
668
+ intensity_sample = intensity[:sample_size, :sample_size]
669
+ fig = go.Figure(
670
+ data=[go.Surface(z=intensity_sample, colorscale="Viridis")]
671
+ )
672
+ fig.update_layout(autosize=True)
673
+ return fig
674
+
675
+ if isinstance(uploaded_file, Image.Image):
676
+ data = np.array(uploaded_file)
677
+ else:
678
+ data = np.array(Image.open(uploaded_file))
679
+ fig = surface_plot_image_intensity(data)
680
+ st.plotly_chart(fig, use_container_width=True, config=config)
681
+ with st.expander("🖌 Color Palette"):
682
+
683
+ def extract_color_palette(image, num_colors=6):
684
+ image = image.resize((100, 100))
685
+ result = image.quantize(colors=num_colors)
686
+ palette = result.getpalette()
687
+ color_counts = result.getcolors()
688
+
689
+ colors = [palette[i * 3 : (i + 1) * 3] for i in range(num_colors)]
690
+ counts = [
691
+ count
692
+ for count, _ in sorted(
693
+ color_counts, reverse=True, key=lambda x: x[0]
694
+ )
695
+ ]
696
+ return colors, counts
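+ # quantize() reduces the image to num_colors entries; getpalette()/getcolors()
+ # give the palette and per-color pixel counts. Note the counts are sorted by
+ # frequency while colors stay in palette order, so the pairing below is approximate.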
697
+
698
+ def plot_color_palette(colors, counts):
699
+ fig = go.Figure()
700
+ for i, (color, count) in enumerate(zip(colors, counts)):
701
+ hex_color = "#%02x%02x%02x" % tuple(color)
702
+ fig.add_trace(
703
+ go.Bar(
704
+ x=[1],
705
+ y=[hex_color],
706
+ orientation="h",
707
+ marker=dict(color=hex_color),
708
+ hoverinfo="text",
709
+ hovertext=f"<b>HEX:</b> {hex_color}<br><b>Count:</b> {count}",
710
+ name="",
711
+ )
712
+ )
713
+ fig.update_layout(
714
+ xaxis=dict(showticklabels=False),
715
+ yaxis=dict(showticklabels=True),
716
+ showlegend=False,
717
+ template="plotly_dark",
718
+ height=400,
719
+ )
720
+ return fig
721
+
722
+ num_colors = st.slider("Number of Colors", 2, 10, 6)
723
+
724
+ if isinstance(uploaded_file, Image.Image):
725
+ image = uploaded_file.convert("RGB")
726
+ else:
727
+ image = Image.open(uploaded_file).convert("RGB")
728
+ colors, counts = extract_color_palette(image, num_colors)
729
+ fig = plot_color_palette(colors, counts)
730
+ st.plotly_chart(fig, use_container_width=True, config=config)
731
+ if uploaded_file is not None:
732
+ col1, col2 = st.columns(2)
733
+ clean_img = clear_exif_data(image)
734
+ with col1:
735
+ st.button("🗑 Clear Chat History", on_click=clear_chat_history)
736
+ with col2:
737
+ download_image(clean_img)
738
+ st.session_state.reset_trigger = True
739
+ if st.session_state.show_expanders:
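+ # The expanders below mirror the URL-based visualizations above, re-defining the
+ # same plotting helpers for files uploaded from disk.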
740
+
741
+ if uploaded_files and not st.session_state["exif_df"].empty:
742
+
743
+ with st.expander("⛆ RGB Channel"):
744
+
745
+ for uploaded_file in uploaded_files:
746
+ if uploaded_file.type in ["image/jpeg", "image/png", "image/jpg"]:
747
+
748
+ def load_image(image_file):
749
+ return Image.open(image_file)
750
+
751
+ image = load_image(uploaded_file)
752
+
753
+ def get_channel_image(image, channels):
754
+ data = np.array(image)
755
+
756
+ channel_data = np.zeros_like(data)
757
+
758
+ for channel in channels:
759
+ channel_data[:, :, channel] = data[:, :, channel]
760
+ return Image.fromarray(channel_data)
761
+
762
+ channels = st.multiselect(
763
+ "Select channels:",
764
+ ["Red", "Green", "Blue"],
765
+ default=["Red", "Green", "Blue"],
766
+ )
767
+
768
+ if channels:
769
+ channel_indices = [
770
+ 0 if channel == "Red" else 1 if channel == "Green" else 2
771
+ for channel in channels
772
+ ]
773
+ combined_image = get_channel_image(image, channel_indices)
774
+ st.image(combined_image, use_column_width=True)
775
+ else:
776
+ st.image(image, use_column_width=True)
777
+ with st.expander("〽 HSV Distribution"):
778
+
779
+ def get_hsv_histogram(image):
780
+ hsv_image = image.convert("HSV")
781
+ data = np.array(hsv_image)
782
+
783
+ hue_hist, _ = np.histogram(data[:, :, 0], bins=256, range=(0, 256))
784
+ saturation_hist, _ = np.histogram(
785
+ data[:, :, 1], bins=256, range=(0, 256)
786
+ )
787
+ value_hist, _ = np.histogram(
788
+ data[:, :, 2], bins=256, range=(0, 256)
789
+ )
790
+
791
+ histogram_df = pd.DataFrame(
792
+ {
793
+ "Hue": hue_hist,
794
+ "Saturation": saturation_hist,
795
+ "Value": value_hist,
796
+ }
797
+ )
798
+
799
+ return histogram_df
800
+
801
+ hsv_histogram_df = get_hsv_histogram(image)
802
+
803
+ st.line_chart(hsv_histogram_df)
804
+ with st.expander("☄ Color Distribution"):
805
+
806
+ def color_distribution_sunburst(data):
807
+ data = np.array(data)
808
+
809
+ red, green, blue = data[:, :, 0], data[:, :, 1], data[:, :, 2]
810
+ color_intensity = {"color": [], "intensity": [], "count": []}
811
+ for name, channel in zip(
812
+ ["Red", "Green", "Blue"], [red, green, blue]
813
+ ):
814
+ unique, counts = np.unique(channel, return_counts=True)
815
+ color_intensity["color"].extend([name] * len(unique))
816
+ color_intensity["intensity"].extend(unique)
817
+ color_intensity["count"].extend(counts)
818
+ df = pd.DataFrame(color_intensity)
819
+ fig = px.sunburst(
820
+ df,
821
+ path=["color", "intensity"],
822
+ values="count",
823
+ color="color",
824
+ color_discrete_map={
825
+ "Red": "#ff6666",
826
+ "Green": "#85e085",
827
+ "Blue": "#6666ff",
828
+ },
829
+ )
830
+ return fig
831
+
832
+ image = load_image(uploaded_file)
833
+ fig = color_distribution_sunburst(image)
834
+ st.plotly_chart(fig, use_container_width=True, config=config)
835
+ with st.expander("🕸 3D Color Space"):
836
+
837
+ def plot_3d_color_space(data, skip_factor):
838
+ sample = data[::skip_factor, ::skip_factor].reshape(-1, 3)
839
+
840
+ normalized_colors = sample / 255.0
841
+
842
+ trace = go.Scatter3d(
843
+ x=sample[:, 0],
844
+ y=sample[:, 1],
845
+ z=sample[:, 2],
846
+ mode="markers",
847
+ marker=dict(
848
+ size=5,
849
+ color=[
850
+ "rgb({},{},{})".format(r, g, b) for r, g, b in sample
851
+ ],
852
+ opacity=0.8,
853
+ ),
854
+ )
855
+ layout = go.Layout(
856
+ scene=dict(
857
+ xaxis=dict(title="Red"),
858
+ yaxis=dict(title="Green"),
859
+ zaxis=dict(title="Blue"),
860
+ camera=dict(eye=dict(x=1.25, y=1.25, z=1.25)),
861
+ ),
862
+ margin=dict(l=0, r=0, b=0, t=30),
863
+ )
864
+ fig = go.Figure(data=[trace], layout=layout)
865
+ return fig
866
+
867
+ skip_factor = 8
868
+
869
+ data = np.array(Image.open(uploaded_file))
870
+ fig = plot_3d_color_space(data, skip_factor)
871
+ st.plotly_chart(fig, use_container_width=True, config=config)
872
+ with st.expander("𖦹 Pixel Density Polar"):
873
+
874
+ def pixel_density_polar_plot(data):
875
+ image_data = np.array(Image.open(data))
876
+ hsv_data = mcolors.rgb_to_hsv(image_data / 255.0)
877
+ hue = hsv_data[:, :, 0].flatten()
878
+
879
+ hist, bins = np.histogram(hue, bins=360, range=(0, 1))
880
+ theta = np.linspace(0, 360, len(hist), endpoint=False)
881
+
882
+ fig = px.bar_polar(
883
+ r=hist,
884
+ theta=theta,
885
+ template="seaborn",
886
+ color_discrete_sequence=["red"],
887
+ )
888
+ fig.update_traces(marker=dict(line=dict(color="red", width=1)))
889
+ fig.update_layout()
890
+
891
+ return fig
892
+
893
+ if uploaded_file is not None:
894
+ fig = pixel_density_polar_plot(uploaded_file)
895
+ st.plotly_chart(fig, use_container_width=True, config=config)
896
+ with st.expander("ᨒ 3D Surface (Color Intensities)"):
897
+
898
+ def surface_plot_image_intensity(data):
899
+ intensity = np.mean(data, axis=2)
900
+ sample_size = int(intensity.shape[0] * 0.35)
901
+ intensity_sample = intensity[:sample_size, :sample_size]
902
+ fig = go.Figure(
903
+ data=[go.Surface(z=intensity_sample, colorscale="Viridis")]
904
+ )
905
+ fig.update_layout(autosize=True)
906
+ return fig
907
+
908
+ data = np.array(Image.open(uploaded_file))
909
+ fig = surface_plot_image_intensity(data)
910
+
911
+ st.plotly_chart(fig, use_container_width=True, config=config)
912
+ with st.expander("🖌 Color Palette"):
913
+
914
+ def extract_color_palette(image, num_colors=6):
915
+ image = image.resize((100, 100))
916
+ result = image.quantize(colors=num_colors)
917
+ palette = result.getpalette()
918
+ color_counts = result.getcolors()
919
+
920
+ colors = [palette[i * 3 : (i + 1) * 3] for i in range(num_colors)]
921
+ counts = [
922
+ count
923
+ for count, _ in sorted(
924
+ color_counts, reverse=True, key=lambda x: x[0]
925
+ )
926
+ ]
927
+
928
+ return colors, counts
929
+
930
+ def plot_color_palette(colors, counts):
931
+ fig = go.Figure()
932
+ for i, (color, count) in enumerate(zip(colors, counts)):
933
+ hex_color = "#%02x%02x%02x" % tuple(color)
934
+ fig.add_trace(
935
+ go.Bar(
936
+ x=[1],
937
+ y=[hex_color],
938
+ orientation="h",
939
+ marker=dict(color=hex_color),
940
+ hoverinfo="text",
941
+ hovertext=f"<b>HEX:</b> {hex_color}<br><b>Count:</b> {count}",
942
+ name="",
943
+ )
944
+ )
945
+ fig.update_layout(
946
+ xaxis=dict(showticklabels=False),
947
+ yaxis=dict(showticklabels=True),
948
+ showlegend=False,
949
+ template="plotly_dark",
950
+ height=400,
951
+ )
952
+ return fig
953
+
954
+ num_colors = st.slider("Number of Colors", 2, 10, 6)
955
+ image = Image.open(uploaded_file).convert("RGB")
956
+ colors, counts = extract_color_palette(image, num_colors)
957
+ fig = plot_color_palette(colors, counts)
958
+ st.plotly_chart(fig, use_container_width=True, config=config)
959
+ st.session_state.reset_trigger = True
960
+
961
+ col1, col2 = st.columns(2)
962
+ with col1:
963
+ st.button("🗑 Clear Chat History", on_click=clear_chat_history)
964
+ with col2:
965
+ clear = clear_exif_data(image)
966
+ download_image(clear)
967
+
968
+
969
+ @st.experimental_dialog("How to use Exifa.net", width="large")  # width accepts "small" or "large"
970
+ def show_video(item):
971
+ video_url = "https://www.youtube.com/watch?v=CS7rkWu7LNY"
972
+ st.video(video_url, loop=False, autoplay=True, muted=False)
973
+
974
+
975
+ for message in st.session_state.messages:
976
+ with st.chat_message(message["role"], avatar=icons[message["role"]]):
977
+ st.write(message["content"])
978
+ if message == st.session_state["messages"][0]:
979
+ if st.button("How can I use Exifa?"):
980
+ show_video("")
981
+ st.sidebar.caption(
982
+ "Built by [Sahir Maharaj](https://www.linkedin.com/in/sahir-maharaj/). Like this? [Hire me!](https://topmate.io/sahirmaharaj/362667)"
983
+ )
984
+
985
+ linkedin = "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/linkedin.gif"
986
+ topmate = "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/topmate.gif"
987
+ email = "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/email.gif"
988
+ newsletter = (
989
+ "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/newsletter.gif"
990
+ )
991
+ share = "https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/share.gif"
992
+
993
+ uptime = "https://uptime.betterstack.com/status-badges/v1/monitor/196o6.svg"
994
+
995
+ st.sidebar.caption(
996
+ f"""
997
+ <div style='display: flex; align-items: center;'>
998
+ <a href = 'https://www.linkedin.com/in/sahir-maharaj/'><img src='{linkedin}' style='width: 35px; height: 35px; margin-right: 25px;'></a>
999
+ <a href = 'https://topmate.io/sahirmaharaj/362667'><img src='{topmate}' style='width: 32px; height: 32px; margin-right: 25px;'></a>
1000
+ <a href = 'mailto:sahir@sahirmaharaj.com'><img src='{email}' style='width: 28px; height: 28px; margin-right: 25px;'></a>
1001
+ <a href = 'https://www.linkedin.com/build-relation/newsletter-follow?entityUrn=7163516439096733696'><img src='{newsletter}' style='width: 28px; height: 28px; margin-right: 25px;'></a>
1002
+ <a href = 'https://www.kaggle.com/sahirmaharajj'><img src='{share}' style='width: 28px; height: 28px; margin-right: 25px;'></a>
1003
+
1004
+ </div>
1005
+ <br>
1006
+ <a href = 'https://exifa.betteruptime.com/'><img src='{uptime}'></a>
1007
+ &nbsp; <a href="https://www.producthunt.com/posts/exifa-net?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-exifa&#0045;net" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=474560&theme=dark" alt="Exifa&#0046;net - Your&#0032;AI&#0032;assistant&#0032;for&#0032;understanding&#0032;EXIF&#0032;data | Product Hunt" style="width: 125px; height: 27px;" width="125" height="27" /></a>
1008
+
1009
+ """,
1010
+ unsafe_allow_html=True,
1011
+ )
1012
+
1013
+
1014
+ @st.cache_resource(show_spinner=False)
1015
+ def get_tokenizer():
1016
+ return AutoTokenizer.from_pretrained("huggyllama/llama-7b")
1017
+
1018
+
1019
+ def get_num_tokens(prompt):
1020
+ tokenizer = get_tokenizer()
1021
+ tokens = tokenizer.tokenize(prompt)
1022
+ return len(tokens)
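+ # Token counts are estimated locally with the huggyllama/llama-7b tokenizer and
+ # used only for the conversation-length check before calling the hosted model.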
1023
+
1024
+
1025
+ def generate_arctic_response_follow_up():
1026
+
1027
+ follow_up_response = ""
1028
+
1029
+ last_three_messages = st.session_state.messages[-3:]
1030
+ for message in last_three_messages:
1031
+ follow_up_response += "\n\n {}".format(message)
1032
+ prompt = [
1033
+ "Please generate one question based on the conversation thus far that the user might ask next. Ensure the question is short, less than 8 words, stays on the topic of EXIF and its importance and dangers, and is formatted with underscores instead of spaces, e.g., What_does_EXIF_mean? Conversation Info = {}. Please generate one question based on the conversation thus far that the user might ask next. Ensure the question is short, less than 8 words, stays on the topic of EXIF and its importance and dangers, and is formatted with underscores instead of spaces".format(
1034
+ follow_up_response
1035
+ )
1036
+ ]
1037
+ prompt.append("assistant\n")
1038
+ prompt_str = "\n".join(prompt)
1039
+
1040
+ full_response = []
1041
+ for event in replicate.stream(
1042
+ "snowflake/snowflake-arctic-instruct",
1043
+ input={
1044
+ "prompt": prompt_str,
1045
+ "prompt_template": r"{prompt}",
1046
+ "temperature": temperature,
1047
+ "top_p": top_p,
1048
+ "max_new_tokens": max_new_tokens,
1049
+ "min_new_tokens": min_new_tokens,
1050
+ "presence_penalty": presence_penalty,
1051
+ "frequency_penalty": frequency_penalty,
1052
+ "stop_sequences": stop_sequences,
1053
+ },
1054
+ ):
1055
+ full_response.append(str(event).strip())
1056
+ complete_response = "".join(full_response)
1057
+
1058
+ return complete_response
1059
+
1060
+
1061
+ def generate_arctic_response():
1062
+
1063
+ prompt = [base_prompt] if base_prompt else []
1064
+ for dict_message in st.session_state.messages:
1065
+ if dict_message["role"] == "user":
1066
+ prompt.append("user\n" + dict_message["content"])
1067
+ else:
1068
+ prompt.append("assistant\n" + dict_message["content"])
1069
+ prompt.append("assistant\n")
1070
+ prompt_str = "\n".join(prompt)
1071
+
1072
+ if get_num_tokens(prompt_str) >= 1000000:
1073
+ st.error("Conversation length too long. Please keep it under 1000000 tokens.")
1074
+ st.button(
1075
+ "🗑 Clear Chat History",
1076
+ on_click=clear_chat_history,
1077
+ key="clear_chat_history",
1078
+ )
1079
+ st.stop()
1080
+ for event in replicate.stream(
1081
+ "snowflake/snowflake-arctic-instruct",
1082
+ input={
1083
+ "prompt": prompt_str,
1084
+ "prompt_template": r"{prompt}",
1085
+ "temperature": temperature,
1086
+ "top_p": top_p,
1087
+ "max_new_tokens": max_new_tokens,
1088
+ "min_new_tokens": min_new_tokens,
1089
+ "presence_penalty": presence_penalty,
1090
+ "frequency_penalty": frequency_penalty,
1091
+ "stop_sequences": stop_sequences,
1092
+ },
1093
+ ):
1094
+ yield str(event)
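+ # Yields tokens as they stream back from Replicate so st.write_stream() can
+ # render the assistant reply incrementally.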
1095
+
1096
+
1097
+ def display_question():
1098
+ st.session_state.follow_up = True
1099
+
1100
+
1101
+ if prompt := st.chat_input(disabled=not replicate_api):
1102
+
1103
+ st.session_state.show_animation = False
1104
+
1105
+ st.session_state.messages.append({"role": "user", "content": prompt})
1106
+ with st.chat_message(
1107
+ "user",
1108
+ avatar="https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/user.gif",
1109
+ ):
1110
+ st.write(prompt)
1111
+ if st.session_state.follow_up:
1112
+
1113
+ st.session_state.show_animation = False
1114
+
1115
+ unique_key = "chat_input_" + str(hash("Snowflake Arctic is cool"))
1116
+
1117
+ complete_question = generate_arctic_response_follow_up()
1118
+ formatted_question = complete_question.replace("_", " ").strip()
1119
+
1120
+ st.session_state.messages.append({"role": "user", "content": formatted_question})
1121
+ with st.chat_message(
1122
+ "user",
1123
+ avatar="https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/user.gif",
1124
+ ):
1125
+ st.write(formatted_question)
1126
+ st.session_state.follow_up = False
1127
+
1128
+ with st.chat_message(
1129
+ "assistant",
1130
+ avatar="https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/assistant.gif",
1131
+ ):
1132
+ response = generate_arctic_response()
1133
+ full_response = st.write_stream(response)
1134
+ message = {"role": "assistant", "content": full_response}
1135
+
1136
+ st.session_state.messages.append(message)
1137
+
1138
+ full_response_prompt = generate_arctic_response_follow_up()
1139
+ message_prompt = {"content": full_response_prompt}
1140
+ st.button(
1141
+ str(message_prompt["content"]).replace("_", " ").strip(),
1142
+ on_click=display_question,
1143
+ )
1144
+ if st.session_state.messages[-1]["role"] != "assistant":
1145
+
1146
+ st.session_state.show_animation = False
1147
+
1148
+ with st.chat_message(
1149
+ "assistant",
1150
+ avatar="https://raw.githubusercontent.com/sahirmaharaj/exifa/main/img/assistant.gif",
1151
+ ):
1152
+ response = generate_arctic_response()
1153
+ full_response = st.write_stream(response)
1154
+ message = {"role": "assistant", "content": full_response}
1155
+
1156
+ full_response_prompt = generate_arctic_response_follow_up()
1157
+ message_prompt = {"content": full_response_prompt}
1158
+ st.button(
1159
+ str(message_prompt["content"]).replace("_", " ").strip(),
1160
+ on_click=display_question,
1161
+ )
1162
+
1163
+ st.session_state.messages.append(message)
1164
+ if st.session_state.reset_trigger:
1165
+
1166
+ unique_key = "chat_input_" + str(hash("Snowflake Arctic is cool"))
1167
+
1168
+ complete_question = generate_arctic_response_follow_up()
1169
+
1170
+ st.session_state.show_animation = False
1171
+ if "has_snowed" not in st.session_state:
1172
+
1173
+ st.snow()
1174
+ st.session_state["has_snowed"] = True
1175
+ if st.session_state.show_animation:
1176
+ components.html(particles_js, height=370, scrolling=False)
img/1.png ADDED
img/3.png ADDED
img/Exifa-1.png ADDED
img/Exifa-2.png ADDED
img/Exifa-3.png ADDED
img/Exifa-4.png ADDED
img/Exifa-5.png ADDED
img/Exifa.gif ADDED
img/Headshot.png ADDED
img/assistant-done.svg ADDED
img/assistant.gif ADDED

Git LFS Details

  • SHA256: d9c1d2eeffe2fc8986d994e74b154d857b2bc6d291c277d7e03100974e8b6030
  • Pointer size: 132 Bytes
  • Size of remote file: 1.02 MB
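For reference, the 132-byte pointer that Git stores for this LFS-tracked file follows the standard Git LFS pointer format, roughly as below (the exact byte count is not shown above, only the rounded 1.02 MB):

version https://git-lfs.github.com/spec/v1
oid sha256:d9c1d2eeffe2fc8986d994e74b154d857b2bc6d291c277d7e03100974e8b6030
size <size of remote file in bytes>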
img/email.gif ADDED
img/file1 ADDED
@@ -0,0 +1 @@
+
img/kaggle.gif ADDED
img/linkedin.gif ADDED
img/newsletter.gif ADDED
img/share.gif ADDED
img/topmate.gif ADDED
img/user-done.svg ADDED
img/user.gif ADDED
requirements.txt ADDED
@@ -0,0 +1,12 @@
+ streamlit
+ replicate
+ pdfplumber
+ python-docx
+ pandas
+ transformers
+ exifread
+ requests
+ pillow
+ numpy
+ plotly
+ matplotlib
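To run the app locally (standard Streamlit workflow, not part of this commit): install these dependencies with "pip install -r requirements.txt", provide a Replicate token via st.secrets or the sidebar, and start the app with "streamlit run exifa.py".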