awacke1 committed
Commit 029dbc6 · 1 Parent(s): 4672749

Update backup.py

Files changed (1)
  1. backup.py +71 -7
backup.py CHANGED

@@ -7,6 +7,8 @@ from bs4 import BeautifulSoup
 import hashlib
 import json
 import uuid
+import glob
+import zipfile
 
 EXCLUDED_FILES = ['app.py', 'requirements.txt', 'pre-requirements.txt', 'packages.txt', 'README.md','.gitattributes', "backup.py","Dockerfile"]
 URLS = {
@@ -27,6 +29,25 @@ if not os.path.exists("history.json"):
     with open("history.json", "w") as f:
         json.dump({}, f)
 
+
+@st.cache_resource
+def create_zip_of_files(files):
+    zip_name = "all_files.zip"
+    with zipfile.ZipFile(zip_name, 'w') as zipf:
+        for file in files:
+            zipf.write(file)
+    return zip_name
+
+@st.cache_resource
+def get_zip_download_link(zip_file):
+    with open(zip_file, 'rb') as f:
+        data = f.read()
+    b64 = base64.b64encode(data).decode()
+    href = f'<a href="data:application/zip;base64,{b64}" download="{zip_file}">Download All</a>'
+    return href
+
+
+
 def download_file(url, local_filename):
     if url.startswith('http://') or url.startswith('https://'):
         try:
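The two cached helpers added here bundle files into one archive and expose it as a base64 data-URI link; note that `get_zip_download_link` relies on `base64`, which this commit does not add to the imports, so it must already be imported near the top of backup.py. A minimal standalone sketch of the same zip-then-link pattern (the function name is illustrative, not part of the commit):

```python
import base64
import zipfile

def zip_and_link(paths, zip_name="all_files.zip"):
    # Write the given files into one archive, then wrap the bytes in a
    # data-URI anchor the browser can download without another request.
    with zipfile.ZipFile(zip_name, "w") as zipf:
        for path in paths:
            zipf.write(path)
    with open(zip_name, "rb") as f:
        b64 = base64.b64encode(f.read()).decode()
    return f'<a href="data:application/zip;base64,{b64}" download="{zip_name}">Download All</a>'
```

Because both helpers are decorated with @st.cache_resource, Streamlit reuses the first result for identical arguments, so a stale archive can be served after the underlying files change.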
@@ -157,8 +178,11 @@ def main():
             json.dump({}, f)
 
     with open("history.json", "r") as f:
-        history = json.load(f)
-
+        try:
+            history = json.load(f)
+        except:
+            print('error')
+
     # Handling URL submission
     if url:
         subdir = hashlib.md5(url.encode()).hexdigest()
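The new guard stops a corrupt history.json from raising here, but the bare `except` only prints and leaves `history` unbound, so the `history.values()` loops further down would then fail with a NameError. A defensive variant (the helper name is hypothetical):

```python
import json

def load_history(path="history.json"):
    # Fall back to an empty dict instead of leaving the name unbound,
    # so later history.items()/history.values() calls stay safe.
    try:
        with open(path, "r") as f:
            return json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
```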
@@ -179,14 +203,54 @@ def main():
     for subdir in history.values():
         show_download_links(subdir)
 
+
+    if st.sidebar.button("🗑 Delete All"):
+        # Compose all_files
+        all_files = glob.glob("*.*")
+        all_files = [file for file in all_files if len(os.path.splitext(file)[0]) >= 10]  # exclude files with short names
+        all_files.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)  # sort by file type and file name in descending order
+
+        for file in all_files:
+            os.remove(file)
+        st.experimental_rerun()
+
+    if st.sidebar.button("⬇️ Download All"):
+        # Compose all_files
+        #all_files = glob.glob("*.*")
+        #all_files = [file for file in all_files if len(os.path.splitext(file)[0]) >= 10]  # exclude files with short names
+        #all_files.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)  # sort by file type and file name in descending order
+        for subdir in history.values():  # go through each download directory
+            #show_download_links(subdir)
+            global file_sequence_numbers
+            for file in list_files(subdir):  # go through each file that was downloaded
+                file_path = os.path.join(subdir, file)
+                if file_path not in file_sequence_numbers:
+                    file_sequence_numbers[file_path] = 1
+                else:
+                    file_sequence_numbers[file_path] += 1
+                sequence_number = file_sequence_numbers[file_path]
+                if os.path.isfile(file_path):
+                    st.markdown(get_download_link(file_path), unsafe_allow_html=True)
+                    show_file_operations(file_path, sequence_number)
+                else:
+                    st.write(f"File not found: {file}")
+            zip_file = create_zip_of_files(list_files(subdir))
+            #with open(file, "rb") as f:
+            #    bytes = f.read()
+            #    b64 = base64.b64encode(bytes).decode()
+            #    href = f'<a href="data:file/octet-stream;base64,{b64}" download=\'{os.path.basename(file)}\'>Download: {os.path.basename(file)}</a>'
+            st.sidebar.markdown(get_zip_download_link(zip_file), unsafe_allow_html=True)
+
     # Expander for showing URL history and download links
     with st.expander("URL History and Downloaded Files"):
-        for url, subdir in history.items():
-            st.markdown(f"#### {url}")
-            show_download_links(subdir)
-
+        try:
+            for url, subdir in history.items():
+                st.markdown(f"#### {url}")
+                show_download_links(subdir)
+        except:
+            print('url history is empty')
     # Update each time to show files we have
     for subdir in history.values():
         show_download_links(subdir)
 if __name__ == "__main__":
-    main()
+    main()
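Stripped of the commented-out experiments, the new Download All branch reduces to: walk each downloaded directory, link every file individually, then zip the directory's files and put one bulk link in the sidebar. A condensed sketch under that reading (it assumes backup.py's `list_files`, `get_download_link`, `create_zip_of_files`, `get_zip_download_link`, and `st`); note the committed call hands `create_zip_of_files` the bare names from `list_files(subdir)`, so `zipf.write` would resolve them against the working directory rather than `subdir`:

```python
import os

def download_all(history):
    # Condensed flow of the "⬇️ Download All" branch.
    for subdir in history.values():
        names = list_files(subdir)  # files downloaded into this directory
        for name in names:
            file_path = os.path.join(subdir, name)
            if os.path.isfile(file_path):
                st.markdown(get_download_link(file_path), unsafe_allow_html=True)
        # Joining subdir here avoids the working-directory lookup noted above.
        zip_file = create_zip_of_files([os.path.join(subdir, n) for n in names])
        st.sidebar.markdown(get_zip_download_link(zip_file), unsafe_allow_html=True)
```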
 
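For reference, the Delete All filter and ordering can be exercised in isolation: stems shorter than ten characters are excluded, and the rest sort by extension, then name, both descending, before os.remove and st.experimental_rerun (renamed st.rerun in newer Streamlit releases) refresh the app. The sample file names below are made up:

```python
import os

names = ["report_2023_final.pdf", "notes.txt", "long_named_image.png"]
kept = [n for n in names if len(os.path.splitext(n)[0]) >= 10]
kept.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)
print(kept)  # ['long_named_image.png', 'report_2023_final.pdf']; 'notes.txt' stem is too short
```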