nilekhet committed on
Commit
2800253
•
1 Parent(s): 0cfae32

Upload 5 files

.gitattributes CHANGED
@@ -52,3 +52,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.jpg filter=lfs diff=lfs merge=lfs -text
  *.jpeg filter=lfs diff=lfs merge=lfs -text
  *.webp filter=lfs diff=lfs merge=lfs -text
+ benign.tar filter=lfs diff=lfs merge=lfs -text
+ vx-underground_samples.tar filter=lfs diff=lfs merge=lfs -text
bengin_generator.py ADDED
@@ -0,0 +1,28 @@
+ import os
+ import shutil
+
+ def recursive_walk(folder):
+     # os.walk already descends into every subfolder, so no explicit recursion is needed
+     for folder_name, _subfolders, filenames in os.walk(folder):
+         for filename in filenames:
+             if filename.endswith('.exe'):
+                 try:
+                     shutil.copy(os.path.join(folder_name, filename), dir_dst)
+                 except OSError as e:
+                     print(f"Skipping {filename}: {e}")
+
+ # Top-level entries under Program Files that should not be traversed
+ unallowed = ['desktop.ini', 'WindowsApps']
+ dir_src = "C:\\Program Files\\"
+ dir_dst = "C:\\BenignFiles\\"
+
+ if not os.path.exists(dir_dst):
+     os.makedirs(dir_dst)
+     print("Directory", dir_dst, "created")
+
+ # Copy every .exe found under Program Files into the benign-sample directory
+ for entry in os.listdir(dir_src):
+     if entry in unallowed:
+         continue
+     print(os.path.join(dir_src, entry))
+     recursive_walk(os.path.join(dir_src, entry))
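A quick way to sanity-check the collector above, as a minimal sketch assuming the default C:\BenignFiles\ destination used by the script:

import os
# Hypothetical check: count the executables that ended up in the destination folder
collected = [f for f in os.listdir("C:\\BenignFiles\\") if f.lower().endswith(".exe")]
print(len(collected), "benign executables collected")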
benign.tar ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ebd5c4023d5dbfee477b3dc5b7e9871c4d2188b2f03df0544cc003853d94fdd
+ size 578426880
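The oid line in the LFS pointer is the SHA-256 of the archive itself, so after pulling the LFS object the download can be verified locally; a minimal sketch, assuming benign.tar sits in the current directory:

import hashlib
# Hash the archive in chunks and compare against the pointer's oid
h = hashlib.sha256()
with open("benign.tar", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest() == "6ebd5c4023d5dbfee477b3dc5b7e9871c4d2188b2f03df0544cc003853d94fdd")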
malfamily.py ADDED
@@ -0,0 +1,52 @@
+ import os
+ import re
+ import requests
+ import pandas as pd
+ from bs4 import BeautifulSoup
+
+ # Scrape the vx-underground family index for links to the individual family pages
+ url = "https://samples.vx-underground.org/samples/Families/"
+ html_content = requests.get(url).text
+ soup = BeautifulSoup(html_content, "html.parser")
+
+ family_pattern = re.compile(r'https://samples\.vx-underground\.org/samples/Families/[\w-]+/')
+ family_links = [a["href"] for a in soup.find_all("a", href=True) if family_pattern.match(a["href"])]
+
+ data = []
+ download_path = os.getcwd()
+
+ for family_link in family_links:
+     family_name = family_link.split('/')[-2]
+     print(f'Downloading files for {family_name}...')
+     family_html_content = requests.get(family_link).text
+     family_soup = BeautifulSoup(family_html_content, "html.parser")
+
+     # Each sample on a family page is distributed as a .7z archive
+     sample_links = [a["href"] for a in family_soup.find_all("a", href=True) if a["href"].endswith(".7z")]
+
+     for sample_link in sample_links:
+         sample_name = sample_link.split('/')[-1]
+
+         family_path = os.path.join(download_path, family_name)
+         if not os.path.exists(family_path):
+             os.makedirs(family_path)
+         file_path = os.path.join(family_path, sample_name)
+
+         # Skip archives that were already downloaded on a previous run
+         if not os.path.exists(file_path):
+             try:
+                 response = requests.get(sample_link, stream=True)
+                 response.raise_for_status()
+                 with open(file_path, "wb") as f:
+                     # Stream to disk in chunks so large archives are not held in memory
+                     for chunk in response.iter_content(chunk_size=1 << 20):
+                         f.write(chunk)
+             except Exception as e:
+                 print(f"Error downloading {sample_name}: {e}")
+                 continue
+
+         data.append({"family": family_name, "sample": sample_name})
+
+ # Record the family/sample mapping for later reference
+ df = pd.DataFrame(data)
+ df.to_csv("samples_and_families.csv", index=False)
samples_and_families.csv ADDED
The diff for this file is too large to render. See raw diff
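Although the diff is not rendered, samples_and_families.csv is the file written at the end of malfamily.py: one row per downloaded archive, with two columns, family and sample. A minimal sketch of inspecting it with pandas:

import pandas as pd
df = pd.read_csv("samples_and_families.csv")
print(df["family"].value_counts())  # number of archives recorded per malware family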
 
vx-underground_samples.tar ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bfcb3cce731612ca9d7790529d1305c2bd72066a0e6c7e90687502a121217427
+ size 41483008000