artificialhoney committed on
Commit 4d447d5
1 Parent(s): ffd62d6

feat: graffiti-database.com conditioning

.gitignore CHANGED
@@ -1,3 +1,4 @@
 .DS_Store
 images/
+conditioning/
 files.list
cli.py CHANGED
@@ -7,7 +7,8 @@ import json
 import argparse
 from pathlib import Path
 import os
-import codecs
+
+from controlnet_aux.processor import Processor

 from PIL import UnidentifiedImageError
 from PIL import Image, ImageFile
@@ -176,6 +177,7 @@ class CLI():
                             help='Choose data source to scrape')
         subparsers.add_parser('cleanup', help='Cleans up downloaded images')
         subparsers.add_parser('caption', help='Captions downloaded images')
+        subparsers.add_parser('condition', help='Condition downloaded images')
         metadata = subparsers.add_parser('metadata', help='Creates single meta files from metadata.jsonl')
         metadata.add_argument('--source',
                             default='graffiti.org',
@@ -231,6 +233,21 @@ class CLI():
                 data = json.loads(row)
                 with open('./images/' + data["file"] + '.json', 'w') as j:
                     j.write(json.dumps(data, indent=2, ensure_ascii=False))
+        elif args.command == 'condition':
+            processor_id = 'softedge_hed'
+            processor = Processor(processor_id)
+
+            path = Path("./images").rglob("*.jpg")
+            count = len(list(Path("./images").rglob("*.jpg")))
+            for i, img_p in enumerate(path):
+                path_name = str(img_p)
+                output_path = path_name.replace("images/", "conditioning/")
+                img = Image.open(img_p).convert("RGB")
+
+                processed_image = processor(img, to_pil=True)
+                processed_image.save(output_path)
+
+                print("{0} / {1}".format(i + 1, count), path_name, output_path)

 def main():
     CLI()
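For context, a minimal standalone sketch of what the new condition command does per image: it runs the controlnet_aux Processor with the softedge_hed processor id over each JPEG under ./images and saves the resulting edge map to the mirrored path under ./conditioning. The example filename and the explicit directory creation below are illustrative assumptions; the committed loop writes straight to the mirrored path and relies on those folders already existing.

from pathlib import Path

from PIL import Image
from controlnet_aux.processor import Processor  # controlnet-aux==0.0.6

# Same processor id as in cli.py: HED-based soft-edge detection.
processor = Processor('softedge_hed')

src = Path('./images/graffiti-database.com/example.jpg')  # hypothetical input image
dst = Path(str(src).replace('images/', 'conditioning/'))  # mirrors the path mapping in cli.py
dst.parent.mkdir(parents=True, exist_ok=True)             # assumption: cli.py expects these dirs to exist

img = Image.open(src).convert('RGB')
edge_map = processor(img, to_pil=True)  # PIL image of the soft-edge map used as conditioning
edge_map.save(dst)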
data/graffiti-database.com/conditioning.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b533be29480b8b686432a70814a48c9122385ceaee025b4bc2d324d70ed2cbc1
+size 2418234651
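The archive itself lives in Git LFS, so a plain checkout only contains the pointer shown above (roughly 2.4 GB of data behind it). A sketch of pulling and unpacking it with standard git-lfs commands, using the path added in this commit:

git lfs install
git lfs pull --include="data/graffiti-database.com/conditioning.tar.gz"
tar xzf data/graffiti-database.com/conditioning.tar.gz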
prepare.sh CHANGED
@@ -2,18 +2,31 @@

 args=("$@")

-rm ./data/${args[0]}/images.tar.gz
-rm ./data/${args[0]}/metadata.jsonl
+clean () {
+    rm files.list
+    rm ./data/$1/$2.tar.gz
+    rm ./data/$1/metadata.jsonl
+}

-find ./images -name '*.jpg' -print > files.list
+list () {
+    find ./$1 -name '*.jpg' -print > files.list
+}

-while read p; do
-    cat "$p".json | tr -d "\n" >> ./data/${args[0]}/metadata.jsonl
-    echo >> ./data/${args[0]}/metadata.jsonl
-done < files.list
+meta () {
+    while read p; do
+        cat "$p".json | tr -d "\n" >> ./data/$1/metadata.jsonl
+        echo >> ./data/$1/metadata.jsonl
+    done < files.list
+}

-tar czf ./data/${args[0]}/images.tar.gz --files-from files.list
+tar () {
+    tar czf ./data/$1/$2.tar.gz --files-from files.list
+}

-rm files.list

-datasets-cli test graffiti.py --save_info --all_config
+test () {
+    datasets-cli test graffiti.py --save_info --all_config
+}
+
+# list ${args[1]}
+tar ${args[0]} ${args[1]}
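A usage sketch for the refactored script, assuming the first argument names the data source directory under ./data and the second the archive to build (in this revision only the tar step is invoked; list is left commented out). Note that a shell function named tar shadows the external binary, so the czf call inside it resolves back to the function itself; prefixing it with command, as sketched below, is one way to keep it pointing at the real tar:

# hypothetical invocation for this commit's artifacts
bash prepare.sh graffiti-database.com conditioning

# sketch: stop the tar () wrapper from calling itself
tar () {
    command tar czf ./data/$1/$2.tar.gz --files-from files.list
}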
requirements.txt CHANGED
@@ -1,3 +1,4 @@
 bs4
 requests
-transformers
+transformers
+controlnet-aux==0.0.6