chongjie committed
Commit c455281
1 Parent(s): 67ce90e

Upload 2 files

vae_pretrained/config.yaml ADDED
@@ -0,0 +1,92 @@
+ exp_root_dir: "outputs"
+ name: "michelangelo-autoencoder/l256-e64-ne8-nd16"
+ tag: michelangelo-autoencoder+n4096+noise0.0+pfeat3+normembFalse+lr5e-05+qkvbiasFalse+nfreq8+ln_postTrue
+ resume: ./ckpts/vae_pretrained/model.ckpt
+ seed: 0
+ data_type: "objaverse-datamodule"
+ data:
+   root_dir: 'data/objaverse-MIX'
+   data_type: "occupancy"
+   n_samples: 4096
+   noise_sigma: 0.
+
+   load_supervision: True
+   supervision_type: "occupancy"
+   n_supervision: 10000
+
+   load_image: False   # whether to load images
+   load_caption: False # whether to load captions
+
+   batch_size: 8
+   num_workers: 16
+
+ system_type: "shape-autoencoder-system"
+ system:
+   sample_posterior: true
+
+   shape_model_type: "michelangelo-aligned-autoencoder"
+   shape_model:
+     num_latents: 256
+     embed_dim: 64
+     point_feats: 3
+     out_dim: 1
+     num_freqs: 8
+     include_pi: false
+     heads: 12
+     width: 768
+     num_encoder_layers: 8
+     num_decoder_layers: 16
+     use_ln_post: true
+     init_scale: 0.25
+     qkv_bias: false
+     use_flash: true
+     use_checkpoint: true
+
+   loggers:
+     wandb:
+       enable: false
+       project: "CraftsMan"
+       name: shape-autoencoder+${name}+${tag}
+
+   loss:
+     lambda_logits: 1.
+     lambda_kl: 0.001
+
+   optimizer:
+     name: AdamW
+     args:
+       lr: 5e-05
+       betas: [0.9, 0.99]
+       eps: 1.e-6
+
+   scheduler:
+     name: SequentialLR
+     interval: step
+     schedulers:
+       - name: LinearLR
+         interval: step
+         args:
+           start_factor: 1e-6
+           end_factor: 1.0
+           total_iters: 5000
+       - name: CosineAnnealingLR
+         interval: step
+         args:
+           T_max: 5000
+           eta_min: 0.
+     milestones: [5000]
+
+ trainer:
+   num_nodes: 1
+   max_epochs: 100000
+   log_every_n_steps: 5
+   num_sanity_val_steps: 1
+   # val_check_interval: 200
+   check_val_every_n_epoch: 1
+   enable_progress_bar: true
+   precision: 16-mixed
+
+ checkpoint:
+   save_last: true
+   save_top_k: -1
+   every_n_train_steps: 5000
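
The `${name}`/`${tag}` references in the wandb logger name use OmegaConf-style interpolation, so a quick way to sanity-check the uploaded file is to load and resolve it. A minimal sketch, assuming OmegaConf is installed and the file sits at vae_pretrained/config.yaml; the CraftsMan training code may parse the config through its own loader:

# Sketch: load and inspect the uploaded VAE config (assumes `pip install omegaconf`).
from omegaconf import OmegaConf

cfg = OmegaConf.load("vae_pretrained/config.yaml")
OmegaConf.resolve(cfg)  # expands ${name} / ${tag} in system.loggers.wandb.name

print(cfg.system.shape_model.num_latents)  # 256 latent tokens
print(cfg.system.shape_model.embed_dim)    # 64-dim latent embedding
print(cfg.system.loggers.wandb.name)       # interpolated run name
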
vae_pretrained/model.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:855522f00642e268ae82ef08f037734e93a317689ab9a75bdc6fed64f8f6a594
+ size 741101398
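
The model.ckpt entry above is a Git LFS pointer, not the weights themselves. After fetching the real file (for example with `git lfs pull`), it can be checked against the oid and size recorded in the pointer. A small sketch, assuming the checkpoint has been pulled to vae_pretrained/model.ckpt:

# Sketch: confirm the downloaded checkpoint matches the LFS pointer above.
import hashlib
import os

path = "vae_pretrained/model.ckpt"
assert os.path.getsize(path) == 741101398  # size field from the pointer

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

# Expect the oid from the pointer:
# 855522f00642e268ae82ef08f037734e93a317689ab9a75bdc6fed64f8f6a594
print(h.hexdigest())
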