Yntec committed on
Commit 05cb31f
1 Parent(s): 3d26022

Delete safety_checker

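This commit removes the safety_checker folder (its config and all of its weight files) from the repository. With those files gone, the checkpoint can no longer be loaded with the default NSFW filter attached; a minimal sketch of loading such a checkpoint with diffusers and the checker simply disabled (the repo id below is a placeholder, since the actual repository name is not shown in this commit):

```python
# Minimal sketch, not the author's exact workflow: load a Stable Diffusion
# checkpoint whose safety_checker folder has been deleted from the repo.
import torch
from diffusers import StableDiffusionPipeline

MODEL_ID = "user/some-diffusion-model"  # placeholder repo id, not shown in this commit

pipe = StableDiffusionPipeline.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float16,
    safety_checker=None,            # the checker weights no longer exist in the repo
    requires_safety_checker=False,  # suppress the warning about the missing checker
).to("cuda")

image = pipe("an astronaut riding a horse").images[0]
image.save("out.png")
```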
safety_checker/config.json DELETED
@@ -1,168 +0,0 @@
- {
-   "_commit_hash": "cb41f3a270d63d454d385fc2e4f571c487c253c5",
-   "_name_or_path": "CompVis/stable-diffusion-safety-checker",
-   "architectures": [
-     "StableDiffusionSafetyChecker"
-   ],
-   "initializer_factor": 1.0,
-   "logit_scale_init_value": 2.6592,
-   "model_type": "clip",
-   "projection_dim": 768,
-   "text_config": {
-     "_name_or_path": "",
-     "add_cross_attention": false,
-     "architectures": null,
-     "attention_dropout": 0.0,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": 0,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "dropout": 0.0,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": 2,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "quick_gelu",
-     "hidden_size": 768,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "initializer_factor": 1.0,
-     "initializer_range": 0.02,
-     "intermediate_size": 3072,
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_norm_eps": 1e-05,
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "max_position_embeddings": 77,
-     "min_length": 0,
-     "model_type": "clip_text_model",
-     "no_repeat_ngram_size": 0,
-     "num_attention_heads": 12,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_hidden_layers": 12,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": 1,
-     "prefix": null,
-     "problem_type": null,
-     "projection_dim": 512,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "transformers_version": "4.30.2",
-     "typical_p": 1.0,
-     "use_bfloat16": false,
-     "vocab_size": 49408
-   },
-   "torch_dtype": "float16",
-   "transformers_version": null,
-   "vision_config": {
-     "_name_or_path": "",
-     "add_cross_attention": false,
-     "architectures": null,
-     "attention_dropout": 0.0,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "dropout": 0.0,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "quick_gelu",
-     "hidden_size": 1024,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "image_size": 224,
-     "initializer_factor": 1.0,
-     "initializer_range": 0.02,
-     "intermediate_size": 4096,
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_norm_eps": 1e-05,
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "clip_vision_model",
-     "no_repeat_ngram_size": 0,
-     "num_attention_heads": 16,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_channels": 3,
-     "num_hidden_layers": 24,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "patch_size": 14,
-     "prefix": null,
-     "problem_type": null,
-     "projection_dim": 512,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "transformers_version": "4.30.2",
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   }
- }
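The deleted config.json above records "_name_or_path": "CompVis/stable-diffusion-safety-checker", i.e. the removed checker was the stock CompVis CLIP-based filter. If filtering is still wanted, a checker of the same architecture can be re-attached from that upstream repo. A hedged sketch, assuming the upstream repo still hosts both the checker weights and an image-processor config, and that `pipe` is a pipeline loaded as in the example above:

```python
# Sketch: re-attach the upstream checker that the deleted config pointed at.
import torch
from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker
from transformers import CLIPImageProcessor

checker_id = "CompVis/stable-diffusion-safety-checker"

# Match the pipeline's dtype/device so the checker runs on the generated images.
pipe.safety_checker = StableDiffusionSafetyChecker.from_pretrained(
    checker_id, torch_dtype=torch.float16
).to("cuda")
pipe.feature_extractor = CLIPImageProcessor.from_pretrained(checker_id)
```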
 
safety_checker/model.fp16.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:08902f19b1cfebd7c989f152fc0507bef6898c706a91d666509383122324b511
- size 608018440
 
safety_checker/model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:11cfe53105625af8c00faac32a430626641cce686454f3c39d837f14397d858b
- size 1215981832
 
safety_checker/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:16d28f2b37109f222cdc33620fdd262102ac32112be0352a7f77e9614b35a394
- size 1216064769
 
safety_checker/pytorch_model.fp16.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:22ba87205445ad5def13e54919b038dcfb7321ec1c3f4b12487d4fba6036125f
- size 608103564
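The four weight deletions above are Git LFS pointer files rather than the weights themselves: each pointer records only the sha256 oid and byte size of the real file kept in LFS storage. A short sketch of verifying a previously downloaded copy against the pointer just deleted for pytorch_model.fp16.bin (the local path is a placeholder):

```python
# Sketch: check a locally cached weight file against the sha256/size recorded
# in the deleted Git LFS pointer. The local path is a placeholder.
import hashlib
import os

path = "pytorch_model.fp16.bin"  # placeholder local path
expected_oid = "22ba87205445ad5def13e54919b038dcfb7321ec1c3f4b12487d4fba6036125f"
expected_size = 608103564

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("local file matches the deleted LFS pointer")
```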