praeclarumjj3 commited on
Commit
e65104f
1 parent: 05456c5
Files changed (1) hide show
  1. config.json +2 -0
config.json CHANGED
@@ -391,6 +391,8 @@
391
  "model_type": "oneformer",
392
  "num_attention_heads": 8,
393
  "num_hidden_layers": 10,
 
 
394
  "text_encoder_config": {
395
  "max_seq_len": 77,
396
  "task_seq_len": 77,
 
391
  "model_type": "oneformer",
392
  "num_attention_heads": 8,
393
  "num_hidden_layers": 10,
394
+ "output_attentions": true,
395
+ "output_hidden_states": true,
396
  "text_encoder_config": {
397
  "max_seq_len": 77,
398
  "task_seq_len": 77,