{ "architectures": [ "FLAVACodebook" ], "freeze": true, "hidden_size": 256, "initializer_range": 0.02, "input_channels": 3, "model_type": "flava_codebook", "num_blocks_per_group": 2, "num_groups": 4, "torch_dtype": "float32", "transformers_version": "4.18.0.dev0", "vocab_size": 8192 }