{ "_class_name": "FluxTransformer2DModel", "_diffusers_version": "0.31.0", "_name_or_path": "/home/user/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/transformer", "attention_head_dim": 16, "axes_dims_rope": [ 4, 4, 8 ], "guidance_embeds": true, "in_channels": 4, "joint_attention_dim": 32, "num_attention_heads": 2, "num_layers": 1, "num_single_layers": 1, "patch_size": 1, "pooled_projection_dim": 32 }