{
  "architectures": [
    "Qwen3VLForConditionalGeneration"
  ],
  "model_type": "designer-thinking",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.44.2",
  "thinking_tokens": 2000000,
  "_name_or_path": "zenlm/zen-designer-235b-a22b-thinking",
  "_base_model": "Qwen/Qwen3-VL-235B-A22B-Thinking",
  "vision_config": {
    "hidden_size": 2048,
    "image_size": 2048,
    "num_hidden_layers": 48,
    "patch_size": 14
  },
  "text_config": {
    "vocab_size": 151936,
    "hidden_size": 8192,
    "num_hidden_layers": 80,
    "num_attention_heads": 64,
    "num_key_value_heads": 8,
    "max_position_embeddings": 131072
  },
  "num_experts": 64,
  "num_experts_per_tok": 4,
  "expert_interval": 1,
  "_total_params": "235B",
  "_active_params": "22B"
}