{ "module": "keras_hub.src.models.gemma.gemma_causal_lm", "class_name": "GemmaCausalLM", "config": { "backbone": { "module": "keras_hub.src.models.gemma.gemma_backbone", "class_name": "GemmaBackbone", "config": { "name": "gemma_backbone", "trainable": true, "vocabulary_size": 256000, "num_layers": 18, "num_query_heads": 8, "num_key_value_heads": 1, "hidden_dim": 2048, "intermediate_dim": 32768, "head_dim": 256, "layer_norm_epsilon": 1e-06, "dropout": 0, "query_head_dim_normalize": true, "use_post_ffw_norm": false, "use_post_attention_norm": false, "final_logit_soft_cap": null, "attention_logit_soft_cap": null, "sliding_window_size": 4096, "use_sliding_window_attention": false }, "registered_name": "keras_hub>GemmaBackbone" }, "preprocessor": { "module": "keras_hub.src.models.gemma.gemma_causal_lm_preprocessor", "class_name": "GemmaCausalLMPreprocessor", "config": { "name": "gemma_causal_lm_preprocessor", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "tokenizer": { "module": "keras_hub.src.models.gemma.gemma_tokenizer", "class_name": "GemmaTokenizer", "config": { "name": "gemma_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "config_file": "tokenizer.json", "proto": null, "sequence_length": null, "add_bos": false, "add_eos": false }, "registered_name": "keras_hub>GemmaTokenizer" }, "config_file": "preprocessor.json", "sequence_length": 512, "add_start_token": true, "add_end_token": true }, "registered_name": "keras_hub>GemmaCausalLMPreprocessor" }, "name": "gemma_causal_lm" }, "registered_name": "keras_hub>GemmaCausalLM" }