{
    "module": "keras_hub.src.models.llama3.llama3_tokenizer",
    "class_name": "Llama3Tokenizer",
    "config": {
        "name": "llama3_tokenizer",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "int32"
            },
            "registered_name": null
        },
        "config_file": "tokenizer.json",
        "sequence_length": null,
        "add_prefix_space": false,
        "unsplittable_tokens": [
            "<|eom_id|>",
            "<|eot_id|>",
            "<|finetune_right_pad_id|>",
            "<|begin_of_text|>",
            "<|python_tag|>",
            "<|start_header_id|>",
            "<|end_of_text|>",
            "<|end_header_id|>"
        ]
    },
    "registered_name": "keras_hub>Llama3Tokenizer"
}