{
  "module": "keras_nlp.src.models.gemma.gemma_causal_lm_preprocessor",
  "class_name": "GemmaCausalLMPreprocessor",
  "config": {
    "name": "gemma_causal_lm_preprocessor_1",
    "trainable": true,
    "dtype": {
      "module": "keras",
      "class_name": "DTypePolicy",
      "config": {
        "name": "float32"
      },
      "registered_name": null
    },
    "tokenizer": {
      "module": "keras_nlp.src.models.gemma.gemma_tokenizer",
      "class_name": "GemmaTokenizer",
      "config": {
        "name": "gemma_tokenizer",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "int32"
          },
          "registered_name": null
        },
        "proto": null,
        "sequence_length": null,
        "add_bos": false,
        "add_eos": false
      },
      "registered_name": "keras_nlp>GemmaTokenizer"
    },
    "sequence_length": 1024,
    "add_start_token": true,
    "add_end_token": true
  },
  "registered_name": "keras_nlp>GemmaCausalLMPreprocessor"
}