```json
{
    "module": "keras_hub.src.models.gemma3.gemma3_causal_lm",
    "class_name": "Gemma3CausalLM",
    "config": {
        "backbone": {
            "module": "keras_hub.src.models.gemma3.gemma3_backbone",
            "class_name": "Gemma3Backbone",
            "config": {
                "name": "gemma3_backbone",
                "trainable": true,
                "vocabulary_size": 262144,
                "image_size": null,
                "num_layers": 26,
                "num_query_heads": 4,
                "num_key_value_heads": 1,
                "hidden_dim": 1152,
                "intermediate_dim": 6912,
                "head_dim": 256,
                "query_head_dim_normalize": true,
                "use_query_key_norm": true,
                "use_post_ffw_norm": true,
                "use_post_attention_norm": true,
                "attention_logit_soft_cap": null,
                "final_logit_soft_cap": null,
                "use_sliding_window_attention": true,
                "sliding_window_size": 512,
                "local_rope_scaling_factor": 1.0,
                "global_rope_scaling_factor": 1.0,
                "vision_encoder": null,
                "layer_norm_epsilon": 1e-06,
                "dropout": 0
            },
            "registered_name": "keras_hub>Gemma3Backbone"
        },
        "preprocessor": {
            "module": "keras_hub.src.models.gemma3.gemma3_causal_lm_preprocessor",
            "class_name": "Gemma3CausalLMPreprocessor",
            "config": {
                "name": "gemma3_causal_lm_preprocessor",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "tokenizer": {
                    "module": "keras_hub.src.models.gemma3.gemma3_tokenizer",
                    "class_name": "Gemma3Tokenizer",
                    "config": {
                        "name": "gemma3_tokenizer",
                        "trainable": true,
                        "dtype": {
                            "module": "keras",
                            "class_name": "DTypePolicy",
                            "config": {
                                "name": "int32"
                            },
                            "registered_name": null
                        },
                        "config_file": "tokenizer.json",
                        "proto": null,
                        "sequence_length": null,
                        "add_bos": false,
                        "add_eos": false
                    },
                    "registered_name": "keras_hub>Gemma3Tokenizer"
                },
                "config_file": "preprocessor.json",
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true,
                "num_vision_tokens_per_image": 0,
                "max_images_per_prompt": 0
            },
            "registered_name": "keras_hub>Gemma3CausalLMPreprocessor"
        },
        "name": "gemma3_causal_lm"
    },
    "registered_name": "keras_hub>Gemma3CausalLM"
}
```
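
For context, a minimal sketch of how a config serialized like this is typically consumed, assuming it is the `config.json` of a local KerasHub preset directory. The directory name `gemma3_preset/` below is hypothetical; the calls follow the standard KerasHub `from_preset` / `generate` workflow rather than anything specific to this file.

```python
# Minimal sketch (assumed setup): loading a Gemma3 causal LM from a local
# preset directory whose config.json matches the serialization above.
# "gemma3_preset/" is a hypothetical path; the directory is expected to also
# hold preprocessor.json, tokenizer.json, and the model weights.
import keras_hub

causal_lm = keras_hub.models.Gemma3CausalLM.from_preset("gemma3_preset/")

# The preprocessor config above uses sequence_length=1024 and a text-only
# setup (vision_encoder=null, max_images_per_prompt=0), so plain string
# prompts are expected.
output = causal_lm.generate("Why is the sky blue?", max_length=64)
print(output)
```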