{ "architectures": [ "SmolLM3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": null, "dtype": "bfloat16", "eos_token_id": 128012, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 11008, "layer_types": [ "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention" ], "max_position_embeddings": 65536, "mlp_bias": false, "model_type": "smollm3", "no_rope_layer_interval": 4, "no_rope_layers": [ 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0 ], "num_attention_heads": 16, "num_hidden_layers": 36, "num_key_value_heads": 4, "pad_token_id": null, "pretraining_tp": 2, "quantization_config": { "include_input_output_embeddings": false, "modules_to_not_convert": null, "quant_method": "torchao", "quant_type": { "default": { "_data": { "activation_dtype": { "_data": "float8_e4m3fn", "_type": "torch.dtype" }, "activation_value_lb": null, "activation_value_ub": null, "granularity": [ { "_data": {}, "_type": "PerTensor", "_version": 1 }, { "_data": {}, "_type": "PerTensor", "_version": 1 } ], "kernel_preference": { "_data": "AUTO", "_type": "KernelPreference" }, "mm_config": { "_data": { "emulate": false, "pad_inner_dim": false, "use_fast_accum": true }, "_type": "Float8MMConfig", "_version": 1 }, "set_inductor_config": true, "weight_dtype": { "_data": "float8_e4m3fn", "_type": "torch.dtype" } }, "_type": "Float8DynamicActivationFloat8WeightConfig", "_version": 2 } }, "quant_type_kwargs": {}, "untie_embedding_weights": false }, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 5000000.0, "sliding_window": null, "transformers.js_config": { "dtype": "q4", "kv_cache_dtype": { "fp16": "float16", "q4f16": "float16" }, "use_external_data_format": true }, "transformers_version": "4.57.0.dev0", "use_cache": false, "use_sliding_window": false, "vocab_size": 128256 }