Qemma-GEI / generation_config.json
reaperdoesntknow: Upload Gemma3ForCausalLM (commit 6c4fff5, verified)
{
  "bos_token_id": 2,
  "do_sample": true,
  "eos_token_id": [
    1,
    1,
    1,
    106
  ],
  "max_length": 4096,
  "max_new_tokens": 1024,
  "no_repeat_ngram_size": 3,
  "pad_token_id": 0,
  "repetition_penalty": 1.07,
  "stop_sequences": [
    "</think>",
    "</reasoning_step>"
  ],
  "temperature": 0.4,
  "top_k": 40,
  "top_p": 0.95,
  "transformers_version": "4.57.1"
}
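
A minimal sketch of loading this generation config with transformers and using it for sampling. The repo id below is assumed from the page header (user "reaperdoesntknow", repo "Qemma-GEI") and may not match the actual Hub path; the prompt is only illustrative. Note that "stop_sequences" does not appear to be a field that `generate()` consumes directly; recent transformers versions accept stop strings via `stop_strings` together with a tokenizer, as shown here.

```python
# Sketch: load and apply the generation_config.json above with transformers.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo_id = "reaperdoesntknow/Qemma-GEI"  # assumed Hub path, taken from the page header

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

# Picks up the settings above: do_sample=True, temperature=0.4, top_k=40,
# top_p=0.95, repetition_penalty=1.07, no_repeat_ngram_size=3, plus the
# bos/eos/pad token ids and the length limits.
gen_config = GenerationConfig.from_pretrained(repo_id)

inputs = tokenizer("Explain your reasoning step by step.", return_tensors="pt")
outputs = model.generate(
    **inputs,
    generation_config=gen_config,
    # The config's "stop_sequences" field is not read by generate() itself;
    # recent transformers versions take stop strings via `stop_strings`,
    # which requires passing the tokenizer as well.
    stop_strings=["</think>", "</reasoning_step>"],
    tokenizer=tokenizer,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```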