# kernalkue
# Initial commit: EXL3 6.0bpw quantization with proper LFS tracking
# 2194d1b
# raw
# history blame
# 635 Bytes
---
# mergekit configuration: multislerp merge of two Llama-3.3-70b finetunes
# onto a cogito-v2-preview-llama-70B base, output in bfloat16.
models:
  # Apocrypha-L3.3-70b-0.3 (pinned snapshot), equal weight with the model below
  - model: /workspace/cache/models--BruhzWater--Apocrypha-L3.3-70b-0.3/snapshots/3facb4c0a7b953ff34a5caa90976830bf82a84c2
    parameters:
      weight: [0.5]
  # Serpents-Tongue-L3.3-70b-0.3 (pinned snapshot), equal weight
  - model: /workspace/cache/models--BruhzWater--Serpents-Tongue-L3.3-70b-0.3/snapshots/d007a7bcc7047d712abb2dfb6ad940fe03cd2047
    parameters:
      weight: [0.5]
# interpolation is done relative to this base model
base_model: /workspace/cache/models--deepcogito--cogito-v2-preview-llama-70B/snapshots/1e1d12e8eaebd6084a8dcf45ecdeaa2f4b8879ce
merge_method: multislerp
tokenizer:
  # take the tokenizer from base_model rather than from either merge input
  source: base
chat_template: llama3
parameters:
  # keep the raw [0.5, 0.5] weights — do not renormalize them to sum to 1
  normalize_weights: false
  # epsilon for multislerp; written as 1.0e-9 (with the decimal point) so
  # YAML 1.1 parsers such as PyYAML resolve a float, not the string "1e-9"
  eps: 1.0e-9
  pad_to_multiple_of: 8
  int8_mask: true
dtype: bfloat16