{ "group_size": { "gate_up_proj": 32, "qkv_proj": 128, "o_proj": 128, "down_proj": 32 }, "nbits": { "gate_up_proj": 2, "qkv_proj": 4, "o_proj": 4, "down_proj": 2 }, "lora_rank": 64, "skipped_dora_layers": [], "block_influence_layers": [ "layers.0", "layers.13", "layers.15", "layers.17", "layers.19", "layers.21", "layers.23", "layers.26", "layers.29", "layers.31", "layers.33", "layers.56", "layers.59", "layers.68", "layers.71", "layers.79" ], "groupsize_4bit": 128, "bitblas_dtype": "bfloat16" }