xywang626 and LutherXD committed

Commit c263af9 · verified · 1 parent: 92bcfff

replace the config.json with a correct version (#2)


- replace the config.json with a correct version (f47df8bb98a41bd7188ec3d595fa22243de0fef1)


Co-authored-by: mqhuang <LutherXD@users.noreply.huggingface.co>

Files changed (1)
  1. config.json +5 -5
config.json CHANGED
@@ -17,17 +17,17 @@
   "eos_token_id": 151644,
   "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size": 5120,
+  "hidden_size": 8192,
   "initializer_range": 0.02,
-  "intermediate_size": 27648,
+  "intermediate_size": 29568,
   "k_proj_bias": true,
   "max_length": 20,
   "min_length": 0,
   "model_type": "qwen2",
-  "num_attention_heads": 40,
+  "num_attention_heads": 64,
   "num_beam_groups": 1,
   "num_beams": 1,
-  "num_hidden_layers": 64,
+  "num_hidden_layers": 80,
   "num_key_value_heads": 8,
   "pad_token_id": 152063,
   "pretraining_sequence_length": 131072,
@@ -62,7 +62,7 @@
   "spatial_merge_size": 2,
   "spatial_patch_size": 14,
   "temporal_patch_size": 2,
-  "out_hidden_size": 5120,
+  "out_hidden_size": 8192,
   "tokens_per_second": 2,
   "window_size": 112
  },
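
For reference, the corrected values (hidden_size 8192, 64 attention heads, 80 layers, intermediate_size 29568, 8 KV heads) are internally consistent and match a 72B-scale Qwen2 architecture, whereas the old values described a smaller variant. Below is a minimal sanity-check sketch, assuming the file sits at ./config.json and these keys live at the top level of the JSON as shown in the diff; the path and key layout are assumptions, not part of this commit.

import json

# Load the corrected config (path "./config.json" is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# hidden_size should equal num_attention_heads * head_dim
# (64 * 128 = 8192 for the corrected values).
assert cfg["hidden_size"] == cfg["num_attention_heads"] * cfg["head_dim"]

# Grouped-query attention needs the head count to divide evenly
# by the KV-head count (64 / 8 = 8 query heads per KV head here).
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0

print("config looks consistent:",
      cfg["num_hidden_layers"], "layers,",
      cfg["hidden_size"], "hidden size")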