Keeby-smilyai committed (verified)
Commit 931936f · Parent: 2d47ce9

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "model_type": "sam1",
+   "architectures": [
+     "SAM1ForCausalLM"
+   ],
+   "vocab_size": 50261,
+   "max_position_embeddings": 1024,
+   "hidden_size": 768,
+   "num_hidden_layers": 16,
+   "num_attention_heads": 12,
+   "intermediate_size": 5376,
+   "hidden_act": "silu",
+   "rope_theta": 10000,
+   "rms_norm_eps": 1e-05,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "pad_token_id": 50256,
+   "custom_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<think>",
+     "<think/>"
+   ]
+ }
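The config describes a small causal LM: 16 layers, hidden size 768, 12 attention heads, SiLU MLP, RoPE and RMSNorm, and a GPT-2-sized vocabulary extended with four chat/thinking markers. Below is a minimal loading sketch, not the author's verified usage: the repo id is hypothetical, and since "sam1" is not a built-in transformers model type, AutoModel would only resolve it if the repo ships custom modeling code (hence trust_remote_code=True).

```python
# Minimal sketch, assuming the repo provides custom modeling code so that the
# "sam1" model_type / SAM1ForCausalLM architecture can be resolved.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "Keeby-smilyai/sam1"  # hypothetical repo id

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.num_hidden_layers, config.hidden_size)  # 16 768

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
print(sum(p.numel() for p in model.parameters()))  # parameter count
```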
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a31500d64f87cf2e35d743c96692b98b3ea78d4b5c42679a3bca4b8245787f67
+ size 1252639768
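This is the git-lfs pointer for the ~1.25 GB weights file. Once the file itself is downloaded, the safetensors library can list its tensors without instantiating the model; a small sketch (the local path is illustrative):

```python
# Sketch: inspect the downloaded weights file directly. The local path is
# illustrative; fetch the file first (e.g. with huggingface_hub).
from safetensors import safe_open

with safe_open("model.safetensors", framework="pt") as f:
    for name in f.keys():
        t = f.get_tensor(name)
        print(name, tuple(t.shape), t.dtype)
```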
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "tokenizer_class": "GPT2Tokenizer",
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "bos_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>",
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<think>",
+     "<think/>"
+   ]
+ }
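The tokenizer is a GPT-2 BPE tokenizer that reuses <|endoftext|> for pad/bos/eos/unk and registers the four chat/thinking markers as additional special tokens, which also matches the vocab_size of 50261 (50257 base GPT-2 tokens plus 4) in config.json. A loading sketch, again with a hypothetical repo id:

```python
# Sketch with a hypothetical repo id; checks that the chat/thinking markers
# are registered as special tokens and map to ids above GPT-2's base vocab.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Keeby-smilyai/sam1")  # hypothetical repo id

print(tok.additional_special_tokens)                 # the four markers above
print(tok.pad_token, tok.eos_token, tok.bos_token)   # all "<|endoftext|>"
print(tok.convert_tokens_to_ids("<|im_start|>"))     # single id >= 50257
```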
training_history.csv ADDED
@@ -0,0 +1,6 @@
+ accuracy,loss,val_accuracy,val_loss
+ 0.8683860898017883,0.8338250517845154,0.9013352394104004,0.4916256070137024
+ 0.9069636464118958,0.4496036469936371,0.9123231768608093,0.4182259440422058
+ 0.9167993068695068,0.3842655122280121,0.9181107878684998,0.3843038082122803
+ 0.9244006872177124,0.33804553747177124,0.9216646552085876,0.365914523601532
+ 0.9301119446754456,0.30558690428733826,0.9232332110404968,0.3598633408546448
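Five rows of metrics are logged; validation loss falls from about 0.49 to 0.36 and validation accuracy rises from about 0.90 to 0.92. A small sketch for reading and plotting the curve, assuming pandas and matplotlib are available:

```python
# Sketch: read and plot the logged curve from the training_history.csv
# added in this commit (each row treated as one epoch).
import pandas as pd

history = pd.read_csv("training_history.csv")
history.index.name = "epoch"

ax = history[["loss", "val_loss"]].plot(marker="o", ylabel="loss")
ax.figure.savefig("training_curve.png")
```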