lemon-mint committed
Commit 6310651 · verified · Parent(s): b9452ae

Upload folder using huggingface_hub

Files changed (1)
config.json +18 -1
config.json CHANGED
@@ -1 +1,18 @@
- {"vocab_size": 30528, "hidden_size": 768, "num_hidden_layers": 12, "num_attention_heads": 12, "intermediate_size": 3072, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "attention_probs_dropout_prob": 0.0, "max_position_embeddings": 8192, "type_vocab_size": 0, "layer_norm_type": "layer_norm", "layer_norm_eps": 1e-06, "pad_token_id": 0, "position_embedding_type": "rope", "rope_theta": 500000.0, "pack_qkv": true}
+ {
+ "vocab_size": 30528,
+ "hidden_size": 768,
+ "num_hidden_layers": 12,
+ "num_attention_heads": 12,
+ "intermediate_size": 3072,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "attention_probs_dropout_prob": 0.0,
+ "max_position_embeddings": 8192,
+ "type_vocab_size": 0,
+ "layer_norm_type": "layer_norm",
+ "layer_norm_eps": 1e-06,
+ "pad_token_id": 0,
+ "position_embedding_type": "rope",
+ "rope_theta": 500000.0,
+ "pack_qkv": true
+ }