Update config.json
config.json · +3 -3
```diff
@@ -8,9 +8,9 @@
     "AutoConfig": "modeling_clip_masked_lm.CLIPTextConfig",
     "AutoModelForMaskedLM": "modeling_clip_masked_lm.CLIPTextModelForMaskedLM"
   },
-  "bos_token_id":
+  "bos_token_id": 49406,
   "dropout": 0.0,
-  "eos_token_id":
+  "eos_token_id": 49407,
   "hidden_act": "quick_gelu",
   "hidden_size": 768,
   "initializer_factor": 1.0,
@@ -21,7 +21,7 @@
   "model_type": "clip_text_model",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "pad_token_id":
+  "pad_token_id": 49407,
   "projection_dim": 768,
   "torch_dtype": "float32",
   "transformers_version": "4.24.0",
```
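The new values are the CLIP BPE tokenizer's special-token ids: `<|startoftext|>` is 49406, `<|endoftext|>` is 49407, and `<|endoftext|>` also serves as the pad token. A minimal sketch of how this config is consumed at load time; the repo id below is a placeholder, and `trust_remote_code=True` is needed because the `auto_map` entries point at the custom `modeling_clip_masked_lm` module shipped with the repo:

```python
# Minimal sketch: loading the custom masked-LM CLIP text model whose config
# this commit updates. "your-namespace/clip-text-masked-lm" is a placeholder
# repo id, not the actual repository.
from transformers import AutoConfig, AutoModelForMaskedLM

repo = "your-namespace/clip-text-masked-lm"  # placeholder (assumption)

# trust_remote_code=True lets transformers resolve the auto_map entries to
# modeling_clip_masked_lm.CLIPTextConfig / .CLIPTextModelForMaskedLM.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
print(config.bos_token_id, config.eos_token_id, config.pad_token_id)
# expected after this commit: 49406 49407 49407

model = AutoModelForMaskedLM.from_pretrained(repo, trust_remote_code=True)
```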