bert_config.json 314 B

{
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "max_position_embeddings": 512,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "type_vocab_size": 2,
  "vocab_size": 30522
}