{
  "_name_or_path": "/root/.cache/torch/sentence_transformers/BAAI_bge-small-zh/",
  "architectures": [
    "BertModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 512,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 2048,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 8,
  "num_hidden_layers": 4,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 21128
}