{
  "_name_or_path": "ucaslcl/GOT-OCR2_0",
  "architectures": [
    "GOTQwenForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "modeling_GOT.GOTConfig",
    "AutoModel": "modeling_GOT.GOTQwenForCausalLM"
  },
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151643,
  "freeze_vision_tower": false,
  "hidden_act": "silu",
  "hidden_size": 1024,
  "im_end_token": 151858,
  "im_patch_token": 151859,
  "im_start_token": 151857,
  "image_token_len": 256,
  "initializer_range": 0.02,
  "intermediate_size": 2816,
  "max_position_embeddings": 32768,
  "max_window_layers": 21,
  "model_type": "GOT",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "num_key_value_heads": 16,
  "rms_norm_eps": 1e-06,
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": true,
  "dtype": "bfloat16",
  "use_cache": true,
  "use_im_start_end": true,
  "use_sliding_window": false,
  "vocab_size": 151860
}