DaizeDong committed
Commit: 3f79646
1 parent: e063728

Update config.json

Files changed (1): config.json (+2, -2)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "adaptive_position_length": false,
   "architectures": [
-    "BondFormerForCausalLM"
+    "GraphsGPTForCausalLM"
   ],
   "atom_vocab_size": 118,
   "bond_vocab_size": 92,
@@ -13,7 +13,7 @@
   "initializer_method": "hidden",
   "initializer_range": 0.02,
   "intermediate_size": 2048,
-  "model_type": "bond_former",
+  "model_type": "graphs_gpt",
   "node_loss_weight": 1.0,
   "num_attention_heads": 8,
   "num_fingerprints": 8,
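Because both the `architectures` entry and `model_type` were renamed, downstream loaders must now resolve the GraphsGPT classes rather than the old BondFormer ones. Below is a minimal sketch of reading the updated config with Hugging Face Transformers; the repository id is a hypothetical placeholder, and `trust_remote_code=True` is assumed to be needed since `graphs_gpt` is not a built-in model type:

from transformers import AutoConfig

# Hypothetical repo id (assumption); "graphs_gpt" is a custom model_type,
# so the repository's own modeling code must be trusted for resolution.
config = AutoConfig.from_pretrained(
    "DaizeDong/GraphsGPT",
    trust_remote_code=True,
)
print(config.architectures)  # expected: ['GraphsGPTForCausalLM']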