vickie02736 committed on
Commit f6f133a · verified · 1 Parent(s): a1ee5a0

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +9 -0
  2. ads_0_length_8000/ALLEGRO/results/adsorption/best_model.pth +3 -0
  3. ads_0_length_8000/ALLEGRO/results/adsorption/last_model.pth +3 -0
  4. ads_0_length_8000/ALLEGRO/results/adsorption/trainer.pth +3 -0
  5. ads_0_length_8000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt +3 -0
  6. ads_0_length_8000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt +3 -0
  7. ads_0_length_8000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt +3 -0
  8. ads_0_length_8000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_transform.pt +3 -0
  9. ads_0_length_8000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/data.pth +3 -0
  10. ads_0_length_8000/trainset.xyz +3 -0
  11. ads_16_length_2000/ALLEGRO/allegro.sh +40 -0
  12. ads_16_length_2000/ALLEGRO/results/adsorption/best_model.pth +3 -0
  13. ads_16_length_2000/ALLEGRO/results/adsorption/config.yaml +0 -0
  14. ads_16_length_2000/ALLEGRO/results/adsorption/last_model.pth +3 -0
  15. ads_16_length_2000/ALLEGRO/results/adsorption/log +0 -0
  16. ads_16_length_2000/ALLEGRO/results/adsorption/metrics_batch_train.csv +0 -0
  17. ads_16_length_2000/ALLEGRO/results/adsorption/metrics_batch_val.csv +0 -0
  18. ads_16_length_2000/ALLEGRO/results/adsorption/metrics_epoch.csv +0 -0
  19. ads_16_length_2000/ALLEGRO/results/adsorption/metrics_initialization.csv +2 -0
  20. ads_16_length_2000/ALLEGRO/results/adsorption/trainer.pth +3 -0
  21. ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/params.yaml +13 -0
  22. ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt +3 -0
  23. ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt +3 -0
  24. ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/params.yaml +13 -0
  25. ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt +3 -0
  26. ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_transform.pt +3 -0
  27. ads_16_length_2000/ALLEGRO/slurm-254487.out +0 -0
  28. ads_16_length_2000/ALLEGRO/test_config.yaml +63 -0
  29. ads_16_length_2000/ALLEGRO/train_config.yaml +78 -0
  30. ads_16_length_2000/MACE/MACE_model.model +3 -0
  31. ads_16_length_2000/MACE/MACE_model_compiled.model +3 -0
  32. ads_16_length_2000/MACE/MACE_model_stagetwo.model +3 -0
  33. ads_16_length_2000/MACE/MACE_model_stagetwo_compiled.model +3 -0
  34. ads_16_length_2000/MACE/checkpoints/MACE_model_run-123.model +3 -0
  35. ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_epoch-0.pt +3 -0
  36. ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_epoch-375_swa.pt +3 -0
  37. ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_stagetwo.model +3 -0
  38. ads_16_length_2000/NEQUIP/results/adsorption/best_model.pth +3 -0
  39. ads_16_length_2000/NEQUIP/results/adsorption/last_model.pth +3 -0
  40. ads_16_length_2000/NEQUIP/results/adsorption/log +0 -0
  41. ads_16_length_2000/NEQUIP/results/adsorption/metrics_batch_train.csv +0 -0
  42. ads_16_length_2000/NEQUIP/results/adsorption/metrics_batch_val.csv +0 -0
  43. ads_16_length_2000/NEQUIP/results/adsorption/metrics_epoch.csv +0 -0
  44. ads_16_length_2000/NEQUIP/results/adsorption/metrics_initialization.csv +2 -0
  45. ads_16_length_2000/NEQUIP/results/adsorption/trainer.pth +3 -0
  46. ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/params.yaml +13 -0
  47. ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt +3 -0
  48. ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt +3 -0
  49. ads_16_length_2000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/params.yaml +13 -0
  50. ads_16_length_2000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt +3 -0
.gitattributes CHANGED
@@ -63,3 +63,12 @@ ads_0_length_8000/testset.xyz filter=lfs diff=lfs merge=lfs -text
  ads_0_length_8000/MACE/results/MACE_model_run-123_train.txt filter=lfs diff=lfs merge=lfs -text
  ads_8_length_4000/trainset.xyz filter=lfs diff=lfs merge=lfs -text
  ads_0_length_8000/ALLEGRO/results/adsorption/metrics_batch_train.csv filter=lfs diff=lfs merge=lfs -text
+ training_data_0ads.xyz filter=lfs diff=lfs merge=lfs -text
+ ads_0_length_8000/trainset.xyz filter=lfs diff=lfs merge=lfs -text
+ ads_16_length_2000/testset.xyz filter=lfs diff=lfs merge=lfs -text
+ training_data_8ads.xyz filter=lfs diff=lfs merge=lfs -text
+ ads_16_length_2000/trainset.xyz filter=lfs diff=lfs merge=lfs -text
+ training_data_16ads.xyz filter=lfs diff=lfs merge=lfs -text
+ training_data_32ads.xyz filter=lfs diff=lfs merge=lfs -text
+ ads_32_length_4000/testset.xyz filter=lfs diff=lfs merge=lfs -text
+ ads_32_length_4000/MACE/results/MACE_model_run-123_train.txt filter=lfs diff=lfs merge=lfs -text
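
Note: every path listed in .gitattributes above is routed through Git LFS, so a plain clone of this repository only downloads small pointer stubs for the datasets and model weights. A minimal sketch of fetching the real payloads for one dataset slice, assuming git-lfs is installed and <repo-url> stands in for this repository's clone URL (hypothetical placeholder):

git lfs install
git clone <repo-url> adsorption-mlips && cd adsorption-mlips
# pull only the 16-adsorbate / 2000-frame artifacts instead of every LFS object
git lfs pull --include="ads_16_length_2000/**"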
ads_0_length_8000/ALLEGRO/results/adsorption/best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1e737e889ee6469728b52244f9eb8065b29615a975111ce28ec5b71e634a4e9
+ size 5650623
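
The version / oid / size triples in the ADDED files throughout this commit are Git LFS pointer contents, not the checkpoints themselves; the oid field records the SHA-256 of the real file. After pulling the binaries, a download can be verified against its pointer, for example:

# the digest should match the oid recorded in the pointer above
sha256sum ads_0_length_8000/ALLEGRO/results/adsorption/best_model.pth
# expected: a1e737e889ee6469728b52244f9eb8065b29615a975111ce28ec5b71e634a4e9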
ads_0_length_8000/ALLEGRO/results/adsorption/last_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ae413fd56327bceda21df37dd1bc8b475e2c79d80504c3132d8bb563caa40a4
+ size 5650623
ads_0_length_8000/ALLEGRO/results/adsorption/trainer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6dcf80cc652d71ffb06f608561fde8bdcc72f30f2bfebfd4603e32ce3c4161a
+ size 11339221
ads_0_length_8000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876
ads_0_length_8000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f3eb6d1179ad1ceab1d0fabf921d7e445593d2623299b7dbb9f93bc193c199
+ size 888
ads_0_length_8000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876
ads_0_length_8000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_transform.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f3eb6d1179ad1ceab1d0fabf921d7e445593d2623299b7dbb9f93bc193c199
+ size 888
ads_0_length_8000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/data.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2451cd3a619aee310e2824c9165dd5bde57b296e62914a90a159d2849391fb26
+ size 305621624
ads_0_length_8000/trainset.xyz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6ebe425db65633904c5bf333a02442ef0b617520a58c50037eeefcc9f9b3305
+ size 297382792
ads_16_length_2000/ALLEGRO/allegro.sh ADDED
@@ -0,0 +1,40 @@
+ #!/bin/bash
+
+ #SBATCH --job-name=a_2k              # job name
+ #SBATCH --account=PAS2490            # project ID
+ #SBATCH --nodes=1                    # number of nodes
+ #SBATCH --ntasks-per-node=1          # tasks per node
+ #SBATCH --cpus-per-task=16           # CPU cores per task
+ #SBATCH --gpus-per-node=1            # GPUs per node
+ #SBATCH --mem=200G                   # memory limit
+ #SBATCH --time=8:00:00               # wall-time limit
+
+ #SBATCH --mail-type=BEGIN,END,FAIL
+ #SBATCH --mail-user=uceckz0@ucl.ac.uk
+
+
+
+ convert_to_seconds() {
+     date -d "${1//_/ }" +%s
+ }
+ start_time=$(date +%Y-%m-%d_%H:%M:%S)
+ echo "Train start time: $start_time"
+
+ source $HOME/miniconda3/etc/profile.d/conda.sh
+ conda activate allegro_env
+ rm -rf ./results
+ nequip-train train_config.yaml --warn-unused
+ nequip-evaluate \
+     --train-dir ./results/adsorption/ \
+     --dataset-config ./test_config.yaml \
+     --metrics-config ./test_config.yaml
+
+ end_time=$(date +%Y-%m-%d_%H:%M:%S)
+ echo "Train end time: $end_time"
+ start_seconds=$(convert_to_seconds "$start_time")
+ end_seconds=$(convert_to_seconds "$end_time")
+ time_difference=$((end_seconds - start_seconds))
+ duration_h=$((time_difference / 3600))
+ duration_m=$(((time_difference % 3600) / 60))
+ duration_s=$((time_difference % 60))
+ echo "Training duration: ${duration_h}h ${duration_m}m ${duration_s}s"
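
allegro.sh wipes ./results, trains Allegro through the nequip CLI with train_config.yaml, and then scores the held-out set via test_config.yaml. To reuse the resulting checkpoints in ./results/adsorption/ outside this training session (e.g. from ASE or LAMMPS), a deployment step along the following lines would be needed; this is a sketch assuming the nequip 0.6 CLI and is not part of this commit:

conda activate allegro_env
# package the trained session into a standalone TorchScript model
nequip-deploy build --train-dir ./results/adsorption/ deployed_allegro.pth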
ads_16_length_2000/ALLEGRO/results/adsorption/best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16f064f1efd7a9171d832c7795f1cd2037a484b68debc034f7da4ef6ec6a0a30
+ size 5650623
ads_16_length_2000/ALLEGRO/results/adsorption/config.yaml ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/results/adsorption/last_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce02abd13135d4095dadc63aa68ad1c48918fc82a2c28fc8fc00d83380a58df9
+ size 5650623
ads_16_length_2000/ALLEGRO/results/adsorption/log ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/results/adsorption/metrics_batch_train.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/results/adsorption/metrics_batch_val.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/results/adsorption/metrics_epoch.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/results/adsorption/metrics_initialization.csv ADDED
@@ -0,0 +1,2 @@
+ epoch, wall, LR,validation_loss_f,validation_loss_e,validation_loss,validation_f_mae,validation_f_rmse,validation_e_mae,validation_e_rmse,validation_e/N_mae,validation_e/N_rmse
+ 0, 7.196, 0.005, 1.0651, 494.8, 516.1, 0.914, 1.23, 25.9, 26.6, 0.0649, 0.0665
ads_16_length_2000/ALLEGRO/results/adsorption/trainer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f1db7c412c1ac308cca42e85be024ace1b1185cea1d2dcfcbc4ee7a3f07d2834
+ size 11323221
ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/params.yaml ADDED
@@ -0,0 +1,13 @@
+ AtomicData_options:
+   r_max: 5.0
+ ase_args:
+   format: extxyz
+ dtype: torch.float64
+ file_name: ../trainset.xyz
+ include_frames: null
+ include_keys: null
+ key_mapping:
+   force: forces
+ nequip_version: 0.6.1
+ root: results
+ url: null
ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876
ads_16_length_2000/ALLEGRO/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f3eb6d1179ad1ceab1d0fabf921d7e445593d2623299b7dbb9f93bc193c199
+ size 888
ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/params.yaml ADDED
@@ -0,0 +1,13 @@
+ AtomicData_options:
+   r_max: 5.0
+ ase_args:
+   format: extxyz
+ dtype: torch.float64
+ file_name: ../testset.xyz
+ include_frames: null
+ include_keys: null
+ key_mapping:
+   force: forces
+ nequip_version: 0.6.1
+ root: results
+ url: null
ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876
ads_16_length_2000/ALLEGRO/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_transform.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f3eb6d1179ad1ceab1d0fabf921d7e445593d2623299b7dbb9f93bc193c199
+ size 888
ads_16_length_2000/ALLEGRO/slurm-254487.out ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/ALLEGRO/test_config.yaml ADDED
@@ -0,0 +1,63 @@
+ default_dtype: float64
+ dataset_file_name: ../testset.xyz
+ n_train: 0
+ n_val: 500
+
+ root: ./results/
+ run_name: adsorption
+ seed: 123
+ dataset_seed: 456
+ append: true
+ model_builders:
+   - allegro.model.Allegro
+   - PerSpeciesRescale
+   - StressForceOutput
+   - RescaleEnergyEtc
+ r_max: 5.0
+ l_max: 2
+ parity: o3_full   # allowed: o3_full, o3_restricted, so3
+ num_layers: 2
+ env_embed_multiplicity: 16
+ two_body_latent_mlp_latent_dimensions: [128, 256, 512]
+ two_body_latent_mlp_nonlinearity: silu
+ latent_mlp_latent_dimensions: [512, 512]
+ latent_mlp_nonlinearity: silu
+ latent_resnet: true
+ env_embed_mlp_latent_dimensions: []
+ env_embed_mlp_nonlinearity: null
+ edge_eng_mlp_latent_dimensions: [128]
+ edge_eng_mlp_nonlinearity: null
+
+ dataset: ase
+ dataset_key_mapping:
+   force: forces
+ ase_args:
+   format: extxyz
+ chemical_symbols:
+   - H
+   - C
+   - N
+   - Zn
+   - O
+
+ metrics_components:
+   - - forces
+     - mae
+     - PerSpecies: False
+       report_per_component: False
+   - - total_energy
+     - mae
+     - PerAtom: False
+   - - forces
+     - rmse
+     - PerSpecies: False
+       report_per_component: False
+   - - total_energy
+     - rmse
+     - PerAtom: False
+   - - total_energy
+     - mae
+     - PerAtom: True
+   - - total_energy
+     - rmse
+     - PerAtom: True
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ default_dtype: float64
2
+ dataset_file_name: ../trainset.xyz
3
+ n_train: 1500
4
+ n_val: 500
5
+ batch_size: 32
6
+ max_epochs: 500
7
+ train_val_split: sequential
8
+
9
+ root: ./results/
10
+ run_name: adsorption
11
+ seed: 123
12
+ dataset_seed: 456
13
+ append: true
14
+ model_builders:
15
+ - allegro.model.Allegro
16
+ - PerSpeciesRescale
17
+ - StressForceOutput
18
+ - RescaleEnergyEtc
19
+ r_max: 5.0
20
+ l_max: 2
21
+ parity: o3_full # allowed: o3_full, o3_restricted, so3
22
+ num_layers: 2
23
+ env_embed_multiplicity: 16
24
+ two_body_latent_mlp_latent_dimensions: [128, 256, 512]
25
+ two_body_latent_mlp_nonlinearity: silu
26
+ latent_mlp_latent_dimensions: [512, 512]
27
+ latent_mlp_nonlinearity: silu
28
+ latent_resnet: true
29
+ env_embed_mlp_latent_dimensions: []
30
+ env_embed_mlp_nonlinearity: null
31
+ edge_eng_mlp_latent_dimensions: [128]
32
+ edge_eng_mlp_nonlinearity: null
33
+
34
+ dataset: ase
35
+ dataset_key_mapping:
36
+ force: forces
37
+ ase_args:
38
+ format: extxyz
39
+ chemical_symbols:
40
+ - H
41
+ - C
42
+ - N
43
+ - Zn
44
+ - O
45
+
46
+
47
+ wandb: false
48
+ learning_rate: 0.005
49
+
50
+ loss_coeffs:
51
+ forces:
52
+ - 20
53
+ total_energy:
54
+ - 1
55
+
56
+ metrics_components:
57
+ - - forces
58
+ - mae
59
+ - PerSpecies: False
60
+ report_per_component: False
61
+ - - total_energy
62
+ - mae
63
+ - PerAtom: False
64
+ - - forces
65
+ - rmse
66
+ - PerSpecies: False
67
+ report_per_component: False
68
+ - - total_energy
69
+ - rmse
70
+ - PerAtom: False
71
+ - - total_energy
72
+ - mae
73
+ - PerAtom: True
74
+ - - total_energy
75
+ - rmse
76
+ - PerAtom: True
77
+
78
+ optimizer_name: Adam
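
Because train_config.yaml sets append: true and the trainer state is checkpointed to results/adsorption/trainer.pth, an interrupted run can in principle be resumed by re-invoking nequip-train against the existing results directory rather than deleting it as allegro.sh does. A sketch under that assumption (skip the rm -rf ./results step):

conda activate allegro_env
# with append: true and ./results/adsorption/ already present, training continues from the saved trainer state
nequip-train train_config.yaml --warn-unused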
ads_16_length_2000/MACE/MACE_model.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9212ec640e0cd9be41f01d5592485fab9574ae655a439021ffd8e83f00573f96
+ size 14518370
ads_16_length_2000/MACE/MACE_model_compiled.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ba7fc65cce12f7095bbfc80e573067e9fba4796f4249348aa46b661573898e7
+ size 14600334
ads_16_length_2000/MACE/MACE_model_stagetwo.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63ed8889ecbff9d06f61a03c42f429ea463b2db1d7f7ff153ce7753af1ebba2e
+ size 14519081
ads_16_length_2000/MACE/MACE_model_stagetwo_compiled.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9df1f811c967eeb6b9ea711be3b416984dd88661f127ebafda9b0bdcc7bae7f3
+ size 14617025
ads_16_length_2000/MACE/checkpoints/MACE_model_run-123.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4f1ef4fb1bdc539af723f6e04164b920176b9618ad211c4dd98832a69a79b0d
+ size 14519002
ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_epoch-0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0912974108107bde1097100bbcd91a5b1de415b04f988abc318a58b25d31a47
+ size 37012145
ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_epoch-375_swa.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6eb4078ee9620bb12fe0c2c7f434741f52bd46ec9eb98bdcd5f732bca47abacc
+ size 37013595
ads_16_length_2000/MACE/checkpoints/MACE_model_run-123_stagetwo.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6af24b8e7b9b421fb6631db9f7c251b26459922449752397e2d9fe4f7dceb703
+ size 14520545
ads_16_length_2000/NEQUIP/results/adsorption/best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762230e324f0219210c7e58d5241c0f9bd1fd79b71b1788dd9abeed01dd61939
+ size 1515064
ads_16_length_2000/NEQUIP/results/adsorption/last_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67c79fc46d2a5531ac96e2c58f75f16802b1998317637e85429bd01e68883c03
+ size 1515064
ads_16_length_2000/NEQUIP/results/adsorption/log ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/NEQUIP/results/adsorption/metrics_batch_train.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/NEQUIP/results/adsorption/metrics_batch_val.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/NEQUIP/results/adsorption/metrics_epoch.csv ADDED
The diff for this file is too large to render. See raw diff
 
ads_16_length_2000/NEQUIP/results/adsorption/metrics_initialization.csv ADDED
@@ -0,0 +1,2 @@
+ epoch, wall, LR,validation_loss_f,validation_loss_e,validation_loss,validation_f_mae,validation_f_rmse,validation_e_mae,validation_e_rmse,validation_e/N_mae,validation_e/N_rmse
+ 0, 5.991, 0.005, 0.98992, 4.5098, 24.308, 0.885, 1.19, 2.18, 2.54, 0.00544, 0.00634
ads_16_length_2000/NEQUIP/results/adsorption/trainer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b9c5161f78081b3f97abe88459dc78d91dc13f82224325d266d7594cb5020ab
+ size 2992114
ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/params.yaml ADDED
@@ -0,0 +1,13 @@
+ AtomicData_options:
+   r_max: 5.0
+ ase_args:
+   format: extxyz
+ dtype: torch.float64
+ file_name: ../trainset.xyz
+ include_frames: null
+ include_keys: null
+ key_mapping:
+   force: forces
+ nequip_version: 0.6.1
+ root: results
+ url: null
ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876
ads_16_length_2000/NEQUIP/results/processed_dataset_755a510232d188b82abbf54786272f77fa69b204/pre_transform.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f3eb6d1179ad1ceab1d0fabf921d7e445593d2623299b7dbb9f93bc193c199
+ size 888
ads_16_length_2000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/params.yaml ADDED
@@ -0,0 +1,13 @@
+ AtomicData_options:
+   r_max: 5.0
+ ase_args:
+   format: extxyz
+ dtype: torch.float64
+ file_name: ../testset.xyz
+ include_frames: null
+ include_keys: null
+ key_mapping:
+   force: forces
+ nequip_version: 0.6.1
+ root: results
+ url: null
ads_16_length_2000/NEQUIP/results/processed_dataset_d146e714f50ba36670243578e79f5a5a7b878205/pre_filter.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a86817b56c49da2bf50ecd039532de990d3ecbf7e9d65396e5fca25346ca59
+ size 876