Commit

test for nequip/allegro
yomichi committed Oct 26, 2024
1 parent f887d03 commit fa21d87
Showing 26 changed files with 372 additions and 23 deletions.
11 changes: 7 additions & 4 deletions .github/workflows/Test_abICS.yml
@@ -12,7 +12,7 @@ jobs:
strategy:
matrix:
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
testname: [Unit, Sampling, ActiveLearn]
testname: [Unit, Sampling, ActiveLearnAenet, ActiveLearnNequip]
fail-fast: false

steps:
@@ -42,8 +42,11 @@ jobs:
cd ../potts_pamc
sh ./run.sh
;;
ActiveLearn ) cd tests/integration/active_learn
sh ./install_aenet.sh
sh ./run.sh ;;
ActiveLearnAenet ) cd tests/integration/active_learn_aenet
sh ./install_aenet.sh
sh ./run.sh ;;
ActiveLearnNequip ) cd tests/integration/active_learn_nequip
sh ./install_nequip.sh
sh ./run.sh ;;
* ) echo "Unknown testname";;
esac
14 changes: 14 additions & 0 deletions abics/applications/latgas_abinitio_interface/base_trainer.py
@@ -37,15 +37,29 @@ def prepare(self, latgas_mode = True, st_dir = ""):
...

def generate_run(self, xsfdir="", generate_dir="generate"):
""" generate training dataset for specific trainer
Args:
xsfdir (str, optional): _description_. Defaults to "".
generate_dir (str, optional): . Defaults to "generate".
"""
...

def generate_wait(self):
""" wait for generate_run to finish
"""
...

def train(self, train_dir = "train"):
...

def new_baseinput(self, baseinput_dir, train_dir = "train"):
"""generate new baseinput directory/files for prediction
Args:
baseinput_dir (str): new baseinput directory
train_dir (str, optional): directory including training result. Defaults to "train".
"""
...


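For reference, a minimal sketch of a trainer class that satisfies the interface documented above. Only the five method names and their signatures come from the diff; the class name and the comments are illustrative, and the real implementations (e.g. nequip_trainer.py below) are considerably more involved.

class ExampleTrainer:  # illustrative only; not part of this commit
    def prepare(self, latgas_mode=True, st_dir=""):
        # Convert the ab initio results into this trainer's input format under st_dir.
        ...

    def generate_run(self, xsfdir="", generate_dir="generate"):
        # Build the training dataset from the XSF structures in xsfdir,
        # working inside generate_dir.
        ...

    def generate_wait(self):
        # Block until generate_run has finished.
        ...

    def train(self, train_dir="train"):
        # Run the actual training inside train_dir.
        ...

    def new_baseinput(self, baseinput_dir, train_dir="train"):
        # Export the trained model from train_dir into baseinput_dir
        # so that the sampler can use it for prediction.
        ...
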
24 changes: 5 additions & 19 deletions abics/applications/latgas_abinitio_interface/nequip_trainer.py
@@ -34,6 +34,7 @@
from ase import io
from ase.calculators.singlepoint import SinglePointCalculator
from nequip.utils import Config
from nequip.scripts import deploy as nequip_deploy


def xsf_to_ase(xsf):
@@ -107,7 +108,8 @@ def generate_run(self, xsfdir="nequipXSF", generate_dir="generate"):
xsfdir = str(pathlib.Path(xsfdir).resolve())
if os.path.exists(generate_dir):
shutil.rmtree(generate_dir)
shutil.copytree(self.generate_inputdir, generate_dir)
# shutil.copytree(self.generate_inputdir, generate_dir)
os.makedirs(generate_dir, exist_ok=True)
self.generate_dir = generate_dir
os.chdir(generate_dir)
xsf_paths = [
@@ -137,23 +139,10 @@ def train(self, train_dir = "train"):
shutil.copytree(self.train_inputdir, train_dir)
os.chdir(train_dir)

# yaml_dic = Config.from_file("input.yaml")
# is_allegro = "allegro.model.Allegro" in yaml_dic["model_builders"]
# if self.trainer_type == "nequip":
# if is_allegro:
# print("Warning: trainer_type=='nequip', but Allegro model is in input.yaml.")
# else:
# if not is_allegro:
# print("Warning: trainer_type=='allegro', but Allegro model is not in input.yaml.")

os.rename(
os.path.join(self.generate_outputdir, "structure.xyz"),
os.path.join(os.getcwd(), "structure.xyz"),
)
# command = self.train_exe + " train.in"
# print(os.getcwd())
# print(command)
# print(os.path.exists("train.in"))

with open(os.path.join(os.getcwd(), "stdout"), "w") as fi:
subprocess.run(
@@ -175,9 +164,6 @@ def new_baseinput(self, baseinput_dir, train_dir = "train"):
yaml_dic = Config.from_file("input.yaml")
root = yaml_dic["root"]
runname = yaml_dic["run_name"]
nequip_deploy = ["nequip-deploy","build","--train-dir",os.path.join(root,runname),os.path.join(baseinput,"deployed.pth")]
with open("nequip-deploy.out", "w") as fi:
subprocess.run(
nequip_deploy, stdout=fi, stderr=subprocess.STDOUT, check=True
)
nequip_deploy_args = ["build","--train-dir",os.path.join(root,runname),os.path.join(baseinput,"deployed.pth")]
nequip_deploy.main(nequip_deploy_args)
os.chdir(pathlib.Path(os.getcwd()).parent)
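
The change above replaces the external nequip-deploy subprocess with an in-process call to nequip.scripts.deploy.main(). A standalone sketch of the new code path, with root/run_name taken from the allegro input.yaml added in this commit and a hypothetical baseinput directory:

import os

from nequip.scripts import deploy as nequip_deploy

root, run_name = "results/spinel", "run"  # "root" and "run_name" from input.yaml below
baseinput = "baseinput"                   # hypothetical target directory

# Equivalent CLI: nequip-deploy build --train-dir results/spinel/run baseinput/deployed.pth
nequip_deploy.main([
    "build",
    "--train-dir", os.path.join(root, run_name),
    os.path.join(baseinput, "deployed.pth"),
])
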
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
16 changes: 16 additions & 0 deletions tests/integration/active_learn_nequip/AL.sh
@@ -0,0 +1,16 @@
#!/bin/sh
rm -f active.out
echo start AL sample
mpiexec -np 2 --oversubscribe abics_mlref input.toml
echo start parallel_run 1
sh parallel_run.sh
sleep 5

echo start AL final
mpiexec -np 2 --oversubscribe abics_mlref input.toml
sleep 5

#train
echo start training
abics_train input.toml
echo Done
3 changes: 3 additions & 0 deletions tests/integration/active_learn_nequip/MC.sh
@@ -0,0 +1,3 @@
#!/bin/sh
mpiexec -np 2 --oversubscribe abics_sampling input.toml
echo Done
64 changes: 64 additions & 0 deletions tests/integration/active_learn_nequip/MgAl2O4.vasp
@@ -0,0 +1,64 @@
Al2 Mg O4
1.0
8.1135997772 0.0000000000 0.0000000000
0.0000000000 8.1135997772 0.0000000000
0.0000000000 0.0000000000 8.1135997772
O Al Mg
32 16 8
Direct
0.237399980 0.237399980 0.237399980
0.762599945 0.762599945 0.762599945
0.512599945 0.012600004 0.737399936
0.487399966 0.987399936 0.262599975
0.012600004 0.737399936 0.512599945
0.987399936 0.262599975 0.487399966
0.737399936 0.512599945 0.012600004
0.262599975 0.487399966 0.987399936
0.987399936 0.487399966 0.262599975
0.012600004 0.512599945 0.737399936
0.487399966 0.262599975 0.987399936
0.512599945 0.737399936 0.012600004
0.262599975 0.987399936 0.487399966
0.737399936 0.012600004 0.512599945
0.237399980 0.737399936 0.737399936
0.762599945 0.262599975 0.262599975
0.512599945 0.512599945 0.237399980
0.487399966 0.487399966 0.762599945
0.012600004 0.237399980 0.012600004
0.987399936 0.762599945 0.987399936
0.987399936 0.987399936 0.762599945
0.012600004 0.012600004 0.237399980
0.487399966 0.762599945 0.487399966
0.512599945 0.237399980 0.512599945
0.737399936 0.237399980 0.737399936
0.262599975 0.762599945 0.262599975
0.237399980 0.512599945 0.512599945
0.762599945 0.487399966 0.487399966
0.762599945 0.987399936 0.987399936
0.237399980 0.012600004 0.012600004
0.737399936 0.737399936 0.237399980
0.262599975 0.262599975 0.762599945
0.000000000 0.000000000 0.000000000
0.749999940 0.249999985 0.499999970
0.249999985 0.749999940 0.499999970
0.249999985 0.499999970 0.749999940
0.749999940 0.499999970 0.249999985
0.499999970 0.749999940 0.249999985
0.499999970 0.249999985 0.749999940
0.000000000 0.499999970 0.499999970
0.749999940 0.749999940 0.000000000
0.249999985 0.249999985 0.000000000
0.249999985 0.000000000 0.249999985
0.749999940 0.000000000 0.749999940
0.499999970 0.000000000 0.499999970
0.000000000 0.749999940 0.749999940
0.000000000 0.249999985 0.249999985
0.499999970 0.499999970 0.000000000
0.374999970 0.374999970 0.374999970
0.624999940 0.624999940 0.624999940
0.374999970 0.874999940 0.874999940
0.624999940 0.124999993 0.124999993
0.874999940 0.874999940 0.374999970
0.124999993 0.124999993 0.624999940
0.874999940 0.374999970 0.874999940
0.124999993 0.624999940 0.124999993
@@ -0,0 +1,60 @@
root: results/spinel
run_name: run
seed: 123
dataset_seed: 456

# network
num_basis: 8
BesselBasis_trainable: true
PolynomialCutoff_p: 6
l_max: 1
r_max: 8.0
parity: o3_full
num_layers: 2
# num_features: 16

env_embed_multiplicity: 16
embed_initial_edge: true
two_body_latent_mlp_latent_dimensions: [32, 64]
two_body_latent_mlp_nonlinearity: silu
latent_mlp_latent_dimensions: [64, 64]
latent_mlp_nonlinearity: silu
latent_mlp_initialization: uniform
latent_resnet: true
env_embed_mlp_latent_dimensions: []
env_embed_mlp_nonlinearity: null
env_embed_mlp_initialization: uniform
edge_eng_mlp_latent_dimensions: [16]
edge_eng_mlp_nonlinearity: null
edge_eng_mlp_initialization: uniform

model_builders:
- allegro.model.Allegro
- PerSpeciesRescale
- RescaleEnergyEtc


dataset: ase
dataset_file_name: structure.xyz
chemical_symbols:
- Mg
- Al

# logging
wandb: false
# verbose: debug

# training
n_train: 80%
n_val: 20%
batch_size: 5
train_val_split: random
#shuffle: true
metrics_key: validation_loss
use_ema: true
ema_decay: 0.99
ema_use_num_updates: true
max_epochs: 100
learning_rate: 0.01
# loss function
loss_coeffs: total_energy
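
For context, this is the file that new_baseinput() in nequip_trainer.py reads with nequip's Config.from_file to locate the training results. A minimal sketch, assuming it is run from the training directory containing input.yaml:

from nequip.utils import Config

cfg = Config.from_file("input.yaml")
# new_baseinput() joins these two keys into the --train-dir path:
print(cfg["root"], cfg["run_name"])  # -> results/spinel run
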
125 changes: 125 additions & 0 deletions tests/integration/active_learn_nequip/input.toml
@@ -0,0 +1,125 @@
[sampling]
nreplicas = 2
nprocs_per_replica = 1
kTstart = 1200.0
kTend = 1500.0
nsteps = 320 # Number of steps for sampling
RXtrial_frequency = 16
sample_frequency = 4
print_frequency = 4
reload = false
seed = 12345

[sampling.solver]
type = 'nequip'
base_input_dir = './baseinput_allegro'
perturb = 0.0
# run_scheme = 'subprocess'
ignore_species = ["O"]
seed = 31415

[mlref]
nreplicas = 2
ndata = 20

[mlref.solver]
type = 'user'
function = 'mock.calc_energy'
perturb = 0.05

[train]
type = 'nequip'
base_input_dir = './allegro_train_input'
exe_command = [
'',
'nequip-train'
]
ignore_species = ["O"]
vac_map = []
restart = false

[config]
unitcell = [[8.1135997772, 0.0000000000, 0.0000000000],
[0.0000000000, 8.1135997772, 0.0000000000],
[0.0000000000, 0.0000000000, 8.1135997772]]
supercell = [1,1,1]

[[config.base_structure]]
type = "O"
coords = [
[0.237399980, 0.237399980, 0.237399980],
[0.762599945, 0.762599945, 0.762599945],
[0.512599945, 0.012600004, 0.737399936],
[0.487399966, 0.987399936, 0.262599975],
[0.012600004, 0.737399936, 0.512599945],
[0.987399936, 0.262599975, 0.487399966],
[0.737399936, 0.512599945, 0.012600004],
[0.262599975, 0.487399966, 0.987399936],
[0.987399936, 0.487399966, 0.262599975],
[0.012600004, 0.512599945, 0.737399936],
[0.487399966, 0.262599975, 0.987399936],
[0.512599945, 0.737399936, 0.012600004],
[0.262599975, 0.987399936, 0.487399966],
[0.737399936, 0.012600004, 0.512599945],
[0.237399980, 0.737399936, 0.737399936],
[0.762599945, 0.262599975, 0.262599975],
[0.512599945, 0.512599945, 0.237399980],
[0.487399966, 0.487399966, 0.762599945],
[0.012600004, 0.237399980, 0.012600004],
[0.987399936, 0.762599945, 0.987399936],
[0.987399936, 0.987399936, 0.762599945],
[0.012600004, 0.012600004, 0.237399980],
[0.487399966, 0.762599945, 0.487399966],
[0.512599945, 0.237399980, 0.512599945],
[0.737399936, 0.237399980, 0.737399936],
[0.262599975, 0.762599945, 0.262599975],
[0.237399980, 0.512599945, 0.512599945],
[0.762599945, 0.487399966, 0.487399966],
[0.762599945, 0.987399936, 0.987399936],
[0.237399980, 0.012600004, 0.012600004],
[0.737399936, 0.737399936, 0.237399980],
[0.262599975, 0.262599975, 0.762599945],
]

[[config.defect_structure]]
coords = [
[0.000000000, 0.000000000, 0.000000000],
[0.749999940, 0.249999985, 0.499999970],
[0.249999985, 0.749999940, 0.499999970],
[0.249999985, 0.499999970, 0.749999940],
[0.749999940, 0.499999970, 0.249999985],
[0.499999970, 0.749999940, 0.249999985],
[0.499999970, 0.249999985, 0.749999940],
[0.000000000, 0.499999970, 0.499999970],
[0.749999940, 0.749999940, 0.000000000],
[0.249999985, 0.249999985, 0.000000000],
[0.249999985, 0.000000000, 0.249999985],
[0.749999940, 0.000000000, 0.749999940],
[0.499999970, 0.000000000, 0.499999970],
[0.000000000, 0.749999940, 0.749999940],
[0.000000000, 0.249999985, 0.249999985],
[0.499999970, 0.499999970, 0.000000000],
[0.374999970, 0.374999970, 0.374999970],
[0.624999940, 0.624999940, 0.624999940],
[0.374999970, 0.874999940, 0.874999940],
[0.624999940, 0.124999993, 0.124999993],
[0.874999940, 0.874999940, 0.374999970],
[0.124999993, 0.124999993, 0.624999940],
[0.874999940, 0.374999970, 0.874999940],
[0.124999993, 0.624999940, 0.124999993],
]
[[config.defect_structure.groups]]
name = 'Al'
# species = ['Al'] # default
# coords = [[[0,0,0]]] # default
num = 16 #432 #16000
[[config.defect_structure.groups]]
name = 'Mg'
# species = ['Mg'] # default
# coords = [[[0,0,0]]] # default
num = 8 #216 #8000


[observer]
reference_structure = "MgAl2O4.vasp"
ignored_species = ["Al", "O"]
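
As a sanity check (not part of the commit), the Al/Mg counts in [[config.defect_structure.groups]] should match the "32 16 8" species counts of MgAl2O4.vasp above. A short sketch using the standard-library tomllib (Python 3.11+; the tomli package provides the same API for older interpreters):

import tomllib

with open("input.toml", "rb") as f:
    cfg = tomllib.load(f)

# [[config.defect_structure]] is an array of tables; the groups live in its first entry.
groups = cfg["config"]["defect_structure"][0]["groups"]
counts = {g["name"]: g["num"] for g in groups}
assert counts == {"Al": 16, "Mg": 8}
print(counts)
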
15 changes: 15 additions & 0 deletions tests/integration/active_learn_nequip/install_nequip.sh
@@ -0,0 +1,15 @@
#!/bin/sh

# This script installs pytorch, nequip, and allegro
# into python3 environment

set -ue

echo "python3 points to the following:"
which python3

echo

python3 -m pip install torch
python3 -m pip install nequip
python3 -m pip install git+https://github.com/mir-group/allegro.git
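
A quick post-install sanity check, assuming the import names match the pip package names installed above:

import torch
import nequip
import allegro  # noqa: F401  (import name assumed to be "allegro")

print("torch", torch.__version__)
print("nequip", nequip.__version__)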