Skip to content

Commit

Permalink
asimmodule bug fixes
Browse files · Browse the repository at this point in the history
  • Loading branch information
mkphuthi committed Dec 16, 2024
1 parent 0b52c62 commit 12a12e8
Show file tree
Hide file tree
Showing 6 changed files with 57 additions and 18 deletions.
27 changes: 23 additions & 4 deletions asimtools/asimmodules/benchmarking/distribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
Author: [email protected]
'''
from typing import Dict, List, TypeVar, Sequence
from typing import Dict, Optional
import numpy as np
import matplotlib.pyplot as plt
from ase.units import kg, m as meters
Expand All @@ -20,6 +20,8 @@ def distribution(
unit: str = 'eV',
bins: int = 50,
log: bool = True,
remap_keys: Optional[Dict] = None,
skip_failed: bool = False,
) -> Dict:
unit_factors = {'meV': 1000, 'eV': 1, 'kcal/mol': 23.0621}
unit_factor = unit_factors[unit]
Expand All @@ -38,13 +40,30 @@ def distribution(
images = get_images(**images)
results = {prop: [] for prop in unit_dict}
for i, atoms in enumerate(images):
include = True
results['natoms'].append(len(atoms))
results['energy'].append(atoms.get_potential_energy())
if remap_keys.get('energy', False):
energy = atoms.info[remap_keys['energy']]
else:
energy = atoms.get_potential_energy()
results['energy'].append(energy)
if remap_keys.get('forces', False):
forces = atoms.arrays[remap_keys['forces']]
else:
forces = atoms.get_forces()
results['forces'].extend(
list(np.array(atoms.get_forces()).flatten())
list(np.array(forces).flatten())
)
results['volume'].append(atoms.get_volume())
stress = atoms.get_stress(voigt=True)
if remap_keys.get('stress', False):
stress = atoms.arrays[remap_keys['stress']]
elif remap_keys.get('virial', False):
try:
stress = atoms.info[remap_keys['virial']] / atoms.get_volume()
except KeyError:
print('idx:', i, atoms.info, atoms.arrays)
else:
stress = atoms.get_stress(voigt=True)
results['stress'].extend(
list(np.array(stress)) * unit_factor
)
Expand Down
21 changes: 15 additions & 6 deletions asimtools/asimmodules/benchmarking/parity.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,15 +174,24 @@ def parity(

subsets = _split_data(data, nprocs)
reses = []
with Pool(nprocs) as pool:
reses = pool.map(partial(
calc_parity_data,
if nprocs > 1:
with Pool(nprocs) as pool:
reses = pool.map(partial(
calc_parity_data,
calc_id=calc_id,
properties=properties,
force_prob=force_prob,
),
subsets,
)
else:
reses = [calc_parity_data(
subset,
calc_id=calc_id,
properties=properties,
force_prob=force_prob,
),
subsets,
)
) for subset in subsets
]

res = {prop: {'ref': [], 'pred': []} for prop in properties}
results = {}
Expand Down
5 changes: 5 additions & 0 deletions asimtools/asimmodules/mace/train_mace.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,12 @@
import json
from numpy.random import randint
from mace.cli.run_train import main as mace_run_train_main
from mace.cli.create_lammps_model import main as create_lammps_model

def train_mace(
config: Union[Dict,str],
randomize_seed: bool = False,
compile_lammps: bool = False,
) -> Dict:
"""Runs MACE training
Expand All @@ -42,4 +44,7 @@ def train_mace(
logging.getLogger().handlers.clear()
sys.argv = ["program", "--config", config_file_path]
mace_run_train_main()

if compile_lammps:
create_lammps_model('mace_test_compiled.model')
return {}
2 changes: 1 addition & 1 deletion asimtools/asimmodules/phonopy/full_qha.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def full_qha(
"""

if phonopy_save_path is None:
phonopy_save_path = str((Path('..') / 'phonopy_save.yaml').resolve())
phonopy_save_path = str((Path('./phonopy_save.yaml').resolve()))
else:
phonopy_save_path = str(Path(phonopy_save_path).resolve())
ase_cubic_eos_args['image'] = image
Expand Down
14 changes: 9 additions & 5 deletions asimtools/asimmodules/workflows/update_dependencies.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import os
import subprocess
import logging
import numpy as np
from asimtools.utils import read_yaml

def update_dependencies(
Expand All @@ -34,13 +35,17 @@ def update_dependencies(
:rtype: Dict
"""

if os.environ.get('SLURM_JOB_ID', None) is None:
logging.warning('Not running on a SLURM job, skipping update_dependencies')
return {}
prev_step_dir = Path(prev_step_dir)
next_step_dir = Path(next_step_dir)
prev_output = read_yaml(prev_step_dir / 'output.yaml')
next_output = read_yaml(next_step_dir / 'output.yaml')
next_job_ids = next_output.get('job_ids', [])
job_ids = prev_output.get('job_ids', None)
if job_ids is not None:
using_slurm = np.any([(job_id is not None) for job_id in job_ids])
if using_slurm:
next_step_dir = Path(next_step_dir)
next_output = read_yaml(next_step_dir / 'output.yaml')
next_job_ids = next_output.get('job_ids', [])
job_ids = [str(job_id) for job_id in job_ids]
start_cond = 'afterok'
if skip_failed:
Expand All @@ -65,6 +70,5 @@ def update_dependencies(
with open('sbatch_stderr.txt', 'w', encoding='utf-8') as f:
f.write(completed_process.stderr)
completed_process.check_returncode()
return {}

return {}
6 changes: 4 additions & 2 deletions asimtools/asimmodules/workflows/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,13 +92,15 @@ def prepare_array_vals(
labels = [label_prefix + '-' + label for label in labels]

assert len(labels) == len(array_values), \
'Num. of array_values must match num. of labels'
f'Num. of array_values ({len(array_values)}) must match num.'\
f'of labels ({len(labels)})'

if secondary_array_values is not None:
nvals = len(secondary_array_values)
nkeys = len(secondary_key_sequences)
assert nvals == nkeys, \
f'{nvals} secondary values does not match {nkeys} secondary keys'
f'Num. of secondary values ({nvals}) does not match num. of '\
f'secondary keys ({nkeys})'
for l in secondary_array_values:
assert len(l) == len(labels), \
f"Secondary values ({len(l)}) not same length as array values"\
Expand Down

0 comments on commit 12a12e8

Please sign in to comment.