1 change: 1 addition & 0 deletions docs/source/topics/workflows/index.md
@@ -10,6 +10,7 @@ ph
pw/base
pw/relax
pw/bands
neb
q2r
pdos
```
8 changes: 8 additions & 0 deletions docs/source/topics/workflows/neb.md
@@ -0,0 +1,8 @@
(topics-workflows-neb)=

# `NebBaseWorkChain`

```{eval-rst}
.. aiida-workchain:: NebBaseWorkChain
:module: aiida_quantumespresso.workflows.neb.base
```
1 change: 1 addition & 0 deletions pyproject.toml
@@ -121,6 +121,7 @@ aiida-quantumespresso = 'aiida_quantumespresso.cli:cmd_root'
'quantumespresso.matdyn.base' = 'aiida_quantumespresso.workflows.matdyn.base:MatdynBaseWorkChain'
'quantumespresso.pdos' = 'aiida_quantumespresso.workflows.pdos:PdosWorkChain'
'quantumespresso.bands.base' = 'aiida_quantumespresso.workflows.bands.base:BandsBaseWorkChain'
'quantumespresso.neb.base' = 'aiida_quantumespresso.workflows.neb.base:NebBaseWorkChain'

[tool.flit.module]
name = 'aiida_quantumespresso'
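For reference, once the package is reinstalled (e.g. `pip install -e .`) the `quantumespresso.neb.base` entry point added above can be resolved through AiiDA's plugin system; a minimal sketch, assuming a configured AiiDA environment:

```python
# Resolve the workchain class registered under the new entry point.
from aiida.plugins import WorkflowFactory

NebBaseWorkChain = WorkflowFactory('quantumespresso.neb.base')
builder = NebBaseWorkChain.get_builder()  # populate and submit as with the other base workchains
```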
171 changes: 129 additions & 42 deletions src/aiida_quantumespresso/calculations/neb.py

Large diffs are not rendered by default.

171 changes: 138 additions & 33 deletions src/aiida_quantumespresso/parsers/neb.py
@@ -1,17 +1,18 @@
# -*- coding: utf-8 -*-
import os
from sys import stdout

from aiida.common import AttributeDict, NotExistent
from aiida.engine import ExitCode
from aiida.orm import ArrayData, Dict, TrajectoryData
import numpy

from aiida_quantumespresso.calculations.neb import NebCalculation
from aiida_quantumespresso.calculations.pw import PwCalculation
from aiida_quantumespresso.parsers.parse_raw import convert_qe_to_aiida_structure
from aiida_quantumespresso.parsers.parse_raw.neb import parse_raw_output_neb
from aiida_quantumespresso.parsers.parse_raw.pw import parse_stdout as parse_pw_stdout
from aiida_quantumespresso.parsers.parse_raw.pw import reduce_symmetries
from aiida_quantumespresso.parsers.parse_xml.exceptions import XMLParseError, XMLUnsupportedFormatError
from aiida_quantumespresso.parsers.parse_xml.pw.parse import parse_xml as parse_pw_xml
from aiida_quantumespresso.parsers.pw import PwParser
from aiida_quantumespresso.utils.mapping import get_logging_container

@@ -27,7 +28,8 @@ class NebParser(BaseParser):
class_warning_map = {
'scf convergence NOT achieved on image': 'SCF did not converge for a given image',
'Maximum CPU time exceeded': 'Maximum CPU time exceeded',
'reached the maximum number of steps': 'Maximum number of iterations reached in the image optimization',
# NB: 'step' (and not 'steps') is needed for the message to be found by the regex
'reached the maximum number of step': 'Maximum number of iterations reached in the image optimization',
}

def parse(self, **kwargs):
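A minimal sketch of why the new `class_warning_map` entry uses 'step' rather than 'steps': the marker is matched against the raw stdout line, so the shorter string also covers the plural form (treating the lookup as a plain substring test here is an assumption; the actual scanning happens in the base parser):

```python
# Hypothetical stdout line from neb.x; the shortened marker matches it as a substring.
line = '     reached the maximum number of steps: stopping'
marker = 'reached the maximum number of step'
if marker in line:
    print('Maximum number of iterations reached in the image optimization')
```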
@@ -41,6 +43,8 @@ def parse(self, **kwargs):

prefix = self.node.process_class._PREFIX

self.exit_code_xml = None

# Look for optional settings input node and potential 'parser_options' dictionary within it
# Note that we look for both NEB and PW parser options under "inputs.settings.parser_options";
# we don't even have a namespace "inputs.pw.settings".
@@ -60,50 +64,42 @@ def parse(self, **kwargs):
if base_exit_code:
return self.exit(base_exit_code, logs)

neb_out_dict, iteration_data = parse_raw_output_neb(stdout)
parsed_data.update(neb_out_dict)
try:
neb_out_dict, iteration_data = parse_raw_output_neb(stdout)
except Exception:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_READ)

num_images = parsed_data['num_of_images']
if len(neb_out_dict['errors']) > 0:
return self.exit(self.exit_codes[neb_out_dict['errors'][0]])

parsed_data.update(neb_out_dict)

# If 'num_of_images' is missing, it means that the calculation was interrupted before completing
# the first NEB minimization step, so we cannot retrieve any partial trajectory.
try:
num_images = parsed_data['num_of_images']
except KeyError:
return self.exit(self.exit_codes.ERROR_NEB_INTERRUPTED_WITHOUT_PARTIAL_TRAJECTORY)

# Now parse the information from the individual pw calculations for the different images
image_data = {}
positions = []
cells = []

for i in range(num_images):
# check if any of the known XML output file names are present, and parse the first that we find
# check if any of the known XML output file names are present, and parse it
relative_output_folder = os.path.join(f'{prefix}_{i + 1}', f'{prefix}.save')
retrieved_files = self.retrieved.base.repository.list_object_names(relative_output_folder)

for xml_filename in PwCalculation.xml_filenames:
if xml_filename in retrieved_files:
xml_file_path = os.path.join(relative_output_folder, xml_filename)
try:
with self.retrieved.base.repository.open(xml_file_path) as xml_file:
parsed_data_xml, logs_xml = parse_pw_xml(xml_file, None)
except IOError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_READ)
except XMLParseError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_PARSE)
except XMLUnsupportedFormatError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_FORMAT)
except Exception:
import traceback
traceback.print_exc()
return self.exit(self.exit_codes.ERROR_UNEXPECTED_PARSER_EXCEPTION)
# this image is dealt with, so break the inner loop and go to the next image
break
# otherwise, if none of the filenames we tried exists, exit with an error
else:
return self.exit(self.exit_codes.ERROR_MISSING_XML_FILE)
parsed_data_xml, logs_xml = self.parse_xml(relative_output_folder)

# look for pw output and parse it
pw_out_file = os.path.join(f'{prefix}_{i + 1}', 'PW.out')
try:
with self.retrieved.base.repository.open(pw_out_file, 'r') as f:
pw_out_text = f.read() # Note: read() and not readlines()
# Output file can contain the output of many scf iterations, analyse only the last one
pw_out_text = ' coordinates at iteration' + pw_out_text.split('coordinates at iteration')[-1]
except IOError:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_READ)
logs_stdout = self.exit_codes.ERROR_OUTPUT_STDOUT_READ

try:
parsed_data_stdout, logs_stdout = parse_pw_stdout(
@@ -112,9 +108,29 @@ def parse(self, **kwargs):
except Exception as exc:
return self.exit(self.exit_codes.ERROR_UNEXPECTED_PARSER_EXCEPTION.format(exception=exc))

logs_stdout['error'].remove('ERROR_OUTPUT_STDOUT_INCOMPLETE')

# Determine issues coming from electronic structure calculations
exit_code = self.validate_electronic(logs_stdout)
if exit_code:
return self.exit(exit_code)

exit_code = self.validate_premature_exit(logs_stdout)
if exit_code:
return self.exit(exit_code)

if logs_stdout.error and self.exit_code_xml:
return self.exit(self.exit_codes.ERROR_OUTPUT_FILES)

parsed_structure = parsed_data_stdout.pop('structure', {})
parsed_trajectory = parsed_data_xml.pop('trajectory', {})
parsed_parameters = parsed_data_xml

if len(parsed_structure) == 0:
# Before exiting with xml parse error, check if scheduler already reported an exit code.
if self.node.exit_status is not None:
return ExitCode(self.node.exit_status, self.node.exit_message)
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_PARSE)
PwParser.backwards_compatibility_parameters(parsed_parameters, parsed_data_stdout)

# Explicit information about k-points does not need to be queryable so we remove it from the parameters
@@ -193,7 +209,96 @@ def parse(self, **kwargs):
mep_arraydata.set_array('interpolated_mep', interp_mep)
self.out('output_mep', mep_arraydata)

if 'ERROR_OUTPUT_STDOUT_INCOMPLETE' in logs.error:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_INCOMPLETE, logs)
if logs.error:
# First check whether the scheduler already reported an exit code.
if self.node.exit_status is not None:
# The following scheduler errors should correspond to cases where we can simply restart the calculation,
# with a chance that it will succeed since the error can be transient.
recoverable_scheduler_error = self.node.exit_status in [
NebCalculation.exit_codes.ERROR_SCHEDULER_OUT_OF_WALLTIME.status,
NebCalculation.exit_codes.ERROR_SCHEDULER_NODE_FAILURE.status,
]
if recoverable_scheduler_error:
return NebCalculation.exit_codes.ERROR_NEB_INTERRUPTED_PARTIAL_TRAJECTORY
elif 'Maximum number of iterations reached in the image optimization' in logs.warning:
return NebCalculation.exit_codes.ERROR_NEB_CYCLE_EXCEEDED_NSTEP
else:
# Calculation completed successfully shortly after exceeding walltime but before being terminated by the
# scheduler. In that case 'exit_status' can be reset.
self.node.set_exit_status(None)

return self.exit(logs=logs)

def parse_xml(self, relative_output_folder):
"""Parse the XML output file for the specific image.

:param relative_output_folder: relative path to the output folder of the image.
:return: tuple of two dictionaries, first with raw parsed data and second with log messages
"""
from aiida_quantumespresso.parsers.parse_xml.exceptions import XMLParseError, XMLUnsupportedFormatError
from aiida_quantumespresso.parsers.parse_xml.pw.parse import parse_xml as parse_pw_xml

logs = get_logging_container()
parsed_data = {}

try:
retrieved_files = self.retrieved.base.repository.list_object_names(relative_output_folder)
except Exception:
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_MISSING
return parsed_data, logs

xml_filenames = [os.path.join(relative_output_folder, xml_file) for xml_file in PwCalculation.xml_filenames if xml_file in retrieved_files]
if not xml_filenames:
if not self.node.get_option('without_xml'):
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_MISSING
return parsed_data, logs

if len(xml_filenames) > 1:
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_MULTIPLE
return parsed_data, logs

try:
with self.retrieved.base.repository.open(xml_filenames[0]) as xml_file:
parsed_data, logs = parse_pw_xml(xml_file, None)
except IOError:
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_READ
except XMLParseError:
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_PARSE
except XMLUnsupportedFormatError:
self.exit_code_xml = self.exit_codes.ERROR_OUTPUT_XML_FORMAT
except Exception:
import traceback
logs.critical.append(traceback.format_exc())
self.exit_code_xml = self.exit_codes.ERROR_UNEXPECTED_PARSER_EXCEPTION

return parsed_data, logs

def validate_premature_exit(self, logs):
"""Analyze problems that will cause a pre-mature termination of the calculation, controlled or not."""

for error_label in [
'ERROR_OUT_OF_WALLTIME',
'ERROR_DEXX_IS_NEGATIVE',
'ERROR_COMPUTING_CHOLESKY',
'ERROR_DIAGONALIZATION_TOO_MANY_BANDS_NOT_CONVERGED',
'ERROR_S_MATRIX_NOT_POSITIVE_DEFINITE',
'ERROR_ZHEGVD_FAILED',
'ERROR_QR_FAILED',
'ERROR_EIGENVECTOR_CONVERGENCE',
'ERROR_BROYDEN_FACTORIZATION',
]:
if error_label in logs['error']:
return self.exit_codes.get(error_label)

def validate_electronic(self, logs):
"""Analyze problems that are specific to `electronic` scf calculations."""

if 'ERROR_ELECTRONIC_CONVERGENCE_NOT_REACHED' in logs['error']:
scf_must_converge = self.node.inputs.pw.parameters.base.attributes.get('ELECTRONS',
{}).get('scf_must_converge', True)
electron_maxstep = self.node.inputs.pw.parameters.base.attributes.get('ELECTRONS', {}).get('electron_maxstep', 1)

if electron_maxstep == 0 or not scf_must_converge:
return self.exit_codes.WARNING_ELECTRONIC_CONVERGENCE_NOT_REACHED
else:
return self.exit_codes.ERROR_ELECTRONIC_CONVERGENCE_NOT_REACHED
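To illustrate the branch above, a hypothetical `ELECTRONS` namelist for which a non-converged SCF is downgraded from an error to a warning (the dictionary shape mirrors the `pw.parameters` input accessed above):

```python
# Minimal reproduction of the decision logic in validate_electronic.
parameters = {'ELECTRONS': {'scf_must_converge': False}}  # or {'electron_maxstep': 0}

scf_must_converge = parameters.get('ELECTRONS', {}).get('scf_must_converge', True)
electron_maxstep = parameters.get('ELECTRONS', {}).get('electron_maxstep', 1)

if electron_maxstep == 0 or not scf_must_converge:
    print('WARNING_ELECTRONIC_CONVERGENCE_NOT_REACHED')
else:
    print('ERROR_ELECTRONIC_CONVERGENCE_NOT_REACHED')
```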
50 changes: 42 additions & 8 deletions src/aiida_quantumespresso/parsers/parse_raw/neb.py
@@ -7,7 +7,26 @@
"""
from qe_tools import CONSTANTS


def detect_important_message(logs, line):
"""Check a stdout line against the known error markers and append the mapped label to ``logs['errors']``."""
message_map = {
'error': {
'invalid number of pools, out of range': 'ERROR_NPOOLS_TOO_HIGH',
'invalid number of images, out of range': 'ERROR_NIMAGE_HIGHER_THAN_NPROC',
'n. of images must be divisor of nproc': 'ERROR_NIMAGE_NOT_DIVISOR_OF_NPROC',
'nimage is larger than the available number of images': 'ERROR_NIMAGE_HIGHER_THAN_IMAGES',
},
}

for marker, message in message_map['error'].items():
if hasattr(marker, 'search'):
if marker.match(line):
if message is None:
message = line
logs['errors'].append(message)
else:
if marker in line:
if message is None:
message = line
logs['errors'].append(message)
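A hypothetical call to `detect_important_message`, using one of the markers defined in the map above; the error label ends up in `logs['errors']`, which the parser later turns into the corresponding exit code:

```python
logs = {'errors': [], 'warnings': []}
detect_important_message(logs, ' ERROR: invalid number of pools, out of range')
print(logs['errors'])  # ['ERROR_NPOOLS_TOO_HIGH']
```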

def parse_raw_output_neb(stdout):
"""Parses the output of a neb calculation Receives in input the paths to the output file.

@@ -54,12 +73,15 @@ def parse_neb_text_output(data):

parsed_data = {}
parsed_data['warnings'] = []
parsed_data['errors'] = []
iteration_data = defaultdict(list)

# set by default the calculation as not converged.
parsed_data['converged'] = [False, 0]

for count, line in enumerate(data.split('\n')):
detect_important_message(parsed_data, line)

if 'initial path length' in line:
initial_path_length = float(line.split('=')[1].split('bohr')[0])
parsed_data['initial_path_length'] = initial_path_length * CONSTANTS.bohr_to_ang
@@ -99,18 +121,24 @@ def parse_neb_text_output(data):
elif 'neb: convergence achieved in' in line:
parsed_data['converged'] = [True, int(line.split('iteration')[0].split()[-1])]

num_images = parsed_data['num_of_images']
num_images = parsed_data.get('num_of_images')

iteration_lines = data.split('-- iteration')[1:]
iteration_lines = [i.split('\n') for i in iteration_lines]

for iteration in iteration_lines:
for count, line in enumerate(iteration):
if 'activation energy (->)' in line:
activ_energy = float(line.split('=')[1].split('eV')[0])
try:
activ_energy = float(line.split('=')[1].split('eV')[0])
except Exception:
activ_energy = None
iteration_data['forward_activation_energy'].append(activ_energy)
elif 'activation energy (<-)' in line:
activ_energy = float(line.split('=')[1].split('eV')[0])
try:
activ_energy = float(line.split('=')[1].split('eV')[0])
except Exception:
activ_energy = None
iteration_data['backward_activation_energy'].append(activ_energy)
elif 'image energy (eV) error (eV/A) frozen' in line:
energies = []
@@ -130,10 +158,16 @@
elif 'climbing image' in line:
iteration_data['climbing_image_auto'].append([int(_) for _ in line.split('=')[1].split(',')])
elif 'path length' in line:
path_length = float(line.split('=')[1].split('bohr')[0])
iteration_data['path_length'].append(path_length * CONSTANTS.bohr_to_ang)
try:
path_length = float(line.split('=')[1].split('bohr')[0])
iteration_data['path_length'].append(path_length * CONSTANTS.bohr_to_ang)
except Exception:
iteration_data['path_length'].append(None)
elif 'inter-image distance' in line:
image_dist = float(line.split('=')[1].split('bohr')[0])
iteration_data['image_dist'].append(image_dist * CONSTANTS.bohr_to_ang)
try:
image_dist = float(line.split('=')[1].split('bohr')[0])
iteration_data['image_dist'].append(image_dist * CONSTANTS.bohr_to_ang)
except Exception:
iteration_data['image_dist'].append(None)

return parsed_data, dict(iteration_data)
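A minimal sketch of the line format the `path length` branch above expects; the sample line is hypothetical and the exact spacing in real `neb.x` output may differ:

```python
from qe_tools import CONSTANTS

line = '  path length          =  6.6043 bohr'
path_length = float(line.split('=')[1].split('bohr')[0])  # -> 6.6043
print(path_length * CONSTANTS.bohr_to_ang)                # converted to angstrom
```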