diff --git a/.gitignore b/.gitignore index 1c743cdb..7c02194c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Byte-compiled / optimized / DLL files __pycache__/ +.pytest_cache/ *.py[cod] *$py.class @@ -105,3 +106,10 @@ ENV/ # mypy .mypy_cache/ + +# mac stuff +.DS_Store +**/.DS_Store +.AppleDouble +.LSOverride + diff --git a/.travis.yml b/.travis.yml index 5dcf0c36..50569a4d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,7 @@ notifications: services: - postgresql + - rabbitmq addons: postgresql: "9.5" @@ -20,18 +21,24 @@ before_install: # copied from pgtest's travis.yml - sudo apt-get install locate - sudo service postgresql stop - sudo apt-get remove postgresql - - sudo apt-get install postgresql + - sudo apt-get install postgresql-9.5 - sudo updatedb install: - pip install -U pip wheel setuptools - pip install -e git+https://github.com/aiidateam/aiida_core@develop#egg=aiida-core[testing] + - pip install -e git+https://github.com/JuDFTteam/masci-tools@master#egg=masci-tools - pip install -e . 
+ - pip install codecov env: - TEST_TYPE="unittests" -script: cd ./aiida_kkr/tests/ && ./run_all.sh +script: + - cd ./aiida_kkr/tests/ && ./run_all.sh + +after_success: + - codecov git: depth: 3 diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 156d3a6e..00000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,41 +0,0 @@ -# Changelog - -## Conventions used for this changelog - - - keep it concise but human readable - - keep the *UNRELEASED* section up to date with the `develop` branch - - create a new subsection for each release version - - each version should have the following information: - - a release date in the format `YYYY-MM-DD` - - a list of added new feature - - a list of changed functionnality of existing features - - a list of deprecated features (features that will be deleted in a future release) - - a list of removed feature (previously marked deprecated) - - a list of bug fixes - ----- - -## *UNRELEASED* (last updated: 2018-11-19) - -**Here we collect the list of *added*, *changed*, *deprecated*, *removed* and *fixed* features in preparation for the next release.** - -Start of large KKR repository holding *voronoi*, *KKRhost*, *KKRimp*, *KKRsusc*, and *PKKprime* with major refactoring of code structure. 
- - -### Added -- None - -### Changed -- kkr calculation retrieves Jij files - -### Deprecated -- KKRimporter calculation now also retrieves Jij files - -### Removed -- None - -### Fixed -- None - ----- - diff --git a/README.md b/README.md index 89d53942..698f7386 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,29 @@ +[![Documentation Status](https://readthedocs.org/projects/aiida-kkr/badge/?version=latest)](https://aiida-kkr.readthedocs.io/en/latest/?badge=latest) +[![Build Status](https://travis-ci.org/JuDFTteam/aiida-kkr.svg?branch=master)](https://travis-ci.org/JuDFTteam/aiida-kkr) +[![codecov](https://codecov.io/gh/JuDFTteam/aiida-kkr/branch/master/graph/badge.svg)](https://codecov.io/gh/JuDFTteam/aiida-kkr) +[![MIT license](http://img.shields.io/badge/license-MIT-brightgreen.svg)](http://opensource.org/licenses/MIT) +[![GitHub version](https://badge.fury.io/gh/JuDFTteam%2Faiida-kkr.svg)](https://badge.fury.io/gh/JuDFTteam%2Faiida-kkr) +[![PyPI version](https://badge.fury.io/py/aiida-kkr.svg)](https://badge.fury.io/py/aiida-kkr) + + # aiida-kkr AiiDA plugin for the KKR codes plus workflows and utility. +## Features + +* KKR calculations for bulk and interfaces +* treatment of alloys using VCA or CPA +* self-consistency, DOS and bandstructure calculations +* extraction of magnetic exchange coupling parameters (*J_ij*, *D_ij*) +* impurity embedding solving the Dyson equation +* ~~import old calculations using the calculation importer~~ (only working with aiida-core<1.0, i.e. in aiida-kkr v0.1.2) + # Installation ```shell -$ git clone https://github.com/broeder-j/aiida-kkr +$ git clone https://github.com/JuDFTteam/aiida-kkr $ cd aiida-kkr $ pip install -e . 
# also installs aiida, if missing (but not postgres) diff --git a/aiida_kkr/__init__.py b/aiida_kkr/__init__.py index f7a08ccb..2c8235d5 100644 --- a/aiida_kkr/__init__.py +++ b/aiida_kkr/__init__.py @@ -3,4 +3,4 @@ """ -__version__ = "0.1.0" +__version__ = "1.0.0" diff --git a/aiida_kkr/calculations/kkr.py b/aiida_kkr/calculations/kkr.py index 51715935..d9954b03 100644 --- a/aiida_kkr/calculations/kkr.py +++ b/aiida_kkr/calculations/kkr.py @@ -16,9 +16,9 @@ from aiida_kkr.tools.common_workfunctions import (generate_inputcard_from_structure, check_2Dinput_consistency, update_params_wf, vca_check) -from aiida_kkr.tools.common_functions import get_alat_from_bravais, get_Ang2aBohr +from masci_tools.io.common_functions import get_alat_from_bravais, get_Ang2aBohr from aiida_kkr.tools.tools_kkrimp import make_scoef -from aiida_kkr.tools.kkr_params import __kkr_default_params__ +from masci_tools.io.kkr_params import __kkr_default_params__ #define aiida structures from DataFactory of aiida RemoteData = DataFactory('remote') diff --git a/aiida_kkr/calculations/kkrimp.py b/aiida_kkr/calculations/kkrimp.py index 83744d03..5c7694d0 100644 --- a/aiida_kkr/calculations/kkrimp.py +++ b/aiida_kkr/calculations/kkrimp.py @@ -8,11 +8,11 @@ from aiida.common.exceptions import (InputValidationError, ValidationError, UniquenessError) from aiida.common.datastructures import (CalcInfo, CodeInfo) from aiida.orm import DataFactory -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.tools.tools_kkrimp import modify_potential from aiida_kkr.tools.tools_kkrimp import make_scoef -from aiida_kkr.tools.common_functions import search_string +from masci_tools.io.common_functions import search_string from aiida_kkr.calculations.voro import VoronoiCalculation import os from numpy import array, sqrt, sum, where @@ -109,28 +109,38 @@ def _use_methods(cls): 'additional_parameter': None, 
'linkname': 'parameters', 'docstring': - ("Use a node that specifies the input parameters (calculation settings)") + ("Use a node that specifies the input parameters (calculation settings).") }, "host_Greenfunction_folder": { 'valid_types': RemoteData, 'additional_parameter': None, 'linkname': 'GFhost_folder', 'docstring': - ("Use a node that specifies the host KKR calculation contaning the host Green function and tmatrix (KkrCalculation with impurity_info input)") + ("Use a node that specifies the host KKR calculation contaning " + "the host Green function and tmatrix (KkrCalculation with " + "impurity_info input).") }, "impurity_potential": { 'valid_types': SinglefileData, 'additional_parameter': None, 'linkname': 'potential', 'docstring': - ("Use a node contains the input potential") + ("Use a node contains the input potential.") }, "parent_calc_folder": { 'valid_types': RemoteData, 'additional_parameter': None, 'linkname': 'parent_calc_folder', 'docstring': - ("Use a node that specifies a parent KKRimp calculation") + ("Use a node that specifies a parent KKRimp calculation.") + }, + "impurity_info": { + 'valid_types': ParameterData, + 'additional_parameter': None, + 'linkname': 'impurity_info', + 'docstring': + ("Use a Parameter node that specifies properties " + "for a impurity calculation.") } }) return use_dict @@ -210,6 +220,28 @@ def _prepare_for_submission(self, tempfolder, inputdict): retrieve_list.append((self._OUT_LDOS_INTERPOL_BASE%(iatom, ispin)).replace(' ', '0')) retrieve_list.append((self._OUT_LMDOS_BASE%(iatom, ispin)).replace(' ', '0')) retrieve_list.append((self._OUT_LMDOS_INTERPOL_BASE%(iatom, ispin)).replace(' ', '0')) + + file = open(tempfolder.get_abs_path(self._CONFIG)) + config = file.readlines() + file.close() + itmp = search_string('NSPIN', config) + if itmp>=0: + nspin = int(config[itmp].split()[-1]) + else: + raise ValueError("Could not extract NSPIN value from config.cfg") + if 'tmatnew' in allopts and nspin>1: + 
retrieve_list.append(self._OUT_MAGNETICMOMENTS) + file = open(tempfolder.get_abs_path(self._CONFIG)) + outorb = file.readlines() + file.close() + itmp = search_string('CALCORBITALMOMENT', outorb) + if itmp>=0: + calcorb = int(outorb[itmp].split()[-1]) + else: + calcorb = 0 + if calcorb==1: + retrieve_list.append(self._OUT_ORBITALMOMENTS) + # Prepare CalcInfo to be returned to aiida (e.g. retreive_list etc.) calcinfo = CalcInfo() @@ -267,10 +299,51 @@ def _get_and_verify_hostfiles(self, inputdict): parent_calc = parent_calcs[0] # extract impurity_info - imp_info = parent_calc.get_inputs_dict().get('impurity_info', None) - if imp_info is None: - raise InputValidationError("host_Greenfunction calculation does not have an input node impurity_info") - + try: + imp_info_inputnode = inputdict.pop(self.get_linkname('impurity_info')) + if not isinstance(imp_info_inputnode, ParameterData): + raise InputValidationError("impurity_info not of type ParameterData") + imp_info = parent_calc.get_inputs_dict().get('impurity_info', None) + if imp_info is None: + raise InputValidationError("host_Greenfunction calculation does not have an input node impurity_info") + found_impurity_inputnode = True + found_impurity_parent = True + except KeyError: + imp_info = parent_calc.get_inputs_dict().get('impurity_info', None) + if imp_info is None: + raise InputValidationError("host_Greenfunction calculation does not have an input node impurity_info") + found_impurity_inputnode = False + # if impurity input is seperate input, check if it is the same as + # the one from the parent calc (except for 'Zimp'). 
If that's not the + # case, raise an error + if found_impurity_inputnode and found_impurity_parent: + if (imp_info_inputnode.get_attr('ilayer_center') == imp_info.get_attr('ilayer_center') + and imp_info_inputnode.get_attr('Rcut') == imp_info.get_attr('Rcut')): + check_consistency_imp_info = True + try: + if (imp_info_inputnode.get_attr('hcut') == imp_info.get_attr('hcut') + and imp_info_inputnode.get_attr('cylinder_orient') == imp_info.get_attr('cylinder_orient') + and imp_info_inputnode.get_attr('Rimp_rel') == imp_info.get_attr('Rimp_rel') + and imp_info_inputnode.get_attr('imp_cls') == imp_info.get_attr('imp_cls')): + print('impurity_info node from input and from previous GF calculation are compatible') + check_consistency_imp_info = True + else: + print('impurity_info node from input and from previous GF calculation are NOT compatible!. ' + 'Please check your impurity_info nodes for consistency.') + check_consistency_imp_info = False + except AttributeError: + print("Non default values of the impurity_info node from input and from previous " + "GF calculation are compatible. Default values haven't been checked") + check_consistency_imp_info = True + else: + print('impurity_info node from input and from previous GF calculation are NOT compatible!. 
' + 'Please check your impurity_info nodes for consistency.') + check_consistency_imp_info = False + if check_consistency_imp_info: + imp_info = imp_info_inputnode + else: + raise InputValidationError("impurity_info nodes (input and GF calc) are not compatible") + # check if host parent was KKRFLEX calculation hostfolderpath = parent_calc.out.retrieved.folder.abspath hostfolderpath = os.path.join(hostfolderpath, 'path') diff --git a/aiida_kkr/calculations/kkrimporter.py b/aiida_kkr/calculations/kkrimporter.py index 04716e15..7c22e1d4 100644 --- a/aiida_kkr/calculations/kkrimporter.py +++ b/aiida_kkr/calculations/kkrimporter.py @@ -12,7 +12,7 @@ from aiida.common.datastructures import calc_states from aiida.common.links import LinkType from aiida_kkr.calculations.kkr import KkrCalculation -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams from aiida_kkr.tools.common_workfunctions import structure_from_params @@ -474,4 +474,4 @@ def _OUTPUT_FILE_NAMES(self): @_OUTPUT_FILE_NAMES.setter def _OUTPUT_FILE_NAMES(self, value): self._set_attr('output_file_names', value) - \ No newline at end of file + diff --git a/aiida_kkr/parsers/kkr.py b/aiida_kkr/parsers/kkr.py index 5823b458..eb682d2e 100644 --- a/aiida_kkr/parsers/kkr.py +++ b/aiida_kkr/parsers/kkr.py @@ -9,8 +9,8 @@ from aiida.orm.data.parameter import ParameterData from aiida_kkr.calculations.kkr import KkrCalculation from aiida.common.exceptions import InputValidationError -from aiida_kkr.tools.kkrparser_functions import parse_kkr_outputfile, check_error_category -from aiida_kkr.tools.common_functions import search_string +from masci_tools.io.parsers.kkrparser_functions import parse_kkr_outputfile, check_error_category +from masci_tools.io.common_functions import search_string __copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " "IAS-1/PGI-1, Germany. 
All rights reserved.") diff --git a/aiida_kkr/parsers/kkrimp.py b/aiida_kkr/parsers/kkrimp.py index 22333049..c4e82416 100644 --- a/aiida_kkr/parsers/kkrimp.py +++ b/aiida_kkr/parsers/kkrimp.py @@ -9,7 +9,7 @@ from aiida.orm.data.parameter import ParameterData from aiida_kkr.calculations.kkrimp import KkrimpCalculation from aiida.common.exceptions import InputValidationError -from aiida_kkr.tools.kkrparser_functions import check_error_category +from masci_tools.io.parsers.kkrparser_functions import check_error_category from aiida_kkr.tools.tools_kkrimp import kkrimp_parser_functions @@ -129,7 +129,7 @@ def parse_with_retrieved(self, retrieved): filepath = out_folder.get_abs_path(fname) files['kkrflex_llyfac'] = filepath except OSError: - file_errors.append((2, "Critical error! file '{}' not found ".format(fname))) + file_errors.append((2, "Warning! file '{}' not found ".format(fname))) files['kkrflex_llyfac'] = None try: fname = self._calc._KKRFLEX_ANGLE diff --git a/aiida_kkr/parsers/voro.py b/aiida_kkr/parsers/voro.py index ea9d3402..0c3ced8b 100644 --- a/aiida_kkr/parsers/voro.py +++ b/aiida_kkr/parsers/voro.py @@ -4,7 +4,7 @@ from aiida.orm.data.parameter import ParameterData from aiida_kkr.calculations.voro import VoronoiCalculation from aiida.common.exceptions import InputValidationError -from aiida_kkr.tools.voroparser_functions import parse_voronoi_output +from masci_tools.io.parsers.voroparser_functions import parse_voronoi_output @@ -125,4 +125,4 @@ def parse_with_retrieved(self, retrieved): return success, node_list - \ No newline at end of file + diff --git a/aiida_kkr/tests/codecov.yml b/aiida_kkr/tests/codecov.yml new file mode 100644 index 00000000..03fab7c8 --- /dev/null +++ b/aiida_kkr/tests/codecov.yml @@ -0,0 +1,27 @@ +codecov: + notify: + require_ci_to_pass: yes + +coverage: + precision: 2 + round: down + range: "0...100" + + status: + project: yes + patch: yes + changes: no + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes 
+ method: no + macro: no + +comment: + layout: "header, diff" + behavior: default + require_changes: no + diff --git a/aiida_kkr/tests/config.yml b/aiida_kkr/tests/config.yml index c877f793..721330ca 100644 --- a/aiida_kkr/tests/config.yml +++ b/aiida_kkr/tests/config.yml @@ -8,8 +8,13 @@ computers: queue_name: test_queue_name codes: - tinpgen: - description: test inpgen code object - default_plugin: fleur.inpgen + voronoi: + description: test voronoi calculation + default_plugin: kkr.voro remote_computer: localhost - remote_abspath: /Users/broeder/codes/fleur_git_v27/fleur/build/inpgen + remote_abspath: /Users/ruess/sourcecodes/voronoi/voronoi.exe + kkr: + description: test KKRhost calculation + default_plugin: kkr.kkr + remote_computer: localhost + remote_abspath: /Users/ruess/sourcecodes/KKRcode/kkr.x diff --git a/aiida_kkr/tests/conftest.py b/aiida_kkr/tests/conftest.py index c8a18fbc..ca194d22 100644 --- a/aiida_kkr/tests/conftest.py +++ b/aiida_kkr/tests/conftest.py @@ -1,3 +1,7 @@ +""" +Here we define the fixtures for the tests +""" + import pytest from aiida.utils.fixtures import fixture_manager @@ -11,9 +15,16 @@ def fresh_aiida_env(aiida_env): yield aiida_env.reset_db() -''' -#usage -def test_my_stuff(fresh_aiida_env): - # run a test - print('test_my_stuf works') -''' + +# for computers and codes +@pytest.fixture(scope='session') +def computers_and_codes(aiida_env): + pass + +# for previous data +@pytest.fixture(scope='session') +def import_data(aiida_env): + from aiida.orm.importexport import import_data + for db_export_file in ['db_dump_kkrcalc.tar.gz', 'db_dump_kkrflex_create.tar.gz', 'db_dump_vorocalc.tar.gz']: + import_data('files/'+db_export_file) + diff --git a/aiida_kkr/tests/files/db_dump_kkrcalc.tar.gz b/aiida_kkr/tests/files/db_dump_kkrcalc.tar.gz new file mode 100644 index 00000000..d1842905 Binary files /dev/null and b/aiida_kkr/tests/files/db_dump_kkrcalc.tar.gz differ diff --git a/aiida_kkr/tests/files/db_dump_kkrflex_create.tar.gz 
b/aiida_kkr/tests/files/db_dump_kkrflex_create.tar.gz new file mode 100644 index 00000000..cf55bf64 Binary files /dev/null and b/aiida_kkr/tests/files/db_dump_kkrflex_create.tar.gz differ diff --git a/aiida_kkr/tests/files/db_dump_kkrimp_out.tar.gz b/aiida_kkr/tests/files/db_dump_kkrimp_out.tar.gz new file mode 100644 index 00000000..88c4d02d Binary files /dev/null and b/aiida_kkr/tests/files/db_dump_kkrimp_out.tar.gz differ diff --git a/aiida_kkr/tests/files/db_dump_vorocalc.tar.gz b/aiida_kkr/tests/files/db_dump_vorocalc.tar.gz new file mode 100644 index 00000000..e790f874 Binary files /dev/null and b/aiida_kkr/tests/files/db_dump_vorocalc.tar.gz differ diff --git a/aiida_kkr/tests/files/kkr/import_calc_old_style/test.py b/aiida_kkr/tests/files/kkr/import_calc_old_style/test.py index 7fece408..4637d629 100755 --- a/aiida_kkr/tests/files/kkr/import_calc_old_style/test.py +++ b/aiida_kkr/tests/files/kkr/import_calc_old_style/test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams p = kkrparams(params_type='kkr') p.read_keywords_from_inputcard() diff --git a/aiida_kkr/tests/run_all.sh b/aiida_kkr/tests/run_all.sh index 75ffd327..cbd64eb8 100755 --- a/aiida_kkr/tests/run_all.sh +++ b/aiida_kkr/tests/run_all.sh @@ -3,4 +3,5 @@ export AIIDA_PATH='.'; mkdir -p '.aiida'; #pytest -sv #pytest -v -pytest --cov-report=term-missing --cov=aiida_kkr --ignore=test_scf_wc_simple.py +#pytest --cov-report=term-missing --cov=aiida_kkr --ignore=test_scf_wc_simple.py +pytest --cov-report=term-missing --cov=aiida_kkr --ignore=test_scf_wc_simple.py --ignore=test_vorostart_wc.py --ignore=test_dos_wc.py --ignore=test_gf_writeout_wc.py --ignore=test_kkrimp_sub_wc.py --ignore=test_kkrimp_full_wc.py diff --git a/aiida_kkr/tests/run_all_covhtml.sh b/aiida_kkr/tests/run_all_covhtml.sh index ae760897..6fcde110 100755 --- a/aiida_kkr/tests/run_all_covhtml.sh +++ b/aiida_kkr/tests/run_all_covhtml.sh @@ -39,4 
+39,10 @@ #verdi code setup < "code_setup_voronoi.txt" #pytest -v --cov-report=html --cov=aiida_kkr --ignore=test_entrypoints.py --ignore=test_scf_wc_simple.py --ignore=test_common_workfunctions.py -pytest -v --cov-report=html --cov=aiida_kkr --ignore=test_scf_wc_simple.py +#pytest -v --cov-report=html --cov=aiida_kkr --ignore=test_scf_wc_simple.py +#pytest -s --cov-report=html --cov=aiida_kkr --ignore=test_scf_wc_simple.py +#pytest -s --cov-report=html --cov=aiida_kkr -k Test_vorostart_workflow +#pytest -s --cov-report=html --cov=aiida_kkr -k Test_kkrimp_full_workflow +#pytest -s --cov-report=html --cov=aiida_kkr -k Test_scf_workflow + +pytest --cov-report=html --cov=aiida_kkr diff --git a/aiida_kkr/tests/test_common_functions.py b/aiida_kkr/tests/test_common_functions.py deleted file mode 100644 index 44f8ebfb..00000000 --- a/aiida_kkr/tests/test_common_functions.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -""" -@author: ruess -""" - -import pytest -from aiida_kkr.tools.common_functions import (interpolate_dos, get_alat_from_bravais, - search_string, angles_to_vec, - vec_to_angles, get_version_info, - get_corestates_from_potential, - get_highest_core_state, - get_ef_from_potfile) - - -class Test_common_functions(): - """ - Tests for the common functions from tools.common_functions - """ - - def test_interpolate_dos(self): - from numpy import load, loadtxt, shape - d0 = '../tests/files/interpol/' - ef, dos, dos_int = interpolate_dos(d0, return_original=True) - assert ef == 0.5256 - dos_ref = loadtxt(d0+'new3.dos') - assert (dos_int.reshape(shape(dos_ref))-dos_ref).max()<10**-4 - assert (dos == load(d0+'/ref_dos.npy')).all() - - def test_get_alat_from_bravais(self): - from numpy import array, sqrt - bravais = array([[0.0, 0.5, 0.5], [0.5, 0.0, 0.5], [0.5, 0.5, 0.0]]) - alat = get_alat_from_bravais(bravais) - assert abs(alat - sqrt(2)/2) < 10**-10 - - def test_search_string(self): - txt = open('files/kkr/kkr_run_dos_output/output.0.txt', 
'r').readlines() - alatline = search_string('ALAT', txt) - noline = search_string('ALT', txt) - assert alatline == 23 - assert noline == -1 - - def test_angles_to_vec(self): - from numpy import pi, sqrt, array, sum - vec = angles_to_vec(2., 45./180.*pi, 45./180.*pi) - assert abs(vec[0] - 1.) < 10**-10 - assert abs(vec[1] - 1.) < 10**-10 - assert abs(vec[2] - sqrt(2)) < 10**-10 - vec = angles_to_vec(array([2., 3.]), array([45./180.*pi, pi]), array([45./180.*pi, pi/2])) - assert sum(abs(vec - array([[1., 1., sqrt(2)], [0, 0, -3]]))) < 10**-10 - - def test_vec_to_angles(self): - from numpy import array, sqrt, sum, pi - m, t, p = vec_to_angles(array([[0, 0, 1], [1, 1, sqrt(2)]])) - assert sum(abs(m - array([1, 2]))) < 10**-10 - assert sum(abs(t - array([0, pi/4.]))) < 10**-10 - assert sum(abs(p - array([0, pi/4.]))) < 10**-10 - m, t, p = vec_to_angles([1, 1, sqrt(2)]) - assert (m, t, p) == (2, pi/4., pi/4.) - - def test_get_version_info(self): - version = get_version_info('files/kkr/kkr_run_dos_output/output.0.txt') - assert version == ('v2.2-22-g4f8f5ff', 'openmp-mac', 'kkrjm_v2.2-22-g4f8f5ff_openmp-mac_20171214102522') - - def test_get_corestates_from_potential(self): - from numpy import sum, array - corestates = get_corestates_from_potential('files/kkr/kkr_run_dos_output/out_potential') - ref = ([8, 8, 8, 8], - [array([-1866.96096949, -275.8348967 , -50.32089052, -6.5316706 , -248.12312965, -41.13200278, -3.832432 , -26.5129925 ]), - array([-1866.96096949, -275.8348967 , -50.32089052, -6.5316706 , -248.12312965, -41.13200278, -3.832432 , -26.5129925 ]), - array([-1866.96096949, -275.8348967 , -50.32089052, -6.5316706 , -248.12312965, -41.13200278, -3.832432 , -26.5129925 ]), - array([-1866.96096949, -275.8348967 , -50.32089052, -6.5316706 , -248.12312965, -41.13200278, -3.832432 , -26.5129925 ])], - [array([0, 0, 0, 0, 1, 1, 1, 2]), - array([0, 0, 0, 0, 1, 1, 1, 2]), - array([0, 0, 0, 0, 1, 1, 1, 2]), - array([0, 0, 0, 0, 1, 1, 1, 2])]) - assert corestates[0] == 
ref[0] - assert sum(abs(array(corestates[1]) - array(ref[1]))) < 10**-7 - assert sum(abs(array(corestates[2]) - array(ref[2]))) < 10**-7 - - def test_get_highest_core_state(self): - from numpy import array - ncore = 8 - ener = array([-1866.96096949, -275.8348967 , -50.32089052, -6.5316706 , -248.12312965, -41.13200278, -3.832432 , -26.5129925 ]) - lval = array([0, 0, 0, 0, 1, 1, 1, 2]) - out = get_highest_core_state(ncore, ener, lval) - assert out == (1, -3.832432, '4p') - - def test_get_ef_from_potfile(self): - ef = get_ef_from_potfile('files/kkr/kkr_run_dos_output/out_potential') - assert ef == 1.05 - - \ No newline at end of file diff --git a/aiida_kkr/tests/test_common_workfunctions.py b/aiida_kkr/tests/test_common_workfunctions.py index f1424051..0a47ecd4 100644 --- a/aiida_kkr/tests/test_common_workfunctions.py +++ b/aiida_kkr/tests/test_common_workfunctions.py @@ -67,7 +67,7 @@ def test_check_2Dinput_consistency(self): def test_update_params_wf(self): from aiida_kkr.tools.common_workfunctions import update_params_wf - from aiida_kkr.tools.kkr_params import kkrparams + from masci_tools.io.kkr_params import kkrparams from aiida.orm import DataFactory ParameterData = DataFactory('parameter') @@ -162,4 +162,4 @@ def test_get_parent_paranode(self): #t6 = t.test_get_inputs_voronoi() #t7 = t.test_get_parent_paranode() #""" - \ No newline at end of file + diff --git a/aiida_kkr/tests/test_dos_wc.py b/aiida_kkr/tests/test_dos_wc.py new file mode 100755 index 00000000..8651c04b --- /dev/null +++ b/aiida_kkr/tests/test_dos_wc.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +kkr_codename = 'KKRhost' +computername = 'localhost' +queuename = '' + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_dos_workflow(): + """ + Tests for the kkr_startpot workflow + """ + + def test_dos_wc_Cu(self): + """ + simple Cu noSOC, FP, lmax2 full example using scf workflow + """ + from aiida.orm import Code, load_node, DataFactory + from 
aiida.orm.computers import Computer + from aiida.orm.querybuilder import QueryBuilder + from masci_tools.io.kkr_params import kkrparams + from aiida_kkr.workflows.dos import kkr_dos_wc + from numpy import array + + ParameterData = DataFactory('parameter') + StructureData = DataFactory('structure') + + # create or read computer and code + # first check if computer exists already in database + qb = QueryBuilder() + qb.append(Computer, tag='computer') + all_computers = qb.get_results_dict() + computer_found_in_db = False + if len(all_computers)>0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + # if it is not there create a new one + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(kkr_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkr_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkr.x')) + code.set_input_plugin_name('kkr.kkr') + code.store() + + # Then set up the structure + alat = 6.83 # in a_Bohr + abohr = 0.52917721067 # conversion factor to Angstroem units + bravais = array([[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]])# bravais vectors + a = 0.5*alat*abohr + Cu = StructureData(cell=[[a, a, 0.0], [a, 0.0, a], [0.0, a, a]]) + Cu.append_atom(position=[0.0, 0.0, 0.0], symbols='Cu') + + Cu.store() + print(Cu) + + # here we create a parameter node for the workflow input 
(workflow specific parameter) and adjust the convergence criterion. + wfd = kkr_dos_wc.get_wf_defaults() + wfd['dos_params']['kmesh'] = [10, 10, 10] + wfd['dos_params']['nepts'] = 10 + wfd['queue_name'] = queuename + wfd['use_mpi'] = True + + params_dos = ParameterData(dict=wfd) + + # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations + KKRCode = Code.get_from_string(kkr_codename+'@'+computername) + + label = 'dos Cu bulk' + descr = 'DOS workflow for Cu bulk' + + from aiida.orm.importexport import import_data + import_data('files/db_dump_kkrcalc.tar.gz') + kkr_calc_remote = load_node('3058bd6c-de0b-400e-aff5-2331a5f5d566').out.remote_folder + + # create process builder to set parameters + builder = kkr_dos_wc.get_builder() + builder.description = descr + builder.label = label + builder.kkr = KKRCode + builder.wf_parameters = params_dos + builder.remote_data = kkr_calc_remote + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) + + # check outcome + n = out['results_wf'] + n = n.get_dict() + assert n.get('successful') + assert n.get('list_of_errors') == [] + assert n.get('dos_params').get('nepts') == 10 + + d = out['dos_data'] + x = d.get_x() + y = d.get_y() + + assert sum(abs(x[1][0] - array([-19.24321191, -16.2197246 , -13.1962373 , -10.17274986, -7.14926255, -4.12577525, -1.10228794, 1.9211995 , 4.94468681, 7.96817411]))) < 10**-7 + assert sum(abs(y[0][1][0] - array([ 9.86819781e-04, 1.40981029e-03, 2.27894713e-03, 4.79231363e-03, 3.59368494e-02, 2.32929524e+00, 3.06973485e-01, 4.17629157e-01, 3.04021941e-01, 1.24897739e-01]))) < 10**-8 + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_dos_workflow() + Test.test_dos_wc_Cu() diff --git a/aiida_kkr/tests/test_entrypoints.py b/aiida_kkr/tests/test_entrypoints.py index 44485e18..02249a8b 100644 --- a/aiida_kkr/tests/test_entrypoints.py +++ 
b/aiida_kkr/tests/test_entrypoints.py @@ -3,7 +3,7 @@ import pytest @pytest.mark.usefixtures("aiida_env") -class TestAiida_kkr_entrypoints: +class TestAiida_kkr_entrypoints(): """ tests all the entry points of the Kkr plugin. Therefore if the plugin is reconized by AiiDA and installed right. @@ -11,21 +11,21 @@ class TestAiida_kkr_entrypoints: # Calculation - def test_kkrcalculation_entry_point(aiida_env): + def test_kkrcalculation_entry_point(self): from aiida.orm import CalculationFactory from aiida_kkr.calculations.kkr import KkrCalculation kkr_calculation = CalculationFactory('kkr.kkr') assert kkr_calculation == KkrCalculation - def test_kkrimportercalculation_entry_point(aiida_env): + def test_kkrimportercalculation_entry_point(self): from aiida.orm import CalculationFactory from aiida_kkr.calculations.kkrimporter import KkrImporterCalculation kkrimporter_calculation = CalculationFactory('kkr.kkrimporter') assert kkrimporter_calculation == KkrImporterCalculation - def test_kkrimpcalculation_entry_point(aiida_env): + def test_kkrimpcalculation_entry_point(self): from aiida.orm import CalculationFactory from aiida_kkr.calculations.kkrimp import KkrimpCalculation @@ -33,7 +33,7 @@ def test_kkrimpcalculation_entry_point(aiida_env): assert kkrimp_calculation == KkrimpCalculation - def test_voronoicalculation_entry_point(aiida_env): + def test_voronoicalculation_entry_point(self): from aiida.orm import CalculationFactory from aiida_kkr.calculations.voro import VoronoiCalculation @@ -43,7 +43,7 @@ def test_voronoicalculation_entry_point(aiida_env): # Data - def test_kkrstructuredata_entry_point(aiida_env): + def test_kkrstructuredata_entry_point(self): from aiida.orm import DataFactory, Data from aiida_kkr.data.kkrstructure import KkrstructureData @@ -56,14 +56,14 @@ def test_kkrstructuredata_entry_point(aiida_env): # Parsers - def test_kkr_parser_entry_point(aiida_env): + def test_kkr_parser_entry_point(self): from aiida.parsers import ParserFactory from 
aiida_kkr.parsers.kkr import KkrParser parser = ParserFactory('kkr.kkrparser') assert parser == KkrParser - def test_kkrimporter_parser_entry_point(aiida_env): + def test_kkrimporter_parser_entry_point(self): from aiida.parsers import ParserFactory from aiida_kkr.parsers.kkrimporter import KkrImporterParser @@ -71,7 +71,7 @@ def test_kkrimporter_parser_entry_point(aiida_env): assert parser == KkrImporterParser - def test_voronoi_parser_entry_point(aiida_env): + def test_voronoi_parser_entry_point(self): from aiida.parsers import ParserFactory from aiida_kkr.parsers.voro import VoronoiParser @@ -79,9 +79,17 @@ def test_voronoi_parser_entry_point(aiida_env): assert parser == VoronoiParser + def test_kkrimp_parser_entry_point(self): + from aiida.parsers import ParserFactory + from aiida_kkr.parsers.kkrimp import KkrimpParser + + parser = ParserFactory('kkr.kkrimpparser') + assert parser == KkrimpParser + + # Workchains - def test_scf_workchain_entry_point(aiida_env): + def test_scf_workchain_entry_point(self): from aiida_kkr.workflows.kkr_scf import kkr_scf_wc from aiida.orm import WorkflowFactory @@ -89,7 +97,7 @@ def test_scf_workchain_entry_point(aiida_env): assert wf == kkr_scf_wc - def test_dos_workchain_entry_point(aiida_env): + def test_dos_workchain_entry_point(self): from aiida_kkr.workflows.dos import kkr_dos_wc from aiida.orm import WorkflowFactory @@ -97,7 +105,7 @@ def test_dos_workchain_entry_point(aiida_env): assert wf == kkr_dos_wc - def test_eos_workchain_entry_point(aiida_env): + def test_eos_workchain_entry_point(self): from aiida_kkr.workflows.eos import kkr_eos_wc from aiida.orm import WorkflowFactory @@ -105,7 +113,7 @@ def test_eos_workchain_entry_point(aiida_env): assert wf == kkr_eos_wc - def test_startpot_workchain_entry_point(aiida_env): + def test_startpot_workchain_entry_point(self): from aiida_kkr.workflows.voro_start import kkr_startpot_wc from aiida.orm import WorkflowFactory @@ -113,7 +121,7 @@ def 
test_startpot_workchain_entry_point(aiida_env): assert wf == kkr_startpot_wc - def test_maginit_workchain_entry_point(aiida_env): + def test_maginit_workchain_entry_point(self): from aiida_kkr.workflows.check_magnetic_state import kkr_check_mag_wc from aiida.orm import WorkflowFactory @@ -121,11 +129,33 @@ def test_maginit_workchain_entry_point(aiida_env): assert wf == kkr_check_mag_wc - def test_conv_workchain_entry_point(aiida_env): + def test_conv_workchain_entry_point(self): from aiida_kkr.workflows.check_para_convergence import kkr_check_para_wc from aiida.orm import WorkflowFactory wf = WorkflowFactory('kkr.convergence_check') assert wf == kkr_check_para_wc + + + def test_gf_writeout_workchain_entry_point(self): + from aiida_kkr.workflows.gf_writeout import kkr_flex_wc + from aiida.orm import WorkflowFactory + wf = WorkflowFactory('kkr.gf_writeout') + assert wf == kkr_flex_wc + + + def test_kkrimp_workchain_entry_point(self): + from aiida_kkr.workflows.kkr_imp import kkr_imp_wc + from aiida.orm import WorkflowFactory + + wf = WorkflowFactory('kkr.imp') + assert wf == kkr_imp_wc + + + def test_kkrimp_sub_workchain_entry_point(self): + from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc + from aiida.orm import WorkflowFactory + wf = WorkflowFactory('kkr.imp_sub') + assert wf == kkr_imp_sub_wc diff --git a/aiida_kkr/tests/test_gf_writeout_wc.py b/aiida_kkr/tests/test_gf_writeout_wc.py new file mode 100755 index 00000000..b86b3ed8 --- /dev/null +++ b/aiida_kkr/tests/test_gf_writeout_wc.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +kkr_codename = 'KKRhost' +computername = 'localhost' +queuename = '' + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_gf_writeout_workflow(): + """ + Tests for the kkr_startpot workflow + """ + + def test_kkrflex_writeout_wc(self): + """ + simple Cu noSOC, FP, lmax2 full example using scf workflow + """ + from aiida.orm import Code, load_node, DataFactory + from 
aiida.orm.computers import Computer + from aiida.orm.querybuilder import QueryBuilder + from masci_tools.io.kkr_params import kkrparams + from aiida_kkr.workflows.gf_writeout import kkr_flex_wc + from numpy import array + import os + + ParameterData = DataFactory('parameter') + StructureData = DataFactory('structure') + + # create or read computer and code + # first check if computer exists already in database + qb = QueryBuilder() + qb.append(Computer, tag='computer') + all_computers = qb.get_results_dict() + computer_found_in_db = False + if len(all_computers)>0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + # if it is not there create a new one + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(kkr_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkr_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkr.x')) + code.set_input_plugin_name('kkr.kkr') + code.store() + + + # here we create a parameter node for the workflow input (workflow specific parameter) and adjust the convergence criterion. 
+ wfd =kkr_flex_wc.get_wf_defaults() + wfd['queue_name'] = queuename + wfd['use_mpi'] = True + options = ParameterData(dict=wfd) + + # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations + KKRCode = Code.get_from_string(kkr_codename+'@'+computername) + + imp_info = ParameterData(dict={'Rcut':1.01, 'ilayer_center': 0, 'Zimp':[79.]}) + + label = 'GF_writeout Cu bulk' + descr = 'GF_writeout workflow for Cu bulk' + + from aiida.orm.importexport import import_data + import_data('files/db_dump_kkrcalc.tar.gz') + kkr_calc_remote = load_node('3058bd6c-de0b-400e-aff5-2331a5f5d566').out.remote_folder + + # create process builder to set parameters + builder = kkr_flex_wc.get_builder() + builder.description = descr + builder.label = label + builder.kkr = KKRCode + builder.options_parameters = options + builder.remote_data = kkr_calc_remote + builder.imp_info = imp_info + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) + + n = out['calculation_info'] + n = n.get_dict() + + assert n.get('successful') + assert n.get('list_of_errors') == [] + + d = out['GF_host_remote'] + assert isinstance(d, DataFactory('remote')) + + kkrflex_retrieved = load_node(n.get('pk_flexcalc')) + kkrflex_retrieved = kkrflex_retrieved.out.retrieved + kkrflex_path = kkrflex_retrieved.get_abs_path('') + for name in 'tmat green atominfo intercell_cmoms intercell_ref'.split(): + assert 'kkrflex_'+name in os.listdir(kkrflex_path) + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_gf_writeout_workflow() + Test.test_kkrflex_writeout_wc() diff --git a/aiida_kkr/tests/test_kkr_parser.py b/aiida_kkr/tests/test_kkr_parser.py new file mode 100755 index 00000000..026ac9e7 --- /dev/null +++ b/aiida_kkr/tests/test_kkr_parser.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +# tests 
+@pytest.mark.usefixtures("aiida_env") +class Test_kkr_parser(): + """ + Tests for the kkr parser + """ + + def test_parse_kkr_calc(self): + """ + ... + """ + from aiida.orm import load_node + from aiida_kkr.parsers.kkr import KkrParser + from aiida.orm.importexport import import_data + import_data('files/db_dump_kkrcalc.tar.gz') + kkr_calc = load_node('3058bd6c-de0b-400e-aff5-2331a5f5d566') + parser = KkrParser(kkr_calc) + success, outnodes = parser.parse_from_calc() + assert success diff --git a/aiida_kkr/tests/test_kkrcalc.py b/aiida_kkr/tests/test_kkrcalc.py new file mode 100755 index 00000000..7f7ea032 --- /dev/null +++ b/aiida_kkr/tests/test_kkrcalc.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +codename = 'KKRhost@iff003' +queuename = 'th1_node' +eps = 10**-14 # threshold for float comparison equivalence + +from test_vorocalc import wait_for_it + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_kkr_calculation(): + """ + Tests for the kkr calculation + """ + + def test_kkr_from_voronoi(self): + """ + simple Cu noSOC, FP, lmax2 full example + """ + from aiida.orm import Code, load_node, DataFactory + from masci_tools.io.kkr_params import kkrparams + ParameterData = DataFactory('parameter') + + # load necessary files from db_dump files + from aiida.orm.importexport import import_data + import_data('files/db_dump_vorocalc.tar.gz') + import_data('files/db_dump_kkrcalc.tar.gz') + + # first load parent voronoi calculation + voro_calc = load_node('559b9d9b-3525-402e-9b24-ecd8b801853c') + + # extract and update KKR parameter (add missing values) + params = kkrparams(**voro_calc.inp.parameters.get_dict()) + params.set_multiple_values(RMAX=7., GMAX=65.) 
+ params_node = ParameterData(dict=params.get_dict()) + + # load code from database and create new voronoi calculation + code = Code.get_from_string(codename) + + calc = code.new_calc() + calc.set_resources({'num_machines':1, 'tot_num_mpiprocs':1}) + calc.use_parameters(params_node) + calc.set_queue_name(queuename) + calc.use_parent_folder(voro_calc.out.remote_folder) + + # now store all nodes and submit calculation + #calc.store_all() + #calc.submit() + calc.submit_test() + """ + + # now wait for the calculation to finish + #wait_for_it(calc) + + # finally check some output + print '\n\ncheck values ...\n-------------------------------------------------' + + test_ok = calc.get_state() == u'FINISHED' + print 'calculation state', calc.get_state(), 'OK?', test_ok + assert test_ok + + test_ok = calc.res.alat_internal == 4.82381975 + print 'alat internal units', calc.res.alat_internal, 'OK?', test_ok + assert test_ok + + test_ok = abs(calc.res.charge_valence_states_per_atom[0]-10.965496)0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + print comp + # if it is not there create a new one + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(kkrimp_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkrimp_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkrflex.exe')) + 
code.set_input_plugin_name('kkr.kkrimp') + code.store() + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(kkr_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkr_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkr.x')) + code.set_input_plugin_name('kkr.kkr') + code.store() + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(voro_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = voro_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/voronoi.exe')) + code.set_prepend_text('ln -s /Users/ruess/sourcecodes/aiida/codes_localhost/ElementDataBase .') + code.set_input_plugin_name('kkr.voro') + code.store() + + + options, wfd, voro_aux_settings =kkr_imp_wc.get_wf_defaults() + + wfd['nsteps'] = 20 + wfd['strmix'] = 0.05 + options['queue_name'] = queuename + options['use_mpi'] = True + voro_aux_settings['check_dos'] = False + voro_aux_settings['dos_params']['kmesh'] = [10,10,10] + voro_aux_settings['dos_params']['nepts'] = 10 + voro_aux_settings['natom_in_cls_min'] = 50 + voro_aux_settings['rclustz'] = 1.5 + + options = ParameterData(dict=options) + voro_aux_settings = ParameterData(dict=voro_aux_settings) + wf_inputs = ParameterData(dict=wfd) + + # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations + KKRhostCode = Code.get_from_string(kkr_codename+'@'+computername) + KKRimpCode = Code.get_from_string(kkrimp_codename+'@'+computername) + VoroCode = Code.get_from_string(voro_codename+'@'+computername) + + imp_info = ParameterData(dict={'Rcut':1.01, 'ilayer_center': 0, 'Zimp':[30.]}) + + from aiida.orm.importexport import import_data + 
import_data('files/db_dump_kkrcalc.tar.gz') + kkr_calc_remote = load_node('3058bd6c-de0b-400e-aff5-2331a5f5d566').out.remote_folder + + label = 'kkrimp_scf full Cu host_in_host' + descr = 'kkrimp_scf full workflow for Cu bulk inlcuding GF writeout and vorostart for starting potential' + + # create process builder to set parameters + builder = kkr_imp_wc.get_builder() + builder.description = descr + builder.label = label + builder.kkrimpcode = KKRimpCode + builder.vorocode = VoroCode + builder.kkrcode = KKRhostCode + builder.options_parameters = options + builder.voro_aux_parameters = voro_aux_settings + builder.wf_parameters = wf_inputs + builder.impurity_info = imp_info + builder.remote_converged_host = kkr_calc_remote + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) + + # check outcome + n = out['workflow_info'] + n = n.get_dict() + for sub in 'auxiliary_voronoi gf_writeout kkr_imp_sub'.split(): + assert sub in n.get('used_subworkflows').keys() + + kkrimp_sub = load_node(n['used_subworkflows']['kkr_imp_sub']) + assert kkrimp_sub.out.calculation_info.get_attr('successful') + + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_kkrimp_full_workflow() + Test.test_kkrimp_full_wc() diff --git a/aiida_kkr/tests/test_kkrimp_parser.py b/aiida_kkr/tests/test_kkrimp_parser.py new file mode 100755 index 00000000..9859abb8 --- /dev/null +++ b/aiida_kkr/tests/test_kkrimp_parser.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_kkrimp_parser(): + """ + Tests for the kkrimp calculation + """ + + def test_parse_kkrimp_calc(self): + """ + simple Cu noSOC, FP, lmax2 + """ + from aiida.orm import load_node + from aiida_kkr.parsers.kkrimp import KkrimpParser + from aiida.orm.importexport import import_data + 
import_data('files/db_dump_kkrimp_out.tar.gz') + kkrimp_calc = load_node('eab8db1b-2cc7-4b85-a524-0df4ff2b7da6') + parser = KkrimpParser(kkrimp_calc) + success, outnodes = parser.parse_from_calc() + assert success + + +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + t = Test_kkrimp_parser() + t.test_parse_kkrimp_calc() diff --git a/aiida_kkr/tests/test_kkrimp_sub_wc.py b/aiida_kkr/tests/test_kkrimp_sub_wc.py new file mode 100755 index 00000000..a083558c --- /dev/null +++ b/aiida_kkr/tests/test_kkrimp_sub_wc.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +kkrimp_codename = 'KKRimp' +computername = 'localhost' +queuename = '' + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_kkrimp_scf_workflow(): + """ + Tests for the kkrimp_scf workflow + """ + + def test_kkrimp_sub_wc(self): + """ + simple Cu noSOC, FP, lmax2 full example using scf workflow for impurity host-in-host + """ + from aiida.orm import Code, load_node, DataFactory + from aiida.orm.computers import Computer + from aiida.orm.querybuilder import QueryBuilder + from masci_tools.io.kkr_params import kkrparams + from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc + from numpy import array + + ParameterData = DataFactory('parameter') + StructureData = DataFactory('structure') + + # create or read computer and code + # first check if computer exists already in database + qb = QueryBuilder() + qb.append(Computer, tag='computer') + all_computers = qb.get_results_dict() + computer_found_in_db = False + if len(all_computers)>0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + print comp + # if it is not there create a new one + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', 
scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(kkrimp_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkrimp_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkrflex.exe')) + code.set_input_plugin_name('kkr.kkrimp') + code.store() + + + options, wfd =kkr_imp_sub_wc.get_wf_defaults() + + wfd['nsteps'] = 20 + wfd['strmix'] = 0.05 + options['queue_name'] = queuename + options['use_mpi'] = True + + options = ParameterData(dict=options) + + # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations + KKRimpCode = Code.get_from_string(kkrimp_codename+'@'+computername) + + # import previous GF writeout + from aiida.orm.importexport import import_data + import_data('files/db_dump_kkrflex_create.tar.gz') + GF_host_calc = load_node('de9b5093-25e7-407e-939e-9282c4431343') + + # now create a SingleFileData node containing the impurity starting potential + from aiida_kkr.tools.common_workfunctions import neworder_potential_wf + from numpy import loadtxt + neworder_pot1 = [int(i) for i in loadtxt(GF_host_calc.out.retrieved.get_abs_path('scoef'), skiprows=1)[:,3]-1] + settings_dict = {'pot1': 'out_potential', 'out_pot': 'potential_imp', 'neworder': neworder_pot1} + settings = ParameterData(dict=settings_dict) + startpot_imp_sfd = neworder_potential_wf(settings_node=settings, parent_calc_folder=GF_host_calc.out.remote_folder) + + label = 'kkrimp_scf Cu host_in_host' + descr = 'kkrimp_scf workflow for Cu bulk' + + # create process builder to set parameters + builder = kkr_imp_sub_wc.get_builder() + builder.description = 
descr + builder.label = label + builder.kkrimp = KKRimpCode + builder.options_parameters = options + builder.GF_remote_data = GF_host_calc.out.remote_folder + builder.wf_parameters = ParameterData(dict=wfd) + builder.host_imp_startpot = startpot_imp_sfd + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) + + n = out['calculation_info'] + n = n.get_dict() + + assert n.get('successful') + assert n.get('convergence_reached') + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_kkrimp_scf_workflow() + Test.test_kkrimp_sub_wc() diff --git a/aiida_kkr/tests/test_kkrimp_tools.py b/aiida_kkr/tests/test_kkrimp_tools.py index 78703576..73b0b47d 100644 --- a/aiida_kkr/tests/test_kkrimp_tools.py +++ b/aiida_kkr/tests/test_kkrimp_tools.py @@ -51,8 +51,7 @@ class Test_kkrimp_parser_functions(): """ Tests for the KKRimp parser functions. """ def test_parse_outfiles_full(self): - path = '../files/kkrimp_parser/test1/' - path = '/Users/ruess/sourcecodes/aiida/repositories/repository-ruess_test/repository/node/5e/cb/721a-bd7c-4abf-b41f-ab0f701f1408/path/' + path = 'files/kkrimp_parser/test1/' files = {} files['outfile'] = path+'out_kkrimp' files['out_log'] = path+'out_log.000.txt' diff --git a/aiida_kkr/tests/test_kkrimpcalc.py b/aiida_kkr/tests/test_kkrimpcalc.py new file mode 100755 index 00000000..c6cfcf15 --- /dev/null +++ b/aiida_kkr/tests/test_kkrimpcalc.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +codename = 'KKRimp@iff003' +queuename = 'th1_node' + +from test_vorocalc import wait_for_it + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_kkrimp_calculation(): + """ + Tests for the kkrimp calculation + """ + + def test_host_in_host(self): + """ + simple Cu noSOC, FP, lmax2 + """ + from aiida.orm import Code, load_node, DataFactory + from masci_tools.io.kkr_params import kkrparams + 
ParameterData = DataFactory('parameter') + + # first load parent voronoi calculation + from aiida.orm.importexport import import_data + import_data('files/db_dump_kkrflex_create.tar.gz') + GF_host_calc = load_node('de9b5093-25e7-407e-939e-9282c4431343') #'9459b4ea-ead5-4268-aa29-1c5e18654d77') + + # now create a SingleFileData node containing the impurity starting potential + from aiida_kkr.tools.common_workfunctions import neworder_potential_wf + from numpy import loadtxt + neworder_pot1 = [int(i) for i in loadtxt(GF_host_calc.out.retrieved.get_abs_path('scoef'), skiprows=1)[:,3]-1] + settings_dict = {'pot1': 'out_potential', 'out_pot': 'potential_imp', 'neworder': neworder_pot1} + settings = ParameterData(dict=settings_dict) + startpot_imp_sfd = neworder_potential_wf(settings_node=settings, parent_calc_folder=GF_host_calc.out.remote_folder) + + # set 1 simple mixing step + kkrimp_params = kkrparams(params_type='kkrimp') + kkrimp_params.set_multiple_values(SCFSTEPS=1, IMIX=0, MIXFAC=0.05) + ParamsKKRimp = ParameterData(dict=kkrimp_params.get_dict()) + + # create new KKRimp calculation + kkrimp_code = Code.get_from_string(codename) + + calc = kkrimp_code.new_calc() + calc.set_resources({'num_machines':1, 'tot_num_mpiprocs':1}) + calc.use_parameters(ParamsKKRimp) + calc.set_queue_name(queuename) + calc.use_host_Greenfunction_folder(GF_host_calc.out.remote_folder) + calc.use_impurity_potential(startpot_imp_sfd) + + + # now store all nodes and submit calculation + #calc.store_all() + #calc.submit() + calc.submit_test() + """ + + # now wait for the calculation to finish + #wait_for_it(calc) + + # finally check some output + print '\n\ncheck values ...\n-------------------------------------------------' + + test_ok = calc.get_state() == u'FINISHED' + print 'calculation state', calc.get_state(), 'OK?', test_ok + assert test_ok + + print '\ndone with checks\n' + """ + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not 
is_dbenv_loaded(): + load_dbenv() + Test = Test_kkrimp_calculation() + Test.test_host_in_host() diff --git a/aiida_kkr/tests/test_kkrparams.py b/aiida_kkr/tests/test_kkrparams.py deleted file mode 100644 index 282289eb..00000000 --- a/aiida_kkr/tests/test_kkrparams.py +++ /dev/null @@ -1,449 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Created on Wed Nov 15 16:43:31 2017 - -@author: ruess -""" - -import pytest -from aiida_kkr.tools.kkr_params import kkrparams - - -# helper functions - -def check_full_dict(p,p0): - """ - helper function that compares full dictionary - """ - from numpy import ndarray, array - for key in [i[0] for i in p.get_set_values()]: - v = p.get_value(key) - v0 = p0.get_value(key) - if type(v) != list and type(v) != ndarray: - if v!=v0: - print(key, v, v0) - assert v==v0 - elif type(v[0]) != str: - if abs(array(v)-array(v0)).max()>=10**-14: - print(key, abs(array(v)-array(v0)).max()) - assert abs(array(v)-array(v0)).max()<10**-14 - else: - if set(v)-set(v0)!=set(): - print(key, set(v)-set(v0)) - assert set(v)-set(v0)==set() - -# tests - - -class Test_create_and_set_keys(): - def test_create_params_with_inital_values(self): - p = kkrparams(RBASIS=[0,0,0], params_type='voronoi') - assert type(p)==kkrparams - assert p.values[''] == [0,0,0] - - def test_default_values(self): - p = kkrparams() - assert p.values['EMIN'] is None - - def test_set_single_value(self): - p = kkrparams() - p.set_value('EMIN', 2) - assert p.values['EMIN'] == 2. - assert p.values['EMAX'] is None - - def test_set_multiple_values(self): - p = kkrparams() - p.set_multiple_values(EMIN=1, EMAX=2) - assert p.values['EMIN']== 1. - assert p.values['EMAX']== 2. 
- - -class Test_capture_wrong_input(): - def test_wrong_input_type(self): - p = kkrparams() - known_error = False - try: - p.set_value('EMIN', '2') - except TypeError: - known_error = True - assert known_error - - known_error = False - try: - p.set_value('EMIN', False) - except TypeError: - known_error = True - assert known_error - - def test_wrong_input_array_dimension(self): - p = kkrparams() - from numpy import array, sqrt - bravais = array([[0.7071067812, -0.5, 0.0], [0.7071067812, 0.5, 0.0], [sqrt(2), 0.0, 0.866025404]]) - - # atom positions in relative coordinates - basis_vectors = [] - for iatom in range(6): - tmp = array([0, 0, 0])+iatom*array([0.5, 0.5, bravais[2, 2]]) - tmp[0] = tmp[0]%1 - tmp[1] = tmp[1]%1 - print(iatom, tmp) - basis_vectors.append(tmp) - basis_vectors = array(basis_vectors) - p.set_value('INTERFACE', True) - p.set_value('', array([[1,1],[0,1]])) - - def test_input_consistency_check_fail(self): - knownError = False - try: - p = kkrparams(ZATOM=29., LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], RMAX=7, GMAX=65, NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1) - p.set_value('LDAU_PARA', [1,2]) - p._check_input_consistency() - except TypeError: - knownError = True - assert knownError - - def test_inconsistency_bulk_mode_bravais(self): - p = kkrparams(LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,0]], NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1, RMAX=7, GMAX=65, ZATOM=29.) 
- knownError = False - try: - p.fill_keywords_to_inputfile() - except ValueError: - knownError = True - assert knownError - - -class Test_get_info(): - def test_get_mandatory(self): - p = kkrparams() - manlist = p.get_all_mandatory() - assert set(manlist)==set(['LMAX', 'NAEZ', 'BRAVAIS', 'RMAX', 'GMAX', 'NSPIN', '', 'ALATBASIS', '']) - - def test_get_set_values(self): - p = kkrparams() - setlist = p.get_set_values() - assert setlist==[] - - def test_get_set_values2(self): - from numpy import array - p = kkrparams() - p.set_multiple_values(EMIN=1, EMAX=2) - setlist = p.get_set_values() - assert set(array(setlist).flatten()) == set(array([['EMIN', 1.], ['EMAX', 2.]]).flatten()) - - def test_get_description(self): - p = kkrparams() - desc = p.get_description('EMIN') - assert desc=='Accuracy, Valence energy contour: Lower value (in Ryd) for the energy contour' - - def test_get_type(self): - p = kkrparams() - tlist = p.get_type('BRAVAIS') - assert tlist == [float, float, float, float, float, float, float, float, float] - - def test_is_mandatory(self): - p = kkrparams() - man = p.is_mandatory('EMAX') - assert (not man) - - def test_get_value(self): - p = kkrparams(LMAX=3) - # check for KeyError if wrong key is checked - known_error = False - try: - p.get_value('something_wrong') - except KeyError: - known_error = True - assert known_error - # check for returning unset value - npol = p.get_value('NPOL') - assert npol == None - # check correct LMAX value - lmax = p.get_value('LMAX') - assert lmax == 3 - # check for returning lists for RUNOPT and TESTOPT - runopt = p.get_value('RUNOPT') - testopt = p.get_value('TESTOPT') - assert runopt == [] - assert testopt == [] - p = kkrparams(TESTOPT=['test1', 'test2'], RUNOPT=['NEWSOSOL']) - runopt = p.get_value('RUNOPT') - testopt = p.get_value('TESTOPT') - assert runopt == ['NEWSOSOL'] - assert set(testopt) == set(['test1', 'test2']) - - - -class Test_fill_inputfile(): - def test_fill_inputfile_minimal_Voronoi(self): - p = 
kkrparams(ZATOM=29., LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], RCLUSTZ=1.5, NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1) - p.fill_keywords_to_inputfile(is_voro_calc=True) - txt = open('inputcard').readlines() - ref = ['ALATBASIS= 1.00000000000000\n', 'BRAVAIS\n', '1.00000000000000 0.00000000000000 0.00000000000000\n', - '0.00000000000000 1.00000000000000 0.00000000000000\n', '0.00000000000000 0.00000000000000 1.00000000000000\n', 'NAEZ= 1\n', '\n', - '0.00000000000000 0.00000000000000 0.00000000000000\n', '\n', '29.00000000000000\n', 'NSPIN= 2\n', 'LMAX= 2\n', 'RCLUSTZ= 1.50000000000000\n'] - done=False - while not done: - try: - txt.remove('\n') - except ValueError: - done = True - assert len(txt)==len(ref) - txt.sort() - ref.sort() - print(txt, ref) - for i in range(len(txt)): - print(i, txt[i], ref[i]) - assert set(txt[i].split())==set(ref[i].split()) - - def test_fill_inputfile_KKR(self): - reffile = ['ALATBASIS= 1.00000000000000\n', 'BRAVAIS\n', '1.00000000000000 0.00000000000000 0.00000000000000\n', '\n', '29.00000000000000\n', - '0.00000000000000 1.00000000000000 0.00000000000000\n', '0.00000000000000 0.00000000000000 1.00000000000000\n', 'NAEZ= 1\n', - '\n', '0.00000000000000 0.00000000000000 0.00000000000000\n', 'NSPIN= 2\n', 'LMAX= 2\n', - 'RCLUSTZ= 1.50000000000000\n', 'RMAX= 7.00000000000000\n', 'GMAX= 65.00000000000000\n'] - p = kkrparams(ZATOM=29., LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], RMAX=7, GMAX=65, RCLUSTZ=1.5, NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1) - p.fill_keywords_to_inputfile() - txt = open('inputcard').readlines() - done=False - while not done: - try: - txt.remove('\n') - except ValueError: - done = True - assert len(txt)==len(reffile) - txt.sort() - reffile.sort() - for i in range(len(txt)): - assert set(txt[i].split())==set(reffile[i].split()) - - def test_fill_inputfile_empty_check(self): - p = kkrparams(LMAX=2, NAEZ=1) - known_error = False - try: - p.fill_keywords_to_inputfile() - except ValueError: - known_error = True - 
assert known_error - - def test_fill_inputfile_all_keys(self): - """Example filling all keys""" - from numpy import array, sqrt - - alat=5.416871386 - naez=6 - bravais=array([[0.7071067812, -0.5, 0.0], [0.7071067812, 0.5, 0.0], [sqrt(2), 0.0, 0.866025404]]) - lmax=2 - nspin=2 - nucl_numbers=[0,0,26,27,26,27,0,0] - cpa_info = [naez+2, [1., 1., 0.98, 0.02, 0.98, 0.02, 1., 1.], [1, 2, 3, 3, 4, 4, 5, 6]] - npol=4 - npt1, npt2, npt3 = 3, 10, 3 - tempr= 800 - basis_vectors = [] - for iatom in range(naez): - tmp = array([0, 0, 0])+iatom*array([0.5, 0.5, bravais[2, 2]]) - tmp[0] = tmp[0]%1 - tmp[1] = tmp[1]%1 - print(iatom, tmp) - basis_vectors.append(tmp) - basis_vectors = array(basis_vectors) - natyp = cpa_info[0] - cpa_conc = cpa_info[1] - cpa_sites = cpa_info[2] - ins=1 - kshape=ins - rmax, gmax= 7, 65 - rcls=1.5 - bzdivide=[10,10,0] - emin=-0.4 - p = kkrparams() - p.set_multiple_values(ZATOM=nucl_numbers, RBASIS=basis_vectors, BRAVAIS=bravais, NAEZ=naez, ALATBASIS=alat) - p.set_multiple_values(NSPIN=nspin, LMAX=lmax, NPOL=npol, NPT1=npt1, NPT2=npt2, NPT3=npt3, TEMPR=tempr) - p.set_multiple_values(RMAX=rmax, GMAX=gmax) - p.set_multiple_values(RCLUSTZ=rcls, BZDIVIDE=bzdivide, EMIN=emin) - p.set_multiple_values(INS=ins, KSHAPE=kshape) - p.set_multiple_values(INTERFACE=True, NLBASIS=1, NRBASIS=1, - ZPERIODL=array([-0.5, -0.5, -bravais[2, 2]]), - ZPERIODR=array([0.5, 0.5, bravais[2, 2]]), - RBLEFT=basis_vectors[0]+array([-0.5, -0.5, -bravais[2, 2]]), - RBRIGHT=basis_vectors[naez-1]+array([0.5, 0.5, bravais[2, 2]])) - p.set_value('LINIPOL', True) - p.set_value('XINIPOL', [1 for i in range(natyp)]) - p.set_value('HFIELD', 0.02) - p.set_value('NSTEPS', 1) - p.set_value('IMIX', 0) - p.set_value('STRMIX', 0.01) - p.set_value('CARTESIAN', False) - p.set_multiple_values(KAOEZR=1, KAOEZL=1, FPRADIUS=[-1 for i in range(natyp)], RCLUSTXY=rcls, - TKSEMI=800,EMAX=1, NPOLSEMI=0, N2SEMI=0, N1SEMI=0, N3SEMI=0, FSEMICORE=0, - KVREL=1, NCHEB=7, VCONST=0, SOCSCL=[1 for i in range(natyp)], - 
LAMBDA_XC=1, FCM=20, ITDBRY=20, KREADLDAU=0, RUNOPT=['LDAU', 'SEMICORE', 'IRGENDWAS FALSCHES'], - TESTOPT=['TSTOPTX0', 'TSTOPTX1', 'TSTOPTX2', 'TSTOPTX3', 'TSTOPTX4', 'TSTOPTX5', 'TSTOPTX6', 'TSTOPTX7', 'TSTOPTX8', 'TSTOPTXYZZZZZZ'], QBOUND=10**-3, - NPAN_LOG=3, NPAN_EQ=4, CPAINFO=[10**-3, 20], LLOYD=0, EMUSEMI=0, ICST=2, - TOLRDIF=0.01, BRYMIX=0.01, EBOTSEMI=0, NRIGHTHO=10, KEXCOR=2, NLEFTHOS=10, - R_LOG=0.4, LDAU_PARA=[1, 2, 0, 0, 0], NAT_LDAU=0, - RMTREFL=2.3, RMTREFR=2.3, DELTAE=[10**-5, 0], RMTREF=[2.3 for i in range(natyp)]) - p.set_value('', [1 for i in range(natyp)]) - p.set_multiple_values(NATYP=natyp, SITE=cpa_sites) - p.set_value('', cpa_conc) - p.set_value('FILES', ['output.pot', '']) - p.fill_keywords_to_inputfile(is_voro_calc=True) - - def test_set_rmtcore(self): - #test rmtcore - from numpy import array - from aiida_kkr.tools.common_functions import search_string - - para_dict = dict([(u'INS', 0), - (u'RCLUSTZ', 1.69), - (u'LMAX', 2), - (u'GMAX', 65.0), - (u'', [0.3535533906, 0.3535533906, 0.3535533906, 0.3535533906]), - (u'RMAX', 7.0), - (u'NSPIN', 1)]) - zatom = array([ 47., 47., 47., 47.]) - alat = 7.8692316414074615 - natom = 4 - positions = array([[ 0. , 0. , 0. ], - [ 0. , 0.5, 0.5], - [ 0.5, 0. , 0.5], - [ 0.5, 0.5, 0. 
]]) - bravais = array([[ 1., 0., 0.], - [ 0., 1., 0.], - [ 0., 0., 1.]]) - k =kkrparams(**para_dict) - k.set_multiple_values(ZATOM=zatom, NAEZ=natom, ALATBASIS=alat, RBASIS=positions, BRAVAIS=bravais) - k.fill_keywords_to_inputfile() - - txt = open('inputcard').readlines() - naez = int(txt[search_string('NAEZ', txt)].split()[-1]) - rmtcore = [] - l_offset = search_string('RMTCORE', txt) - for iatom in range(naez): - rmtcore_at = float(txt[l_offset+1+iatom].split()[-1]) - rmtcore.append(rmtcore_at) - maxdiff = (max(abs(array(para_dict['']) - array(rmtcore)))) - assert maxdiff < 10**-6 - - def test_set_kkrimp_params_full(self): - p = kkrparams(params_type='kkrimp') - p.set_multiple_values(CALCORBITALMOMENT=0, RUNFLAG='', QBOUND=10**-7, NSPIN=1, - TESTFLAG='', NPAN_EQ=7, CALCFORCE=0, NPAN_LOGPANELFAC=2, - SPINORBIT=0, ITDBRY=20, NPAN_LOG=5, INS=1, ICST=2, - CALCJIJMAT=0, NCHEB=10, HFIELD=[0.00, 0], BRYMIX=0.05, - KVREL=1, IMIX=0, RADIUS_MIN=-1, NCOLL=0, RADIUS_LOGPANELS=0.6, - MIXFAC=0.05, SCFSTEPS=1, XC='LDA-VWN') - p.fill_keywords_to_inputfile(output='config.cfg') - reftxt = ['RUNFLAG=\n', 'TESTFLAG=\n', '\n', 'INS= 1\n', 'KVREL= 1\n', 'NSPIN= 1\n', '\n', 'SCFSTEPS= 1\n', 'IMIX= 0\n', 'ITDBRY= 20\n', 'MIXFAC= 0.05000000000000\n', 'BRYMIX= 0.05000000000000\n', 'QBOUND= 1.000000e-07\n', '\n', 'XC= LDA-VWN\n', 'ICST= 2\n', 'SPINORBIT= 0\n', 'NCOLL= 0\n', 'NPAN_LOGPANELFAC= 2\n', 'RADIUS_LOGPANELS= 0.60000000000000\n', 'RADIUS_MIN= -1\n', 'NPAN_LOG= 5\n', 'NPAN_EQ= 7\n', 'NCHEB= 10\n', '\n', 'HFIELD= 0.00000000000000 0\n', '\n', 'CALCORBITALMOMENT= 0\n', 'CALCFORCE= 0\n', 'CALCJIJMAT= 0\n'] - txt = open('config.cfg').readlines() - assert txt==reftxt - - -class Test_read_inputfile(): - def test_read_minimal_inputfile(self): - p = kkrparams(ZATOM=26., LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], RCLUSTZ=1.5, NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1) - p.fill_keywords_to_inputfile(is_voro_calc=True) - p2 = kkrparams(params_type='voronoi') - 
p2.read_keywords_from_inputcard() - check_full_dict(p,p2) - - def test_read_unsorted_inputfile(self): - p = kkrparams(ZATOM=26., LMAX=2, NAEZ=1, BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], RCLUSTZ=1.5, NSPIN=2, RBASIS=[0,0,0], ALATBASIS=1, RMAX=7, GMAX=65) - p.fill_keywords_to_inputfile(output='input.temp.txt') - txt = open('input.temp.txt', 'r').readlines() - # exchange some lines - tmp = txt[0]; txt[0] = txt[5]; txt[5]=tmp - tmp = txt[-1]; txt[-1] = txt[-2]; txt[-2]=tmp - tmp = txt[-2]; txt[-2] = txt[-4]; txt[-4]=tmp - tmp = txt[-3]; txt[-3] = txt[-1]; txt[-1]=tmp - open('input.temp_unsorted.txt', 'w').writelines(txt) - p2 = kkrparams() - p2.read_keywords_from_inputcard(inputcard='input.temp_unsorted.txt') - print(p2.get_dict()) - print(dict(p2.get_set_values())) - check_full_dict(p,p2) - - def test_read_slab(self): - from numpy import array - from aiida_kkr.tools.common_functions import get_Ang2aBohr - p = kkrparams(params_type='kkr') - - # automatically read keywords from inpucard - p.read_keywords_from_inputcard(inputcard='../tests/files/kkr/import_calc_old_style/inputcard') - # convert some read-in stuff back from Ang. 
units to alat units - rbl = p.get_value('') - rbr = p.get_value('') - zper_l = p.get_value('ZPERIODL') - zper_r = p.get_value('ZPERIODR') - ang2alat = get_Ang2aBohr()/p.get_value('ALATBASIS') - if rbl is not None: p.set_value('', array(rbl)*ang2alat) - if rbr is not None: p.set_value('', array(rbr)*ang2alat) - if zper_l is not None: p.set_value('ZPERIODL', array(zper_l)*ang2alat) - if zper_r is not None: p.set_value('ZPERIODR', array(zper_r)*ang2alat) - - # set parameters of expected values manually - p0 = kkrparams(RUNOPT=['xigid-ef','LLOYD', 'ewald2d', 'NEWSOSOL', 'DOS'], TESTOPT=['ie','RMESH','clusters','MPIenerg','fullBZ','DOS'], LMAX=3, NSPIN=2, NATYP=80, NAEZ=80, CARTESIAN=True, ALATBASIS=20.156973053, BRAVAIS=[[0.38437499, 0., 0.], [0.19218749, -0.33287851, 0.], [0.19218749, -0.11095950, 1.]], INTERFACE=True, NRIGHTHO=10, NLEFTHOS=10, NLBASIS=10, NRBASIS=10, ZPERIODL=[-1.92187500000000e-01, 1.10959504859881e-01, -1.00000000000000e+00], ZPERIODR=[1.92187500000000e-01, -1.10959504859881e-01, 1.00000000000000e+00], RCLUSTZ=0.65, RCLUSTXY=0.65, EMIN=-1.2, EMAX=1.2, TEMPR=473., NPOL=7, NPT1=7, NPT2=40, NPT3=6, KSHAPE=2, INS=1, ICST=2, KEXCOR=2, HFIELD=0, VCONST=0, NPAN_LOG=17, NPAN_EQ=7, NCHEB=12, R_LOG=0.8, BZDIVIDE=[40, 40, 1], NSTEPS=500, IMIX=5, STRMIX=0.02, FCM=20., QBOUND=10**-7, BRYMIX=0.02, ITDBRY=30, LINIPOL=False, FILES=['potential', 'shapefun'], RMAX=15., GMAX=900.) 
- p0.set_value('', [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 51.0, 0.0, 52.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - p0.set_value('', [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) - p0.set_multiple_values(KAOEZR=[i for i in range(1,11)], KAOEZL=[i for i in range(1,11)], KVREL=1, RMTREFL=[2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000], RMTREFR=[2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000]) - p0.set_multiple_values(RMTREF=[2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000, 2.2671000, 2.2671000, 2.4948000, 2.3562000, 2.3562000, 2.3562000, 2.4948000, 2.2671000, 2.2671000, 2.5740000]) - 
p0.set_multiple_values(RBLEFT=[[-1.92187500000000e-01, 1.10959504859881e-01, -1.00000000000000e+00], [ 8.32667268468867e-17, 2.77555756156289e-17, -9.49500000000000e-01], [ 1.92187500000000e-01, -1.10959504859881e-01, -8.33000000000000e-01], [ 3.84375000000000e-01, -2.21919009719762e-01, -7.16500000000000e-01], [ 8.32667268468867e-17, 0.00000000000000e+00, -6.33000000000000e-01], [ 1.92187500000000e-01, -1.10959504859881e-01, -5.49500000000000e-01], [ 3.84375000000000e-01, -2.21919009719762e-01, -4.33000000000000e-01], [ 2.77555756156289e-17, 1.38777878078145e-17, -3.16500000000000e-01], [ 1.92187500000000e-01, -1.10959504859881e-01, -2.66000000000000e-01], [ 3.84375000000000e-01, -2.21919009719762e-01, -1.33000000000000e-01]], - RBRIGHT=[[1.53750000000000e+00, -8.87676038879049e-01, 8.00000000000000e+00], [1.72968750000000e+00, -9.98635543738930e-01, 8.05050000000000e+00], [1.92187500000000e+00, -1.10959504859881e+00, 8.16700000000000e+00], [2.11406250000000e+00, -1.22055455345869e+00, 8.28350000000000e+00], [1.72968750000000e+00, -9.98635543738930e-01, 8.36700000000000e+00], [1.92187500000000e+00, -1.10959504859881e+00, 8.45050000000000e+00], [2.11406250000000e+00, -1.22055455345869e+00, 8.56700000000000e+00], [1.72968750000000e+00, -9.98635543738930e-01, 8.68350000000000e+00], [1.92187500000000e+00, -1.10959504859881e+00, 8.73400000000000e+00], [2.11406250000000e+00, -1.22055455345869e+00, 8.86700000000000e+00]], - RBASIS=[[0.0, 0.0, 0.0], [0.1921875, -0.110959504859881, 0.0505000000000001], [0.384375, -0.221919009719762, 0.167], [0.5765625, -0.332878514579644, 0.2835], [0.1921875, -0.110959504859881, 0.367], [0.384375, -0.221919009719762, 0.4505], [0.5765625, -0.332878514579644, 0.567], [0.1921875, -0.110959504859881, 0.6835], [0.384375, -0.221919009719762, 0.734], [0.5765625, -0.332878514579644, 0.867], [0.1921875, -0.110959504859881, 1.0], [0.384375, -0.221919009719762, 1.0505], [0.5765625, -0.332878514579643, 1.167], [0.76875, -0.443838019439525, 1.2835], 
[0.384375, -0.221919009719762, 1.367], [0.5765625, -0.332878514579643, 1.4505], [0.76875, -0.443838019439525, 1.567], [0.384375, -0.221919009719762, 1.6835], [0.5765625, -0.332878514579643, 1.734], [0.76875, -0.443838019439525, 1.867], [0.384375, -0.221919009719762, 2.0], [0.5765625, -0.332878514579643, 2.0505], [0.76875, -0.443838019439525, 2.167], [0.9609375, -0.554797524299406, 2.2835], [0.5765625, -0.332878514579643, 2.367], [0.76875, -0.443838019439525, 2.4505], [0.9609375, -0.554797524299406, 2.567], [0.5765625, -0.332878514579643, 2.6835], [0.76875, -0.443838019439525, 2.734], [0.9609375, -0.554797524299406, 2.867], [0.5765625, -0.332878514579643, 3.0], [0.76875, -0.443838019439525, 3.0505], [0.9609375, -0.554797524299406, 3.167], [1.153125, -0.665757029159287, 3.2835], [0.76875, -0.443838019439525, 3.367], [0.9609375, -0.554797524299406, 3.4505], [1.153125, -0.665757029159287, 3.567], [0.76875, -0.443838019439525, 3.6835], [0.9609375, -0.554797524299406, 3.734], [1.153125, -0.665757029159287, 3.867], [0.76875, -0.443838019439525, 4.0], [0.9609375, -0.554797524299406, 4.0505], [1.153125, -0.665757029159287, 4.167], [1.3453125, -0.776716534019168, 4.2835], [0.9609375, -0.554797524299406, 4.367], [1.153125, -0.665757029159287, 4.4505], [1.3453125, -0.776716534019168, 4.567], [0.9609375, -0.554797524299406, 4.6835], [1.153125, -0.665757029159287, 4.734], [1.3453125, -0.776716534019168, 4.867], [0.9609375, -0.554797524299406, 5.0], [1.153125, -0.665757029159287, 5.0505], [1.3453125, -0.776716534019168, 5.167], [1.5375, -0.887676038879049, 5.2835], [1.153125, -0.665757029159287, 5.367], [1.3453125, -0.776716534019168, 5.4505], [1.5375, -0.887676038879049, 5.567], [1.153125, -0.665757029159287, 5.6835], [1.3453125, -0.776716534019168, 5.734], [1.5375, -0.887676038879049, 5.867], [1.153125, -0.665757029159287, 6.0], [1.3453125, -0.776716534019168, 6.0505], [1.5375, -0.887676038879049, 6.167], [1.7296875, -0.99863554373893, 6.2835], [1.3453125, -0.776716534019168, 
6.367], [1.5375, -0.887676038879049, 6.4505], [1.7296875, -0.99863554373893, 6.567], [1.3453125, -0.776716534019168, 6.6835], [1.5375, -0.887676038879049, 6.734], [1.7296875, -0.99863554373893, 6.867], [1.3453125, -0.776716534019168, 7.0], [1.5375, -0.887676038879049, 7.0505], [1.7296875, -0.99863554373893, 7.167], [1.921875, -1.10959504859881, 7.2835], [1.5375, -0.887676038879049, 7.367], [1.7296875, -0.99863554373893, 7.4505], [1.921875, -1.10959504859881, 7.567], [1.5375, -0.887676038879049, 7.6835], [1.7296875, -0.99863554373893, 7.734], [1.921875, -1.10959504859881, 7.867]]) - - # check all values - check_full_dict(p,p0) - - -class Test_other(): - def test_get_missing_keys(self): - p = kkrparams() - missing = p.get_missing_keys() - assert set(missing)==set(['', 'BRAVAIS', 'LMAX', 'GMAX', 'RMAX', 'NAEZ', '', 'NSPIN', 'ALATBASIS']) - missing = p.get_missing_keys(use_aiida=True) - assert set(missing)==set(['LMAX', 'GMAX', 'RMAX', 'NSPIN']) - - p = kkrparams(params_type='voronoi', EMIN=-2, LMAX=3) - missing = p.get_missing_keys() - assert set(missing)==set(['', 'BRAVAIS', 'RCLUSTZ', 'NAEZ', '', 'NSPIN', 'ALATBASIS']) - - def test_set_value_None(self): - p = kkrparams() - p.set_value('EMIN', -1) - assert p.values['EMIN'] == -1 - - p.set_value('EMIN',None) - assert p.values['EMIN'] == -1 - - p.remove_value('EMIN') - assert p.values['EMIN'] is None - - def test_set_potname_empty(self): - p = kkrparams() - p.set_multiple_values(RMAX=1, GMAX=1, NSPIN=1, RBASIS=[0,0,0], LMAX=2, RCLUSTZ=1.2, NAEZ=1, ZATOM=[0], BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], ALATBASIS=1, FILES=['','shapenew']) - p.fill_keywords_to_inputfile() - from aiida_kkr.tools.common_functions import search_string - txt = open('inputcard').readlines() - itmp = search_string('FILES', txt) - potname = txt[itmp+2].split()[0] - shapename = txt[itmp+4].split()[0] - assert 'potential' == potname - assert 'shapenew' == shapename - - def test_get_dict(self): - d0 = {'': None, 'ICST': None, '': None, 'N1SEMI': None, '': 
None, '': None, '': None, 'XINIPOL': None, 'EMAX': None, '': None, 'NLEFTHOS': None, '': [0.0], 'RCLUSTXY': None, 'NPAN_EQ': None, '': None, 'BRAVAIS': [[1, 0, 0], [0, 1, 0], [0, 0, 1]], 'INS': None, 'NAT_LDAU': None, '': None, 'ZPERIODL': None, 'TESTOPT': None, 'KEXCOR': None, '': None, 'TEMPR': None, 'EBOTSEMI': None, 'NATYP': None, 'RUNOPT': None, 'HFIELD': None, 'NPOL': None, 'RCLUSTZ': 1.2, 'ZPERIODR': None, 'N3SEMI': None, 'LMAX': 2, 'ITDBRY': None, '': None, '': None, 'STRMIX': None, 'CPAINFO': None, 'FCM': None, '': None, 'NPAN_LOG': None, 'CARTESIAN': None, 'FSEMICORE': None, 'LAMBDA_XC': None, 'GMAX': None, '': None, 'RMAX': None, 'NCHEB': None, 'EMIN': None, 'NAEZ': 1, '': None, 'KREADLDAU': None, '': [0, 0, 0], '': None, 'NPT2': None, 'NPT3': None, 'NPT1': None, 'N2SEMI': None, 'NPOLSEMI': None, '': None, 'FILES': ['', 'shapenew'], 'LDAU_PARA': None, 'NSPIN': 1, 'QBOUND': None, 'NRIGHTHO': None, 'KVREL': None, 'TKSEMI': None, '': None, 'NSTEPS': None, 'KSHAPE': None, '': None, 'LINIPOL': None, 'BZDIVIDE': None, 'INTERFACE': None, 'BRYMIX': None, 'EMUSEMI': None, 'ALATBASIS': 1.0, 'R_LOG': None, 'IMIX': None, 'VCONST': None} - p = kkrparams() - p.set_multiple_values(RMAX=1, GMAX=1, NSPIN=1, RBASIS=[0,0,0], LMAX=2, RCLUSTZ=1.2, NAEZ=1, ZATOM=[0], BRAVAIS=[[1,0,0],[0,1,0],[0,0,1]], ALATBASIS=1, FILES=['','shapenew']) - assert set(d0.keys()) == set(p.get_dict().keys()) - - l0 = ['', 'KSHAPE', 'ZPERIODL', '', '', '', 'NAEZ', 'CARTESIAN', '', '', 'INTERFACE', 'BRAVAIS', 'ALATBASIS', 'ZPERIODR'] - assert p.get_dict(group='lattice').keys() == l0 - - l0 = ['ZPERIODL', '', '', '', '', 'INTERFACE', 'ZPERIODR'] - assert l0 == p.get_dict(group='lattice', subgroup='2D mode').keys() - - def test_get_KKRcalc_parameter_defaults(self): - d = kkrparams.get_KKRcalc_parameter_defaults() - from aiida_kkr.tools.kkr_params import __kkr_default_params__ - d0 = __kkr_default_params__ - assert d[0]==d0 diff --git a/aiida_kkr/tests/test_kkrparser_functions.py 
b/aiida_kkr/tests/test_kkrparser_functions.py deleted file mode 100644 index 8dc53672..00000000 --- a/aiida_kkr/tests/test_kkrparser_functions.py +++ /dev/null @@ -1,229 +0,0 @@ -# -*- coding: utf-8 -*- -""" -@author: ruess -""" - -import pytest -from aiida_kkr.tools.kkrparser_functions import parse_kkr_outputfile, check_error_category - - -class Test_kkr_parser_functions(): - """ - Tests for the kkr parser functions - """ - #some global definitions - global dref, grouping_ref, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file - dref = {'fermi_energy_units':'Ry', 'nspin': 2, 'single_particle_energies': [0.33016425691737111, 1.5169676617833023, 38.200748406400834, 38.200748406400834, 1.5169676617833023, 0.33016425691737111], 'energy_contour_group': {'emin_unit': 'Rydberg', 'emin': -0.6, 'npol': 7, 'temperature_unit': 'Kelvin', 'n1': 3, 'n2': 32, 'n3': 3, 'number_of_energy_points': 45, 'temperature': 800.0}, 'energy': -69143.004155165298, 'warnings_group': {'number_of_warnings': 1, 'warnings_list': ['WARNING: HFIELD>0.0 found, set KHFIELD to 1']}, 'energy_unit': 'eV', 'charge_core_states_per_atom': [0.0, 0.0, 18.0, 18.0, 0.0, 0.0], 'ewald_sum_group': {'rsum_number_of_vectors': 425, 'gsum_cutoff_unit': '1/a_Bohr', 'rsum_number_of_shells': 74, 'gsum_cutoff': 11.98427, 'rsum_cutoff': 37.9646, 'gsum_number_of_shells': 1496, 'ewald_summation_mode': '3D', 'rsum_cutoff_unit': 'a_Bohr', 'gsum_number_of_vectors': 16167}, 'timings_group': {'main1a ': '22.6248', 'main0': '1.101', 'main2': '0.4791', 'main1c ': '46.1649', 'Time in Iteration': '72.2019', 'main1b ': '2.9331'}, 'core_states_group': {'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', 'energy_highest_lying_core_state_per_atom': [None, None, None, None, -3.38073664131, -3.38073663703, -3.38073664131, -3.38073663703, None, None, None, None], 'number_of_core_states_per_atom': [0, 0, 0, 0, 5, 5, 5, 5, 0, 0, 0, 0], 'descr_highest_lying_core_state_per_atom': ['no core states', 'no core 
states', 'no core states', 'no core states', '3p', '3p', '3p', '3p', 'no core states', 'no core states', 'no core states', 'no core states']}, 'total_energy_Ry': -5081.9171143599997, 'fermi_energy': 0.49301096760000002, - 'convergence_group': {'rms': 0.23807, 'strmix': 0.01, 'calculation_converged': False, 'charge_neutrality': -0.27584700000000001, 'orbital_moment_per_atom_all_iterations': [[-0.0, -0.0, 0.0, 0.0, -0.0, -0.0], [-0.0, -0.0, -0.0, -0.0, -0.0, -0.0], [-0.0, -0.0, -0.0, -0.0, -0.0, -0.0], [-0.0, -0.0, -0.0, -0.0, -0.0, -0.0], [-0.0, -0.0, -0.0, -0.0, -0.0, -0.0], [-0.0, 0.0, -0.0, -0.0, 0.0, -0.0], [-0.0, 0.0, -0.0, -0.0, 0.0, -0.0], [-0.0, 0.0, -0.0, -0.0, 0.0, -0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0]], 'fermi_energy_all_iterations_units': 'Ry', 'dos_at_fermi_energy_all_iterations': [10.238607, 15.315281, 15.391192, 15.298192, 15.258272, 15.20493, 15.159147, 15.114337, 15.072376, 15.032559], 'rms_unit': 'unitless', 'charge_neutrality_all_iterations': [-4.899746, -0.590384, -0.298448, -0.371115, -0.329622, -0.324519, -0.309258, -0.298029, -0.286475, -0.275847], 'qbound': 0.0, 'rms_per_atom': [0.31221, 0.092203, 0.15861, 0.15861, 0.092203, 0.31221], 'rms_all_iterations': [2.3466, 0.2333, 0.23309, 0.23439, 0.23513, 0.23596, 0.23664, 0.23724, 0.23771, 0.23807], 'imix': 0, 'nsteps_exhausted': True, 'number_of_iterations_max': 10, 'total_spin_moment_all_iterations': [0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0], 'idtbry': 40, 'charge_neutrality_unit': 'electrons', 'total_energy_Ry_all_iterations': [-5079.95190252, -5081.86670188, -5081.87281356, -5081.88207486, -5081.88933086, -5081.89617526, -5081.9022393, -5081.90772537, -5081.91266074, -5081.91711436], 'fcm': 20.0, 'number_of_iterations': 10, 'spin_moment_per_atom_all_iterations': [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, 
-0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0]], 'fermi_energy_all_iterations': [0.459241, 0.4656657829, 0.468897589, 0.4729407141, 0.4765411904, 0.4800983619, 0.4834984885, 0.4867848704, 0.4899526366, 0.4930109676], 'brymix': 0.01}, - 'total_energy_Ry_unit': 'Rydberg', 'number_of_atoms_in_unit_cell': 6, 'use_newsosol': True, 'two_pi_over_alat_internal_unit': '1/a_Bohr', 'magnetism_group': {'spin_moment_unit': 'mu_Bohr', 'total_spin_moment_unit': 'mu_Bohr', 'spin_moment_per_atom': [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], 'spin_moment_angles_per_atom_unit': 'degree', 'orbital_moment_per_atom': [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], 'spin_moment_vector_per_atom': [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [-0.0, -0.0, -0.0], [-0.0, -0.0, -0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], 'spin_moment_angles_per_atom': [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], 'total_spin_moment': -0.0, 'orbital_moment_unit': 'mu_Bohr', 'total_orbital_moment': 0.0}, 'charge_core_states_per_atom_unit': 'electron charge', 'two_pi_over_alat_internal': 1.15850818, 'total_charge_per_atom': [0.0, 0.0, 26.0, 26.0, 0.0, 0.0], 'alat_internal_unit': 'a_Bohr', 'total_charge_per_atom_unit': 'electron charge', 'charge_valence_states_per_atom_unit': 'electron charge', 'parser_warnings': [], 'kmesh_group': {'kmesh_energypoint': [4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2, 1], 'number_different_kmeshes': 4, 'number_kpoints_per_kmesh': {'n_kx': [10, 7, 5, 3], 'n_ky': [10, 7, 5, 3], 'n_kz': [10, 7, 5, 3], 'number_of_kpts': [1000, 343, 125, 27]}}, 'symmetries_group': {'number_of_used_symmetries': 1, 'number_of_lattice_symmetries': 4, 'symmetry_description': {'E': {'has_inversion': 0, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}}}, 'alat_internal': 5.423514, 'timings_unit': 'seconds', 
'code_info_group': {'code_version': 'v2.2-22-g4f8f5ff', 'calculation_serial_number': 'kkrjm_v2.2-22-g4f8f5ff_openmp_20171208103325', 'compile_options': 'openmp'}, 'single_particle_energies_unit': 'eV', 'dos_at_fermi_energy': 15.032558999999999, 'charge_valence_states_per_atom': [0.0040260000000000001, 0.22986200000000001, 7.628188999999999, 7.628188999999999, 0.22986200000000001, 0.0040260000000000001]} - grouping_ref = ['energy_contour_group', 'warnings_group', 'ewald_sum_group', 'timings_group', 'core_states_group', 'convergence_group', 'magnetism_group', 'kmesh_group', 'symmetries_group', 'code_info_group'] - path0 = './files/kkr/kkr_run_slab_soc_simple/' - outfile = path0+'out_kkr' - outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'out_potential' - nonco_out_file = path0+'nonco_angle_out.dat' - - - def test_complete_kkr_output(self): - """ - Parse complete output of kkr calculation - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - print(out_dict['convergence_group']) - assert success - assert set(out_dict.keys()) == set(dref.keys()) - assert out_dict == dref - assert msg_list == [] - groups = [i for i in out_dict.keys() if 'group' in i] - assert set(groups) == set(grouping_ref) - - def test_mag_orbmom_kkr_output(self): - """ - Parse complete output of kkr calculation with orbital moments - """ - dref = {'fermi_energy_units':'Ry', 'nspin': 2, 'single_particle_energies': [0.2097247970611916, 1.2887334935546728, 37.826589199624905, 37.826589199624905, 1.2887334935546728, 0.2097247970611916], 'energy_contour_group': {'emin_unit': 'Rydberg', 'emin': -0.6, 'npol': 7, 'temperature_unit': 'Kelvin', 'n1': 3, 'n2': 32, 'n3': 3, 'number_of_energy_points': 45, 'temperature': 800.0}, 'energy': -69143.565895181309, 
'warnings_group': {'number_of_warnings': 1, 'warnings_list': ['WARNING: HFIELD>0.0 found, set KHFIELD to 1']}, 'energy_unit': 'eV', 'charge_core_states_per_atom': [0.0, 0.0, 18.0, 18.0, 0.0, 0.0], 'ewald_sum_group': {'rsum_number_of_vectors': 425, 'gsum_cutoff_unit': '1/a_Bohr', 'rsum_number_of_shells': 74, 'gsum_cutoff': 11.98427, 'rsum_cutoff': 37.9646, 'gsum_number_of_shells': 1496, 'ewald_summation_mode': '3D', 'rsum_cutoff_unit': 'a_Bohr', 'gsum_number_of_vectors': 16167}, 'timings_group': {'main1a ': '22.7591', 'main0': '1.1051', 'main2': '0.4782', 'main1c ': '46.5031', 'Time in Iteration': '72.7002', 'main1b ': '2.9598'}, 'core_states_group': {'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', 'energy_highest_lying_core_state_per_atom': [None, None, None, None, -3.3177936736000002, -3.4353219668800001, -3.3177936736000002, -3.4353219668800001, None, None, None, None], 'number_of_core_states_per_atom': [0, 0, 0, 0, 5, 5, 5, 5, 0, 0, 0, 0], 'descr_highest_lying_core_state_per_atom': ['no core states', 'no core states', 'no core states', 'no core states', '3p', '3p', '3p', '3p', 'no core states', 'no core states', 'no core states', 'no core states']}, 'total_energy_Ry': -5081.9584014900001, 'fermi_energy': 0.49007270419999999, - 'convergence_group': {'rms': 0.21679000000000001, 'strmix': 0.01, 'calculation_converged': False, 'charge_neutrality': -0.17172599999999999, 'orbital_moment_per_atom_all_iterations': [[-0.0, -0.0001, -0.0063, -0.0063, -0.0001, -0.0], [0.0, -0.0001, 0.0464, 0.0464, -0.0001, 0.0], [0.0, -0.0001, 0.052, 0.052, -0.0001, 0.0], [0.0, -0.0001, 0.053, 0.053, -0.0001, 0.0], [0.0, -0.0001, 0.0539, 0.0539, -0.0001, 0.0], [0.0, -0.0001, 0.0547, 0.0547, -0.0001, 0.0], [0.0, -0.0001, 0.0558, 0.0558, -0.0001, 0.0], [0.0, -0.0002, 0.0572, 0.0572, -0.0002, 0.0], [0.0, -0.0002, 0.0585, 0.0585, -0.0002, 0.0], [-0.0, -0.0002, 0.0599, 0.0599, -0.0002, -0.0]], 'fermi_energy_all_iterations_units': 'Ry', 'dos_at_fermi_energy_all_iterations': 
[10.778086, 13.756463, 13.070528, 12.371442, 11.652055, 10.987681, 10.402299, 9.890784, 9.437221, 9.013371], 'rms_unit': 'unitless', 'charge_neutrality_all_iterations': [-4.914607, -0.4306, -0.254987, -0.262482, -0.239525, -0.215776, -0.190357, -0.173403, -0.168785, -0.171726], 'qbound': 0.0, 'rms_per_atom': [0.26244, 0.052778, 0.17549, 0.17549, 0.052778, 0.26244], 'rms_all_iterations': [2.17, 0.22841, 0.22738, 0.22601, 0.22458, 0.22304, 0.22139, 0.21969, 0.21811, 0.21679], 'imix': 0, 'nsteps_exhausted': True, 'number_of_iterations_max': 10, 'total_spin_moment_all_iterations': [1.150471, 2.201148, 2.530913, 2.835644, 3.131747, 3.409436, 3.669548, 3.912214, 4.137758, 4.346569], 'idtbry': 40, 'charge_neutrality_unit': 'electrons', 'total_energy_Ry_all_iterations': [-5080.21763742, -5081.87827258, -5081.89042264, -5081.90349614, -5081.91552341, -5081.92632203, -5081.93573754, -5081.94396567, -5081.95138589, -5081.95840149], 'fcm': 20.0, 'number_of_iterations': 10, 'spin_moment_per_atom_all_iterations': [[0.0005, 0.0214, 0.5534, 0.5534, 0.0214, 0.0005], [0.0005, 0.0156, 1.0844, 1.0844, 0.0156, 0.0005], [0.0005, 0.0152, 1.2497, 1.2497, 0.0152, 0.0005], [0.0005, 0.0159, 1.4015, 1.4015, 0.0159, 0.0005], [0.0005, 0.0166, 1.5488, 1.5488, 0.0166, 0.0005], [0.0005, 0.0174, 1.6869, 1.6869, 0.0174, 0.0005], [0.0004, 0.0181, 1.8162, 1.8162, 0.0181, 0.0004], [0.0004, 0.0187, 1.937, 1.937, 0.0187, 0.0004], [0.0004, 0.0193, 2.0492, 2.0492, 0.0193, 0.0004], [0.0003, 0.02, 2.153, 2.153, 0.02, 0.0003]], 'fermi_energy_all_iterations': [0.459241, 0.4644579419, 0.4677093646, 0.4712454875, 0.4746715679, 0.4779445657, 0.4809944912, 0.4839164583, 0.4868973048, 0.4900727042], 'brymix': 0.01}, - 'total_energy_Ry_unit': 'Rydberg', 'number_of_atoms_in_unit_cell': 6, 'use_newsosol': True, 'two_pi_over_alat_internal_unit': '1/a_Bohr', 'magnetism_group': {'spin_moment_unit': 'mu_Bohr', 'total_spin_moment_unit': 'mu_Bohr', 'spin_moment_per_atom': [0.0003, 0.02, 2.153, 2.153, 0.02, 0.0003], 
'spin_moment_angles_per_atom_unit': 'degree', 'orbital_moment_per_atom': [-0.0, -0.0002, 0.0599, 0.0599, -0.0002, -0.0], 'spin_moment_vector_per_atom': [[0.0, 0.0, 0.0003], [0.0, 0.0, 0.02], [0.0, 0.0, 2.153], [0.0, 0.0, 2.153], [0.0, 0.0, 0.02], [0.0, 0.0, 0.0003]], 'spin_moment_angles_per_atom': [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], 'total_spin_moment': 4.3465689999999997, 'orbital_moment_unit': 'mu_Bohr', 'total_orbital_moment': 0.11940000000000001}, 'charge_core_states_per_atom_unit': 'electron charge', 'two_pi_over_alat_internal': 1.15850818, 'total_charge_per_atom': [0.0, 0.0, 26.0, 26.0, 0.0, 0.0], 'alat_internal_unit': 'a_Bohr', 'total_charge_per_atom_unit': 'electron charge', 'charge_valence_states_per_atom_unit': 'electron charge', 'parser_warnings': [], 'kmesh_group': {'kmesh_energypoint': [4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2, 1], 'number_different_kmeshes': 4, 'number_kpoints_per_kmesh': {'n_kx': [10, 7, 5, 3], 'n_ky': [10, 7, 5, 3], 'n_kz': [10, 7, 5, 3], 'number_of_kpts': [1000, 343, 125, 27]}}, 'symmetries_group': {'number_of_used_symmetries': 1, 'number_of_lattice_symmetries': 4, 'symmetry_description': {'E': {'has_inversion': 0, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}}}, 'alat_internal': 5.423514, 'timings_unit': 'seconds', 'code_info_group': {'code_version': 'v2.2-22-g4f8f5ff', 'calculation_serial_number': 'kkrjm_v2.2-22-g4f8f5ff_openmp_20171208132839', 'compile_options': 'openmp'}, 'single_particle_energies_unit': 'eV', 'dos_at_fermi_energy': 9.0133709999999994, 'charge_valence_states_per_atom': [0.003503, 0.21339, 7.6972440000000013, 7.6972440000000013, 0.21339, 0.003503]} - path0 = './files/kkr/kkr_run_slab_soc_mag/' - outfile = path0+'out_kkr' - outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'out_potential' - 
nonco_out_file = path0+'nonco_angle_out.dat' - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - print(out_dict['convergence_group']) - assert success - assert set(out_dict.keys()) == set(dref.keys()) - assert out_dict == dref - assert msg_list == [] - - def test_nosoc_kkr_output(self): - """ - Parse complete output of kkr calculation nosoc, magnetic - """ - dref = {'fermi_energy_units':'Ry', 'nspin': 2, 'single_particle_energies': [0.3300528004408107, 1.5175235386980168, 38.205408680071912, 38.205408680071912, 1.5175235386980168, 0.3300528004408107], 'energy_contour_group': {'emin_unit': 'Rydberg', 'emin': -0.6, 'npol': 7, 'temperature_unit': 'Kelvin', 'n1': 3, 'n2': 32, 'n3': 3, 'number_of_energy_points': 45, 'temperature': 800.0}, 'energy': -69142.986794301018, 'warnings_group': {'number_of_warnings': 1, 'warnings_list': ['WARNING: HFIELD>0.0 found, set KHFIELD to 1']}, 'energy_unit': 'eV', 'charge_core_states_per_atom': [0.0, 0.0, 18.0, 18.0, 0.0, 0.0], 'ewald_sum_group': {'rsum_number_of_vectors': 425, 'gsum_cutoff_unit': '1/a_Bohr', 'rsum_number_of_shells': 74, 'gsum_cutoff': 11.98427, 'rsum_cutoff': 37.9646, 'gsum_number_of_shells': 1496, 'ewald_summation_mode': '3D', 'rsum_cutoff_unit': 'a_Bohr', 'gsum_number_of_vectors': 16167}, 'timings_group': {'main1a ': '4.7957', 'main0': '0.976', 'main2': '0.5074', 'main1c ': '10.57', 'Time in Iteration': '17.4351', 'main1b ': '1.562'}, 'core_states_group': {'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', 'energy_highest_lying_core_state_per_atom': [None, None, None, None, -3.3808088817000002, -3.3808088773999998, -3.3808088817000002, -3.3808088773999998, None, None, None, None], 'number_of_core_states_per_atom': [0, 0, 0, 0, 5, 5, 5, 5, 0, 0, 0, 0], 'descr_highest_lying_core_state_per_atom': ['no core states', 'no core states', 'no core states', 'no core 
states', '3p', '3p', '3p', '3p', 'no core states', 'no core states', 'no core states', 'no core states']}, 'total_energy_Ry': -5081.9158383599997, 'fermi_energy': 0.49281398320000003, - 'convergence_group': {'rms': 0.23827000000000001, 'strmix': 0.01, 'calculation_converged': False, 'charge_neutrality': -0.27584399999999998, 'fermi_energy_all_iterations_units': 'Ry', 'dos_at_fermi_energy_all_iterations': [10.260433, 15.367202, 15.427578, 15.336628, 15.293781, 15.239316, 15.191993, 15.145917, 15.102739, 15.061817], 'rms_unit': 'unitless', 'charge_neutrality_all_iterations': [-4.90193, -0.576195, -0.303203, -0.369647, -0.330104, -0.324371, -0.309302, -0.298009, -0.286475, -0.275844], 'qbound': 0.0, 'rms_per_atom': [0.31264, 0.092533, 0.15846, 0.15846, 0.092533, 0.31264], 'rms_all_iterations': [2.3414, 0.23344, 0.23332, 0.2346, 0.23535, 0.23618, 0.23686, 0.23745, 0.23792, 0.23827], 'imix': 0, 'nsteps_exhausted': True, 'number_of_iterations_max': 10, 'total_spin_moment_all_iterations': [0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0], 'idtbry': 40, 'charge_neutrality_unit': 'electrons', 'total_energy_Ry_all_iterations': [-5079.95660683, -5081.8656148, -5081.87192003, -5081.88104897, -5081.88827002, -5081.89505638, -5081.90107597, -5081.90651978, -5081.91141803, -5081.91583836], 'fcm': 20.0, 'number_of_iterations': 10, 'spin_moment_per_atom_all_iterations': [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0], [0.0, 0.0, -0.0, -0.0, 0.0, 0.0]], 'fermi_energy_all_iterations': [0.459241, 0.4654901812, 0.4687657284, 0.472782771, 0.476380134, 0.4799276602, 0.4833209134, 0.4866002281, 0.4897616294, 0.4928139832], 'brymix': 0.01}, - 'total_energy_Ry_unit': 'Rydberg', 'number_of_atoms_in_unit_cell': 6, 'use_newsosol': 
False, 'two_pi_over_alat_internal_unit': '1/a_Bohr', 'magnetism_group': {'spin_moment_unit': 'mu_Bohr', 'total_spin_moment': -0.0, 'total_spin_moment_unit': 'mu_Bohr', 'spin_moment_per_atom': [0.0, 0.0, -0.0, -0.0, 0.0, 0.0]}, 'charge_core_states_per_atom_unit': 'electron charge', 'two_pi_over_alat_internal': 1.15850818, 'total_charge_per_atom': [0.0, 0.0, 26.0, 26.0, 0.0, 0.0], 'alat_internal_unit': 'a_Bohr', 'total_charge_per_atom_unit': 'electron charge', 'charge_valence_states_per_atom_unit': 'electron charge', 'parser_warnings': [], 'kmesh_group': {'kmesh_energypoint': [4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2, 1, 4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2, 1], 'number_different_kmeshes': 4, 'number_kpoints_per_kmesh': {'n_kx': [10, 7, 5, 3], 'n_ky': [10, 7, 5, 3], 'n_kz': [10, 7, 5, 3], 'number_of_kpts': [310, 112, 45, 12]}}, 'symmetries_group': {'number_of_used_symmetries': 4, 'number_of_lattice_symmetries': 4, 'symmetry_description': {'C2z': {'has_inversion': 0, 'euler_angles': [180.0, 0.0, 0.0], 'is_unitary': 1}, 'IC2x': {'has_inversion': 1, 'euler_angles': [180.0, 180.0, 0.0], 'is_unitary': 1}, 'IC2y': {'has_inversion': 1, 'euler_angles': [0.0, 180.0, 0.0], 'is_unitary': 1}, 'E': {'has_inversion': 0, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}}}, 'alat_internal': 5.423514, 'timings_unit': 'seconds', 'code_info_group': {'code_version': 'v2.2-22-g4f8f5ff', 'calculation_serial_number': 'kkrjm_v2.2-22-g4f8f5ff_openmp_20171208160428', 'compile_options': 'openmp'}, 'single_particle_energies_unit': 'eV', 'dos_at_fermi_energy': 15.061817, 'charge_valence_states_per_atom': [0.0040280000000000003, 0.22997500000000001, 7.6280740000000016, 7.6280740000000016, 0.22997500000000001, 0.0040280000000000003]} - path0 = './files/kkr/kkr_run_slab_nosoc/' - outfile = path0+'out_kkr' - 
outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'out_potential' - nonco_out_file = path0+'nonco_angle_out.dat' - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - print(out_dict['convergence_group']) - assert success - assert set(out_dict.keys()) == set(dref.keys()) - assert out_dict == dref - assert msg_list == [] - - def test_missing_outfile(self): - """ - Parse kkr output where out_kkr is missing. Compares error messages - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, 'wrong_name', outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - return msg_list - assert not success - assert set(msg_list) == set(['Error parsing output of KKR: Version Info', 'Error parsing output of KKR: rms-error', 'Error parsing output of KKR: charge neutrality', 'Error parsing output of KKR: total magnetic moment', 'Error parsing output of KKR: spin moment per atom', 'Error parsing output of KKR: orbital moment', 'Error parsing output of KKR: EF', 'Error parsing output of KKR: DOS@EF', 'Error parsing output of KKR: total energy', 'Error parsing output of KKR: search for warnings', 'Error parsing output of KKR: charges', 'Error parsing output of KKR: scfinfo']) - - def test_missing_outfile0init(self): - """ - Parse kkr output where output.0.txt is missing. 
Compares error messages - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, 'wrong_name', outfile_000, timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - return msg_list - assert not success - assert set(msg_list) == set(['Error parsing output of KKR: nspin/natom', 'Error parsing output of KKR: spin moment per atom', 'Error parsing output of KKR: orbital moment', 'Error parsing output of KKR: energy contour', 'Error parsing output of KKR: alat, 2*pi/alat', 'Error parsing output of KKR: scfinfo', 'Error parsing output of KKR: kmesh', 'Error parsing output of KKR: symmetries', 'Error parsing output of KKR: ewald summation for madelung poterntial']) - - def test_missing_outfile000(self): - """ - Parse kkr output where output.000.txt is missing. Compares error messages - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, 'wrong_name', timing_file, potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - return msg_list - assert not success - assert set(msg_list) == set(['Error parsing output of KKR: rms-error', 'Error parsing output of KKR: single particle energies', 'Error parsing output of KKR: charges', 'Error parsing output of KKR: scfinfo', 'Error parsing output of KKR: kmesh']) - - def test_missing_timingfile(self): - """ - Parse kkr output where out_timing.000.txt is missing. Compares error messages - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, 'wrong_name', potfile_out, nonco_out_file) - out_dict['parser_warnings'] = msg_list - return msg_list - assert not success - assert msg_list == ['Error parsing output of KKR: timings'] - - def test_missing_potfile(self): - """ - Parse kkr output where out_potential is missing. 
Compares error messages - """ - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, 'wrong_name', nonco_out_file) - out_dict['parser_warnings'] = msg_list - assert not success - assert msg_list == ['Error parsing output of KKR: core_states'] - - - def test_missing_nonco_angles(self): - """ - Parse kkr output where out_potential is missing. Compares error messages - """ - path0 = './files/kkr/kkr_run_slab_soc_mag/' - outfile = path0+'out_kkr' - outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'out_potential' - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, 'wrong_name') - out_dict['parser_warnings'] = msg_list - assert not success - assert msg_list == ['Error parsing output of KKR: spin moment per atom'] - - def test_check_error_category(self): - """ - Check check_error_category function used in parser after parse_kkr_outputfile is used - """ - fname = 'nonco_angles_out.dat' - err_cat, err_msg = (2, "Error! NONCO_ANGLES_OUT not found {}".format(fname)) - assert not check_error_category(err_cat, err_msg, {'use_newsosol': False}) - assert check_error_category(err_cat, err_msg, {'use_newsosol': True}) - - def test_parse_dosout(self): - """ - Parse output of dos calculation since ouput changes slightly (e.g. 
no ewald sum) - """ - path0 = './files/kkr/kkr_run_dos_output/' - outfile = path0+'out_kkr' - outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'out_potential' - dref = {'fermi_energy_units':'Ry', 'nspin': 1, 'single_particle_energies': [489.07122759181328, 489.07122759181328, 489.07122759181328, 489.07122759181328], 'energy_contour_group': {'emin_unit': 'Rydberg', 'emin': -1.0, 'npol': 0, 'temperature_unit': 'Kelvin', 'n1': 0, 'n2': 21, 'n3': 0, 'number_of_energy_points': 21, 'temperature': 200.0}, 'energy': -390210.37840783992, 'warnings_group': {'number_of_warnings': 0, 'warnings_list': []}, 'energy_unit': 'eV', 'charge_core_states_per_atom': [], 'timings_group': {'main1c - serial part': '0.0054', 'main0': '0.136', 'main2': '0.2291', 'main1b - calctref13': '0.1511', 'main1c ': '0.7165', 'main1a - tbref': '0.9809', 'Time in Iteration': '5.9128', 'main1b ': '3.537', 'main1a ': '1.4302'}, 'core_states_group': {'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', 'energy_highest_lying_core_state_per_atom': [-3.83243200276, -3.83243200276, -3.83243200276, -3.83243200276], 'number_of_core_states_per_atom': [8, 8, 8, 8], 'descr_highest_lying_core_state_per_atom': ['4p', '4p', '4p', '4p']}, 'total_energy_Ry': -28679.93406508, 'fermi_energy': 1.05, - 'convergence_group': {'strmix': 0.0, 'rms': 12.977, 'rms_unit': 'unitless', 'qbound': 0.0, 'calculation_converged': False, 'nsteps_exhausted': True, 'brymix': 0.01, 'charge_neutrality': -137.449522, 'number_of_iterations_max': 1, 'number_of_iterations': 1, 'rms_per_atom': [12.977, 12.977, 12.977, 12.977], 'fcm': 20.0, 'fermi_energy_all_iterations': [1.05], 'dos_at_fermi_energy_all_iterations': [3.672746], 'rms_all_iterations': [12.977], 'idtbry': 40, 'fermi_energy_all_iterations_units': 'Ry', 'charge_neutrality_unit': 'electrons', 'charge_neutrality_all_iterations': [-137.449522], 'total_energy_Ry_all_iterations': 
[-28679.93406508], 'imix': 0}, - 'total_energy_Ry_unit': 'Rydberg', 'use_newsosol': False, 'two_pi_over_alat_internal_unit': '1/a_Bohr', 'charge_core_states_per_atom_unit': 'electron charge', 'two_pi_over_alat_internal': 0.79844546, 'alat_internal_unit': 'a_Bohr', 'charge_valence_states_per_atom_unit': 'electron charge', 'parser_warnings': [], 'kmesh_group': {'kmesh_energypoint': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 'number_different_kmeshes': 1, 'number_kpoints_per_kmesh': {'n_kx': [10], 'n_ky': [10], 'n_kz': [10], 'number_of_kpts': [216]}}, 'symmetries_group': {'number_of_used_symmetries': 8, 'number_of_lattice_symmetries': 8, 'symmetry_description': {'E': {'has_inversion': 0, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}, 'C2z': {'has_inversion': 0, 'euler_angles': [180.0, 0.0, 0.0], 'is_unitary': 1}, 'C2x': {'has_inversion': 0, 'euler_angles': [180.0, 180.0, 0.0], 'is_unitary': 1}, 'C2y': {'has_inversion': 0, 'euler_angles': [0.0, 180.0, 0.0], 'is_unitary': 1}, 'IC2z': {'has_inversion': 1, 'euler_angles': [180.0, 0.0, 0.0], 'is_unitary': 1}, 'IC2x': {'has_inversion': 1, 'euler_angles': [180.0, 180.0, 0.0], 'is_unitary': 1}, 'IC2y': {'has_inversion': 1, 'euler_angles': [0.0, 180.0, 0.0], 'is_unitary': 1}, 'IE': {'has_inversion': 1, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}}}, 'alat_internal': 7.869273, 'timings_unit': 'seconds', 'total_charge_per_atom_unit': 'electron charge', 'code_info_group': {'code_version': 'v2.2-22-g4f8f5ff', 'calculation_serial_number': 'kkrjm_v2.2-22-g4f8f5ff_openmp-mac_20171214102522', 'compile_options': 'openmp-mac'}, 'single_particle_energies_unit': 'eV', 'dos_at_fermi_energy': 3.6727460000000001, 'number_of_atoms_in_unit_cell': 4, 'total_charge_per_atom': []} - out_dict = {} - success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, 'wrong_name') - out_dict['parser_warnings'] = msg_list - print(out_dict['convergence_group']) - 
#return success, msg_list, out_dict - assert success - assert msg_list == [] - assert set(out_dict.keys()) == set(dref.keys()) - assert out_dict == dref - - def test_parse_3Dsymmetries(self): - """ - Parse output of a dos calculation in 3D (used to fail due to symmetries reading) - """ - p = './files/kkr/parser_3Dsymmetries/' - dref = {'fermi_energy_units':'Ry', 'nspin': 2, 'single_particle_energies': [49.621809689579912, 49.621809689579912, 49.621809689579912, 49.621809689579912], 'energy_contour_group': {'emin_unit': 'Rydberg', 'emin': -0.673499, 'npol': 0, 'temperature_unit': 'Kelvin', 'n1': 0, 'n2': 61, 'n3': 0, 'number_of_energy_points': 61, 'temperature': 400.0}, 'energy': -374811.44613886473, 'warnings_group': {'number_of_warnings': 0, 'warnings_list': []}, 'energy_unit': 'eV', 'charge_core_states_per_atom': [], 'timings_group': {'main1c - serial part': '0.0136', 'main0': '0.1273', 'main2': '0.1171', 'main1b - calctref13': '0.4801', 'main1c ': '0.0975', 'main1a - tbref': '0.5523', 'Time in Iteration': '61.063', 'main1b ': '60.1609', 'main1a ': '0.6874'}, 'core_states_group': {'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', 'energy_highest_lying_core_state_per_atom': [-3.5445692, -3.5445692, -3.5445692, -3.5445692, -3.5445692, -3.5445692, -3.5445692, -3.5445692], 'number_of_core_states_per_atom': [8, 8, 8, 8, 8, 8, 8, 8], 'descr_highest_lying_core_state_per_atom': ['4p', '4p', '4p', '4p', '4p', '4p', '4p', '4p']}, 'total_energy_Ry': -27548.133409369999, 'fermi_energy': 0.15357699999999999, - 'convergence_group': {'rms': 18.829999999999998, 'strmix': 0.0, 'calculation_converged': False, 'charge_neutrality': -147.80077700000001, 'fermi_energy_all_iterations_units': 'Ry', 'dos_at_fermi_energy_all_iterations': [7.537613], 'rms_unit': 'unitless', 'charge_neutrality_all_iterations': [-147.800777], 'qbound': 0.0, 'rms_per_atom': [18.83, 18.83, 18.83, 18.83], 'rms_all_iterations': [18.83], 'imix': 0, 'nsteps_exhausted': True, 'number_of_iterations_max': 
1, 'total_spin_moment_all_iterations': [0.0], 'idtbry': 40, 'charge_neutrality_unit': 'electrons', 'total_energy_Ry_all_iterations': [-27548.13340937], 'fcm': 20.0, 'number_of_iterations': 1, 'spin_moment_per_atom_all_iterations': [[0.0, 0.0, 0.0, 0.0]], 'fermi_energy_all_iterations': [0.153577], 'brymix': 0.01}, - 'total_energy_Ry_unit': 'Rydberg', 'use_newsosol': False, 'two_pi_over_alat_internal_unit': '1/a_Bohr', 'magnetism_group': {'spin_moment_unit': 'mu_Bohr', 'total_spin_moment': 0.0, 'total_spin_moment_unit': 'mu_Bohr', 'spin_moment_per_atom': [0.0, 0.0, 0.0, 0.0]}, 'charge_core_states_per_atom_unit': 'electron charge', 'two_pi_over_alat_internal': 0.79844546, 'alat_internal_unit': 'a_Bohr', 'charge_valence_states_per_atom_unit': 'electron charge', 'kmesh_group': {'kmesh_energypoint': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 'number_different_kmeshes': 1, 'number_kpoints_per_kmesh': {'n_kx': [20], 'n_ky': [20], 'n_kz': [20], 'number_of_kpts': [1331]}}, 'symmetries_group': {'number_of_used_symmetries': 8, 'number_of_lattice_symmetries': 8, 'symmetry_description': {'E': {'has_inversion': 0, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}, 'C2z': {'has_inversion': 0, 'euler_angles': [180.0, 0.0, 0.0], 'is_unitary': 1}, 'C2x': {'has_inversion': 0, 'euler_angles': [180.0, 180.0, 0.0], 'is_unitary': 1}, 'C2y': {'has_inversion': 0, 'euler_angles': [0.0, 180.0, 0.0], 'is_unitary': 1}, 'IC2z': {'has_inversion': 1, 'euler_angles': [180.0, 0.0, 0.0], 'is_unitary': 1}, 'IC2x': {'has_inversion': 1, 'euler_angles': [180.0, 180.0, 0.0], 'is_unitary': 1}, 'IC2y': {'has_inversion': 1, 'euler_angles': [0.0, 180.0, 0.0], 'is_unitary': 1}, 'IE': 
{'has_inversion': 1, 'euler_angles': [0.0, 0.0, 0.0], 'is_unitary': 1}}}, 'alat_internal': 7.869273, 'timings_unit': 'seconds', 'total_charge_per_atom_unit': 'electron charge', 'code_info_group': {'code_version': 'v2.2-23-g4a095c6', 'calculation_serial_number': 'kkrjm_v2.2-23-g4a095c6_openmp-mac_20180105092029', 'compile_options': 'openmp-mac'}, 'single_particle_energies_unit': 'eV', 'dos_at_fermi_energy': 7.5376130000000003, 'number_of_atoms_in_unit_cell': 4, 'total_charge_per_atom': []} - success, msg_list, out_dict = parse_kkr_outputfile({}, p+'out_kkr', p+'output.0.txt', p+'output.000.txt', p+'out_timing.000.txt', p+'out_potential', p+'nonco_angle_out.dat') - #return success, msg_list, out_dict - print(out_dict['timings_group']) - print(out_dict['convergence_group']) - assert success - assert msg_list == [] - assert set(out_dict.keys()) == set(dref.keys()) - assert out_dict == dref - - def test_Nan_output(self): - """ - Parse output of a dos calculation in 3D (used to fail due to symmetries reading) - """ - p = './files/kkr/parser_3Dsymmetries/' - success, msg_list, out_dict = parse_kkr_outputfile({}, p+'out_kkr', p+'output.0.txt', p+'output.000.txt', p+'out_timing.000.txt', p+'out_potential', p+'nonco_angle_out.dat', p+'output.2.txt') - #return success, msg_list, out_dict - from numpy import isnan - captured_nan = False - for key, val in out_dict['convergence_group'].iteritems(): - if key in ['charge_neutrality', 'rms']: - if isnan(val): - captured_nan = True - elif key in ['charge_neutrality_all_iterations', 'dos_at_fermi_energy_all_iterations', 'fermi_energy_all_iterations', 'rms_all_iterations', 'total_energy_Ry_all_iterations', 'rms_per_atom']: - for isub in val: - if isnan(isub): - captured_nan = True - assert success - assert not captured_nan - diff --git a/aiida_kkr/tests/test_scf_wc_simple.py b/aiida_kkr/tests/test_scf_wc_simple.py index 55b63aa3..d0f2c5aa 100755 --- a/aiida_kkr/tests/test_scf_wc_simple.py +++ b/aiida_kkr/tests/test_scf_wc_simple.py @@ 
-2,6 +2,13 @@ import pytest +# some global settings + +voro_codename = 'voronoi' +kkr_codename = 'KKRhost' +computername = 'localhost' +queuename = '' + # helper function def print_clean_inouts(node): from pprint import pprint @@ -34,14 +41,61 @@ def test_scf_wc_Cu_simple(self): """ from aiida.orm import Code, load_node, DataFactory from aiida.work import run - from aiida_kkr.tools.kkr_params import kkrparams + from masci_tools.io.kkr_params import kkrparams from aiida_kkr.workflows.kkr_scf import kkr_scf_wc from pprint import pprint - from scipy import array + from numpy import array ParameterData = DataFactory('parameter') StructureData = DataFactory('structure') - + + from aiida.orm.implementation.django.code import Code + from aiida.orm.computers import Computer + from aiida.orm.querybuilder import QueryBuilder + + qb = QueryBuilder() + qb.append(Computer, tag='computer') + all_computers = qb.get_results_dict() + computer_found_in_db = False + if len(all_computers)>0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(voro_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = voro_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/voronoi.exe')) + code.set_input_plugin_name('kkr.voro') + code.set_prepend_text('ln -s /Users/ruess/sourcecodes/aiida/codes_localhost/ElementDataBase .') + code.store() 
+ try: + code = Code.get_from_string(kkr_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = kkr_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/kkr.x')) + code.set_input_plugin_name('kkr.kkr') + code.store() + print 'stored kkr code in database' + print + + # create structure alat = 6.83 # in a_Bohr abohr = 0.52917721067 # conversion factor to Angstroem units # bravais vectors @@ -61,9 +115,9 @@ def test_scf_wc_Cu_simple(self): wfd['check_dos'] = False wfd['kkr_runmax'] = 5 wfd['nsteps'] = 50 - wfd['queue_name'] = '' + wfd['queue_name'] = queuename wfd['resources']['num_machines'] = 1 - wfd['use_mpi'] = False #True + wfd['use_mpi'] = True wfd['num_rerun'] = 2 wfd['natom_in_cls_min'] = 20 @@ -71,30 +125,37 @@ def test_scf_wc_Cu_simple(self): KKRscf_wf_parameters = ParameterData(dict=wfd) # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations - VoroCode = Code.get_from_string('voronoi@my_mac') - KKRCode = Code.get_from_string('KKRcode@my_mac') + VoroCode = Code.get_from_string(voro_codename+'@'+computername) + KKRCode = Code.get_from_string(kkr_codename+'@'+computername) # Finally we use the kkrparams class to prepare a valid set of KKR parameters that are stored as a ParameterData object for the use in aiida ParaNode = ParameterData(dict=kkrparams(LMAX=2, RMAX=7, GMAX=65, NSPIN=1, RCLUSTZ=1.9).get_dict()) label = 'KKR-scf for Cu bulk' descr = 'KKR self-consistency workflow for Cu bulk' - try: - out = run(kkr_scf_wc, structure=Cu, calc_parameters=ParaNode, voronoi=VoroCode, - kkr=KKRCode, wf_parameters=KKRscf_wf_parameters, _label=label, _description=descr) - except: - print 'some Error occured in run of kkr_scf_wc' - + + # create process builder to set parameters + builder = kkr_scf_wc.get_builder() + builder.calc_parameters = ParaNode + builder.voronoi = VoroCode + builder.kkr = KKRCode + builder.structure = Cu + 
builder.wf_parameters = KKRscf_wf_parameters + builder.label = label + builder.description = descr + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) # load node of workflow print out - n = load_node(out[1]) + n = out['output_kkr_scf_wc_ParameterResults'] print '\noutputs of workflow\n-------------------------------------------------' pprint(n.get_outputs_dict()) # get output dictionary - n = n.get_outputs()[-1] out = n.get_dict() print '\n\noutput dictionary:\n-------------------------------------------------' pprint(out) diff --git a/aiida_kkr/tests/test_vorocalc.py b/aiida_kkr/tests/test_vorocalc.py new file mode 100755 index 00000000..5c6cbece --- /dev/null +++ b/aiida_kkr/tests/test_vorocalc.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python + +import pytest + +#TODO +# implement missing tests: +# * test_vca_structure +# * test_overwrite_alat_input +# * test_voronoi_after_kkr +# * test_overwrite_potential + +# some global settings + +codename = 'voronoi@iff003' +queuename = 'th1_node' + +def wait_for_it(calc, maxwait=300, dT=10): + """ + helper function used to wait until calculation reaches FINISHED state + wait for maximally seconds and check the calculation's state every
seconds + """ + from time import sleep + nsteps = maxwait/dT + print 'waiting for calculation to finish (maximally wait for {} seconds)'.format(maxwait) + istep = 0 + calcstate = u'UNKNOWN' + while istep < nsteps: + print 'checking status' + sleep(dT) + calcstate = calc.get_state() + istep += 1 + if calcstate == u'FINISHED' or calcstate == u'FAILED': + break + + if calcstate == u'FINISHED': + print 'calculation reached FINISHED state' + elif calcstate == u'FAILED': + print 'calculation in FAILED state' + else: + print 'maximum waiting time exhausted' + + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_voronoi_calculation(): + """ + Tests for the voronoi calculation + """ + + def test_startpot_Cu_simple(self): + """ + simple Cu noSOC, FP, lmax2 full example + """ + from aiida.orm import Code, load_node, DataFactory + from masci_tools.io.kkr_params import kkrparams + + ParameterData = DataFactory('parameter') + StructureData = DataFactory('structure') + + # create StructureData instance for Cu + alat = 3.61 # lattice constant in Angstroem + bravais = [[0.5*alat, 0.5*alat, 0], [0.5*alat, 0, 0.5*alat], [0, 0.5*alat, 0.5*alat]] # Bravais matrix in Ang. 
units + Cu = StructureData(cell=bravais) + Cu.append_atom(position=[0,0,0], symbols='Cu') + + # create parameterData input node using kkrparams class from masci-tools + params = kkrparams(params_type='voronoi') + params.set_multiple_values(LMAX=2, NSPIN=1, RCLUSTZ=2.3) + ParameterData = DataFactory('parameter') # use DataFactory to get ParamerterData class + ParaNode = ParameterData(dict=params.get_dict()) + + # import computer etc from database dump + from aiida.orm.importexport import import_data + import_data('files/db_dump_vorocalc.tar.gz') + + # load code from database and create new voronoi calculation + code = Code.get_from_string(codename) + + voro_calc = code.new_calc() + voro_calc.set_resources({'num_machines':1, 'tot_num_mpiprocs':1}) + voro_calc.use_structure(Cu) + voro_calc.use_parameters(ParaNode) + voro_calc.set_queue_name(queuename) + + #first run a submit-test + voro_calc.submit_test() + """ + + # now store all nodes and submit calculation + #voro_calc.store_all() + #voro_calc.submit() + + # now wait for the calculation to finish + #wait_for_it(voro_calc) + + # finally check some output + print '\n\ncheck values ...\n-------------------------------------------------' + + test_ok = voro_calc.get_state() == u'FINISHED' + print 'calculation state', voro_calc.get_state(), 'OK?', test_ok + assert test_ok + + test_ok = voro_calc.res.parser_errors == [] + print 'parser_errors', voro_calc.res.parser_errors, 'OK?', test_ok + assert test_ok + + test_ok = voro_calc.res.emin == -0.5 + print 'emin', voro_calc.res.emin, 'OK?', test_ok + assert test_ok + + test_ok = voro_calc.res.start_from_jellium_potentials + print 'jellstart', voro_calc.res.start_from_jellium_potentials, 'OK?', test_ok + assert test_ok + + test_ok = voro_calc.res.radial_meshpoints == [484] + print 'radmesh', voro_calc.res.radial_meshpoints, 'OK?', test_ok + assert test_ok + + print '\ndone with checks\n' + """ + + def test_vca_structure(self): + """ + test for vca_structure behaviour + """ + 
pass + + def test_overwrite_alat_input(self): + """ + test using 'use_alat_input' keyword in input parameters + """ + pass + + def test_voronoi_after_kkr(self): + """ + test voronoi run from parent kkr calculation (e.g. to update to a higher lmax value) + """ + pass + + def test_overwrite_potential(self): + """ + test providing overwirte_potential input node which overwrites the starting potentai with the given input + """ + pass + + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_voronoi_calculation() + Test.test_startpot_Cu_simple() diff --git a/aiida_kkr/tests/test_voronoi_parser.py b/aiida_kkr/tests/test_voronoi_parser.py new file mode 100755 index 00000000..fcdc0b39 --- /dev/null +++ b/aiida_kkr/tests/test_voronoi_parser.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_voronoi_parser(): + """ + Tests for the voronoi parser + """ + + def test_parse_voronoi_calc(self): + """ + ... 
+ """ + from aiida.orm import load_node + from aiida_kkr.parsers.voro import VoronoiParser + from aiida.orm.importexport import import_data + import_data('files/db_dump_vorocalc.tar.gz') + voro_calc = load_node('559b9d9b-3525-402e-9b24-ecd8b801853c') + parser = VoronoiParser(voro_calc) + success, outnodes = parser.parse_from_calc() + assert success diff --git a/aiida_kkr/tests/test_voroparser_functions.py b/aiida_kkr/tests/test_voroparser_functions.py deleted file mode 100644 index 2ad89ab6..00000000 --- a/aiida_kkr/tests/test_voroparser_functions.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -""" -@author: ruess -""" - -import pytest -from aiida_kkr.tools.voroparser_functions import parse_voronoi_output - -class Test_voronoi_parser_functions(): - """ - Tests for the voronoi parser functions - """ - #some global definitions - global dref, grouping_ref, outfile, potfile, atominfo, radii, inputfile - dref = {'volumes_group': {'volume_total': 3.00000186, 'volume_unit': 'alat^3', 'volume_atoms': [{'iatom': 1, 'v_atom': 0.50000031}, {'iatom': 2, 'v_atom': 0.50000031}, {'iatom': 3, 'v_atom': 0.50000031}, {'iatom': 4, 'v_atom': 0.50000031}, {'iatom': 5, 'v_atom': 0.50000031}, {'iatom': 6, 'v_atom': 0.50000031}]}, 'parser_version': 'some_version_number', 'emin': -0.5, 'alat_unit': 'a_Bohr', 'radii_atoms_group': [{'rout': 0.5590171328, 'iatom': 1, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 2, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 3, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 4, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 5, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 
'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 6, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'radii_units': 'alat'}], 'shapes': [1, 1, 1, 1, 1, 1], 'code_info_group': {'code_version': 'v1.0-6-gf0c2ac3', 'calculation_serial_number': 'voro_v1.0-6-gf0c2ac3_serial_20171207092915', 'compile_options': 'serial-O2 -r8 -traceback -i8-mkl -Wl,-stack_size,0x40000000,-stack_addr,0xf0000000'}, 'fpradius_atoms_unit': 'alat', 'alat': 5.423514, 'parser_warnings': [], 'start_from_jellium_potentials': True, 'emin_units': 'Ry', 'fpradius_atoms': [0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902], 'cluster_info_group': {'cluster_info_atoms': [{'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 1, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 2, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 3, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 4, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 5, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 6, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 7, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 8, 'tb_cluster_id': 1, 'sites': 27}], 'number_of_clusters': 1}, - 'core_states_group': {'descr_highest_lying_core_state_per_atom': ['no core states','no core states','no core states','no core states','3p','3p','3p','3p','no core states','no core states','no core states','no core states'], - 'energy_highest_lying_core_state_per_atom': [None,None,None,None,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,None,None,None,None], - 'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', - 'number_of_core_states_per_atom': [0,0,0,0,5,5,5,5,0,0,0,0]}} - grouping_ref = ['volumes_group', 'radii_atoms_group', 'code_info_group', 'cluster_info_group'] - path0 
= '../tests/files/voronoi/' - outfile = path0+'out_voronoi' - potfile = path0+'output.pot' - atominfo = path0+'atominfo.txt' - radii = path0+'radii.dat' - inputfile = path0+'inputcard' - - def test_complete_voro_output(self): - """ - Parse complete output of voronoi calculation and compare out_dict, grouping, warnings - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, outfile, potfile, atominfo, radii, inputfile) - out_dict['parser_warnings'] = msg_list - assert success - return out_dict - assert out_dict == dref - assert msg_list == [] - groups = [i for i in out_dict.keys() if 'group' in i] - assert set(groups) == set(grouping_ref) - - def test_missing_outfile(self): - """ - Parse output where out_voronoi is missing and compare error messages/rest of out_dict - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, '', potfile, atominfo, radii, inputfile) - out_dict['parser_warnings'] = msg_list - dref2 = {'parser_warnings': ['Error parsing output of voronoi: Version Info', "Error parsing output of voronoi: 'EMIN'", 'Error parsing output of voronoi: Cluster Info', 'Error parsing output of voronoi: Jellium startpot', 'Error parsing output of voronoi: SHAPE Info', 'Error parsing output of voronoi: Volume Info', 'Error parsing output of voronoi: radii.dat Info', 'Error parsing output of voronoi: full potential radius'], 'alat_unit': 'a_Bohr', 'parser_version': 'some_version_number', 'alat': 5.423514, - 'core_states_group': {'descr_highest_lying_core_state_per_atom': ['no core states','no core states','no core states','no core states','3p','3p','3p','3p','no core states','no core states','no core states','no core states'], - 'energy_highest_lying_core_state_per_atom': [None,None,None,None,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,None,None,None,None], - 
'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', - 'number_of_core_states_per_atom': [0,0,0,0,5,5,5,5,0,0,0,0]}, - 'radial_meshpoints': [484.0 for i in range(12)]} - assert not success - assert out_dict == dref2 - assert msg_list == ['Error parsing output of voronoi: Version Info', "Error parsing output of voronoi: 'EMIN'", 'Error parsing output of voronoi: Cluster Info', 'Error parsing output of voronoi: Jellium startpot', 'Error parsing output of voronoi: SHAPE Info', 'Error parsing output of voronoi: Volume Info', 'Error parsing output of voronoi: radii.dat Info', 'Error parsing output of voronoi: full potential radius'] - - def test_missing_atominfo(self): - """ - Parse output where atominfo.txt is missing and compare error messages/rest of out_dict - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, outfile, potfile, 'wrong_name', radii, inputfile) - out_dict['parser_warnings'] = msg_list - dref2 = {'parser_version': 'some_version_number', 'emin': -0.5, 'alat_unit': 'a_Bohr', 'code_info_group': {'code_version': 'v1.0-6-gf0c2ac3', 'calculation_serial_number': 'voro_v1.0-6-gf0c2ac3_serial_20171207092915', 'compile_options': 'serial-O2 -r8 -traceback -i8-mkl -Wl,-stack_size,0x40000000,-stack_addr,0xf0000000'}, 'alat': 5.423514, 'parser_warnings': ['Error parsing output of voronoi: SHAPE Info', 'Error parsing output of voronoi: Volume Info', 'Error parsing output of voronoi: radii.dat Info', 'Error parsing output of voronoi: full potential radius'], 'start_from_jellium_potentials': True, 'emin_units': 'Ry', 'cluster_info_group': {'cluster_info_atoms': [{'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 1, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 2, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 3, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 4, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 
'refpot': 1, 'iatom': 5, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 6, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 7, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 8, 'tb_cluster_id': 1, 'sites': 27}], 'number_of_clusters': 1}, - 'emin_minus_efermi': -12.370853917196168, 'emin_minus_efermi_Ry': -0.90924099999999997, 'emin_minus_efermi_Ry_units': 'Ry', 'emin_minus_efermi_units': 'eV', - 'core_states_group': {'descr_highest_lying_core_state_per_atom': ['no core states','no core states','no core states','no core states','3p','3p','3p','3p','no core states','no core states','no core states','no core states'], - 'energy_highest_lying_core_state_per_atom': [None,None,None,None,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,None,None,None,None], - 'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', - 'number_of_core_states_per_atom': [0,0,0,0,5,5,5,5,0,0,0,0]}, - 'radial_meshpoints': [484.0 for i in range(12)]} - assert not success - assert out_dict == dref2 - assert msg_list == ['Error parsing output of voronoi: SHAPE Info', 'Error parsing output of voronoi: Volume Info', 'Error parsing output of voronoi: radii.dat Info', 'Error parsing output of voronoi: full potential radius'] - return out_dict - - def test_missing_inputfile(self): - """ - Parse output where inputcard is missing and compare error messages/rest of out_dict - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, outfile, potfile, atominfo, radii, 'wrong_name') - out_dict['parser_warnings'] = msg_list - dref2 = {'volumes_group': {'volume_total': 3.00000186, 'volume_unit': 'alat^3', 'volume_atoms': [{'iatom': 1, 'v_atom': 0.50000031}, {'iatom': 2, 'v_atom': 0.50000031}, {'iatom': 3, 'v_atom': 0.50000031}, {'iatom': 4, 'v_atom': 0.50000031}, {'iatom': 5, 'v_atom': 0.50000031}, {'iatom': 6, 'v_atom': 
0.50000031}]}, 'parser_version': 'some_version_number', 'emin': -0.5, 'radii_atoms_group': [{'rout': 0.5590171328, 'iatom': 1, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 2, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 3, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 4, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 5, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 6, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'radii_units': 'alat'}], 'shapes': [1, 1, 1, 1, 1, 1], 'code_info_group': {'code_version': 'v1.0-6-gf0c2ac3', 'calculation_serial_number': 'voro_v1.0-6-gf0c2ac3_serial_20171207092915', 'compile_options': 'serial-O2 -r8 -traceback -i8-mkl -Wl,-stack_size,0x40000000,-stack_addr,0xf0000000'}, 'fpradius_atoms_unit': 'alat', 'parser_warnings': ['Error parsing output of voronoi: alat'], 'start_from_jellium_potentials': True, 'emin_units': 'Ry', 'fpradius_atoms': [0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902], 'cluster_info_group': {'cluster_info_atoms': [{'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 1, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 2, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 3, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 4, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 5, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 6, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 
7, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 8, 'tb_cluster_id': 1, 'sites': 27}], 'number_of_clusters': 1}, - 'emin_minus_efermi': -12.370853917196168, 'emin_minus_efermi_Ry': -0.90924099999999997, 'emin_minus_efermi_Ry_units': 'Ry', 'emin_minus_efermi_units': 'eV', - 'core_states_group': {'descr_highest_lying_core_state_per_atom': ['no core states','no core states','no core states','no core states','3p','3p','3p','3p','no core states','no core states','no core states','no core states'], - 'energy_highest_lying_core_state_per_atom': [None,None,None,None,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,None,None,None,None], - 'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', - 'number_of_core_states_per_atom': [0,0,0,0,5,5,5,5,0,0,0,0]}, - 'radial_meshpoints': [484.0 for i in range(12)]} - assert not success - assert out_dict == dref2 - assert msg_list == ['Error parsing output of voronoi: alat'] - return out_dict - - def test_missing_potfile(self): - """ - Parse output where output.pot is missing and compare error messages/rest of out_dict - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, outfile, 'wrong_name', atominfo, radii, inputfile) - out_dict['parser_warnings'] = msg_list - dref2 = {'volumes_group': {'volume_total': 3.00000186, 'volume_unit': 'alat^3', 'volume_atoms': [{'iatom': 1, 'v_atom': 0.50000031}, {'iatom': 2, 'v_atom': 0.50000031}, {'iatom': 3, 'v_atom': 0.50000031}, {'iatom': 4, 'v_atom': 0.50000031}, {'iatom': 5, 'v_atom': 0.50000031}, {'iatom': 6, 'v_atom': 0.50000031}]}, 'parser_version': 'some_version_number', 'fpradius_atoms': [0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902], 'alat_unit': 'a_Bohr', 'shapes': [1, 1, 1, 1, 1, 1], 'code_info_group': {'code_version': 'v1.0-6-gf0c2ac3', 'calculation_serial_number': 'voro_v1.0-6-gf0c2ac3_serial_20171207092915', 'compile_options': 
'serial-O2 -r8 -traceback -i8-mkl -Wl,-stack_size,0x40000000,-stack_addr,0xf0000000'}, 'fpradius_atoms_unit': 'alat', 'alat': 5.423514, 'parser_warnings': ["Error parsing output of voronoi: 'EMIN'", 'Error parsing output of voronoi: core_states', 'Error parsing output of voronoi: radial meshpoints'], 'start_from_jellium_potentials': True, 'radii_atoms_group': [{'rout': 0.5590171328, 'iatom': 1, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 2, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 3, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 4, 'dist_nn': 0.8660247659, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 5, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'rout': 0.5590171328, 'iatom': 6, 'dist_nn': 0.8660255824, 'rout_over_dist_nn': 64.55, 'rmt0_over_rout': 77.46, 'rmt0': 0.4330127912}, {'radii_units': 'alat'}], 'cluster_info_group': {'cluster_info_atoms': [{'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 1, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 2, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 3, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 4, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 5, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 6, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 7, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 8, 'tb_cluster_id': 1, 'sites': 27}], 'number_of_clusters': 1}} - assert not success - assert out_dict == dref2 - assert msg_list == ["Error parsing 
output of voronoi: 'EMIN'", 'Error parsing output of voronoi: core_states', 'Error parsing output of voronoi: radial meshpoints'] - return out_dict - - def test_missing_radii(self): - """ - Parse output where radii.dat is missing and compare error messages/rest of out_dict - """ - out_dict = {'parser_version': 'some_version_number'} - success, msg_list, out_dict = parse_voronoi_output(out_dict, outfile, potfile, atominfo, 'wrong_name', inputfile) - out_dict['parser_warnings'] = msg_list - dref2 = {'volumes_group': {'volume_total': 3.00000186, 'volume_unit': 'alat^3', 'volume_atoms': [{'iatom': 1, 'v_atom': 0.50000031}, {'iatom': 2, 'v_atom': 0.50000031}, {'iatom': 3, 'v_atom': 0.50000031}, {'iatom': 4, 'v_atom': 0.50000031}, {'iatom': 5, 'v_atom': 0.50000031}, {'iatom': 6, 'v_atom': 0.50000031}]}, 'parser_version': 'some_version_number', 'emin': -0.5, 'alat_unit': 'a_Bohr', 'shapes': [1, 1, 1, 1, 1, 1], 'code_info_group': {'code_version': 'v1.0-6-gf0c2ac3', 'calculation_serial_number': 'voro_v1.0-6-gf0c2ac3_serial_20171207092915', 'compile_options': 'serial-O2 -r8 -traceback -i8-mkl -Wl,-stack_size,0x40000000,-stack_addr,0xf0000000'}, 'fpradius_atoms_unit': 'alat', 'alat': 5.423514, 'parser_warnings': ['Error parsing output of voronoi: radii.dat Info'], 'start_from_jellium_potentials': True, 'emin_units': 'Ry', 'fpradius_atoms': [0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902, 0.4696902], 'cluster_info_group': {'cluster_info_atoms': [{'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 1, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 2, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 3, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 4, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 5, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 6, 'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 7, 
'tb_cluster_id': 1, 'sites': 27}, {'rmt_ref': 2.3166, 'refpot': 1, 'iatom': 8, 'tb_cluster_id': 1, 'sites': 27}], 'number_of_clusters': 1}, - 'emin_minus_efermi': -12.370853917196168, 'emin_minus_efermi_Ry': -0.90924099999999997, 'emin_minus_efermi_Ry_units': 'Ry', 'emin_minus_efermi_units': 'eV', - 'core_states_group': {'descr_highest_lying_core_state_per_atom': ['no core states','no core states','no core states','no core states','3p','3p','3p','3p','no core states','no core states','no core states','no core states'], - 'energy_highest_lying_core_state_per_atom': [None,None,None,None,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,-3.3287908000000002,None,None,None,None], - 'energy_highest_lying_core_state_per_atom_unit': 'Rydberg', - 'number_of_core_states_per_atom': [0,0,0,0,5,5,5,5,0,0,0,0]}, - 'radial_meshpoints': [484. for i in range(12)]} - assert not success - assert out_dict == dref2 - assert msg_list == ['Error parsing output of voronoi: radii.dat Info'] - return out_dict - - \ No newline at end of file diff --git a/aiida_kkr/tests/test_vorostart_wc.py b/aiida_kkr/tests/test_vorostart_wc.py new file mode 100755 index 00000000..2bd6beb8 --- /dev/null +++ b/aiida_kkr/tests/test_vorostart_wc.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python + +import pytest + +# some global settings + +voro_codename = 'voronoi' +computername = 'localhost' +queuename = '' + +# tests +@pytest.mark.usefixtures("aiida_env") +class Test_vorostart_workflow(): + """ + Tests for the kkr_startpot workflow + """ + + def test_vorostart_wc_Cu(self): + """ + simple Cu noSOC, FP, lmax2 full example using scf workflow + """ + from aiida.orm import Code, load_node, DataFactory + from aiida.orm.computers import Computer + from aiida.orm.querybuilder import QueryBuilder + from masci_tools.io.kkr_params import kkrparams + from aiida_kkr.workflows.voro_start import kkr_startpot_wc + from numpy import array + + ParameterData = DataFactory('parameter') + StructureData = 
DataFactory('structure') + + # create or read computer and code + # first check if computer exists already in database + qb = QueryBuilder() + qb.append(Computer, tag='computer') + all_computers = qb.get_results_dict() + computer_found_in_db = False + if len(all_computers)>0: + for icomp in range(len(all_computers)): + c = all_computers[icomp].get('computer').get('*') + if c.get_name() == computername: + computer_found_in_db = True + comp = Computer.from_backend_entity(c) + # if it is not there create a new one + if not computer_found_in_db: + comp = Computer(computername, 'test computer', transport_type='local', scheduler_type='direct', workdir='/temp/ruess/aiida_run_iff734/') + comp.set_default_mpiprocs_per_machine(4) + comp.store() + print 'computer stored now cofigure' + comp.configure() + else: + print 'found computer in database' + + # then get code from database or create a new code + from aiida.common.exceptions import NotExistent + try: + code = Code.get_from_string(voro_codename+'@'+computername) + except NotExistent as exception: + code = Code() + code.label = voro_codename + code.description = '' + code.set_remote_computer_exec((comp, '/Users/ruess/sourcecodes/aiida/codes_localhost/voronoi.exe')) + code.set_input_plugin_name('kkr.voro') + code.set_prepend_text('ln -s /Users/ruess/sourcecodes/aiida/codes_localhost/ElementDataBase .') + code.store() + + # Then set up the structure + alat = 6.83 # in a_Bohr + abohr = 0.52917721067 # conversion factor to Angstroem units + bravais = array([[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]])# bravais vectors + a = 0.5*alat*abohr + Cu = StructureData(cell=[[a, a, 0.0], [a, 0.0, a], [0.0, a, a]]) + Cu.append_atom(position=[0.0, 0.0, 0.0], symbols='Cu') + + Cu.store() + print(Cu) + + # here we create a parameter node for the workflow input (workflow specific parameter) and adjust the convergence criterion. 
+ wfd = kkr_startpot_wc.get_wf_defaults() + wfd['check_dos'] = False + wfd['natom_in_cls_min'] = 20 + wfd['num_rerun'] = 2 + wfd['queue_name'] = queuename + wfd['resources']['num_machines'] = 1 + params_vorostart = ParameterData(dict=wfd) + + # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations + VoroCode = Code.get_from_string(voro_codename+'@'+computername) + + # Finally we use the kkrparams class to prepare a valid set of KKR parameters that are stored as a ParameterData object for the use in aiida + ParaNode = ParameterData(dict=kkrparams(LMAX=2, NSPIN=1, RCLUSTZ=1.9).get_dict()) + + # create process builder to set parameters + builder = kkr_startpot_wc.get_builder() + builder.calc_parameters = ParaNode + builder.description = 'voronoi startpot workflow for Cu bulk' + builder.label = 'startpot for Cu bulk' + builder.voronoi = VoroCode + builder.structure = Cu + builder.wf_parameters = params_vorostart + + # now run calculation + from aiida.work.launch import run, submit + out = run(builder) + + # check output + n = out['results_vorostart_wc'] + n = n.get_dict() + assert n.get('successful') + assert n.get('last_voro_ok') + assert n.get('list_of_errors') == [] + assert abs(n.get('starting_fermi_energy') - 0.409241) < 10**-14 + +#run test manually +if __name__=='__main__': + from aiida import is_dbenv_loaded, load_dbenv + if not is_dbenv_loaded(): + load_dbenv() + Test = Test_vorostart_workflow() + Test.test_vorostart_wc_Cu() diff --git a/aiida_kkr/tools/common_functions.py b/aiida_kkr/tools/common_functions.py deleted file mode 100644 index a6dc3b39..00000000 --- a/aiida_kkr/tools/common_functions.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Here commonly used functions that do not need aiida-stuff (i.e. can be tested -without a database) are collected. -""" - - -#helper functions used in calculation, parser etc. 
-def get_alat_from_bravais(bravais, is3D=True): - from numpy import sqrt, sum - bravais_tmp = bravais - if not is3D: - #take only in-plane lattice to find maximum as alat - bravais_tmp = bravais[:2,:2] - return sqrt(sum(bravais_tmp**2, axis=1)).max() - -def get_Ang2aBohr(): - return 1.8897261254578281 - -def get_aBohr2Ang(): - return 1/get_Ang2aBohr() - -def get_Ry2eV(): - return 13.605693009 - -def search_string(searchkey, txt): - iline = 0 - for line in txt: - if searchkey in line: - return iline - iline+=1 - return -1 - - -def angles_to_vec(magnitude, theta, phi): - """ - convert (magnitude, theta, phi) to (x,y,z) - - theta/phi need to be in radians! - - Input can be single number, list of numpy.ndarray data - Returns x,y,z vector - """ - from numpy import ndarray, array, cos, sin - - # correct data type if necessary - if type(magnitude) == list: - magnitude = array(magnitude) - if type(theta) == list: - theta = array(theta) - if type(phi) == list: - phi = array(phi) - single_value_input = False - if type(magnitude) != ndarray: - magnitude = array([magnitude]) - single_value_input = True - if type(theta) != ndarray: - theta = array([theta]) - single_value_input = True - if type(phi) != ndarray: - phi = array([phi]) - single_value_input = True - - vec = [] - for ivec in range(len(magnitude)): - r_inplane = magnitude[ivec]*sin(theta[ivec]) - x = r_inplane*cos(phi[ivec]) - y = r_inplane*sin(phi[ivec]) - z = cos(theta[ivec])*magnitude[ivec] - vec.append([x,y,z]) - vec = array(vec) - - if single_value_input: - vec = vec[0] - - return vec - - -def vec_to_angles(vec): - """ - converts vector (x,y,z) to (magnitude, theta, phi) - """ - from numpy import array, arctan2, sqrt, shape - magnitude, theta, phi = [], [], [] - if len(vec)==3 and len(shape(vec))<2: - vec = array([vec]) - multiple_entries = False - else: - multiple_entries = True - - for ivec in range(len(vec)): - phi.append(arctan2(vec[ivec, 1], vec[ivec, 0])) - r_inplane = sqrt(vec[ivec, 0]**2+vec[ivec, 1]**2) - 
theta.append(arctan2(r_inplane, vec[ivec, 2])) - magnitude.append(sqrt(r_inplane**2+vec[ivec, 2]**2)) - if multiple_entries: - magnitude, theta, phi = array(magnitude), array(theta), array(phi) - else: - magnitude, theta, phi = magnitude[0], theta[0], phi[0] - return magnitude, theta, phi - - - -def get_version_info(outfile): - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = search_string('Code version:', tmptxt) - if itmp==-1: # try to find serial number from header of file - itmp = search_string('# serial:', tmptxt) - code_version = tmptxt[itmp].split(':')[1].split('_')[1].strip() - compile_options = tmptxt[itmp].split(':')[1].split('_')[2].strip() - serial_number = tmptxt[itmp].split(':')[1].split('_')[3].strip() - else: - code_version = tmptxt.pop(itmp).split(':')[1].strip() - itmp = search_string('Compile options:', tmptxt) - compile_options = tmptxt.pop(itmp).split(':')[1].strip() - itmp = search_string('serial number for files:', tmptxt) - serial_number = tmptxt.pop(itmp).split(':')[1].strip() - return code_version, compile_options, serial_number - - -def get_corestates_from_potential(potfile='potential'): - """Read core states from potential file""" - from numpy import zeros - txt = open(potfile).readlines() - - #get start of each potential part - istarts = [iline for iline in range(len(txt)) if 'POTENTIAL' in txt[iline]] - - n_core_states = [] #number of core states per potential - e_core_states = [] #energies of core states - l_core_states = [] #angular momentum index, i.e. 0=s, 1=p etc... 
- for ipot in range(len(istarts)): - line = txt[istarts[ipot]+6] - n = int(line.split()[0]) - n_core_states.append(n) - elevels = zeros(n) #temp array for energies - langmom = zeros(n, dtype=int) #temp array for angular momentum index - for icore in range(n): - line = txt[istarts[ipot]+7+icore].split() - langmom[icore] = int(line[0]) - elevels[icore] = float(line[1].replace('D', 'E')) - e_core_states.append(elevels) - l_core_states.append(langmom) - - return n_core_states, e_core_states, l_core_states - - -def get_highest_core_state(nstates, energies, lmoments): - """Find highest lying core state from list of core states, needed to find and check energy contour""" - idx = energies.argmax() - lval = lmoments[idx] - nquant = sum(lmoments == lval) + lval - level_descr = '%i%s'%(nquant, 'spdfgh'[lval]) - - return lval, energies[idx], level_descr - - -def interpolate_dos(dospath, return_original=False, ): - """ - interpolation function copied from complexdos3 fortran code - - Principle of DOS here: Two-point contour integration - for DOS in the middle of the two points. The input DOS - and energy must be complex. Parameter deltae should be - of the order of magnitude of eim:: - - <-2*deltae-> _ - /\ | DOS=(n(1)+n(2))/2 + (n(1)-n(2))*eim/deltae - / \ | - (1) (2) 2*i*eim=2*i*pi*Kb*Tk - / \ | - / \ | - ------------------------ (Real E axis) - - :param input: dospath, path where 'complex.dos' file can be found - - :returns: E_Fermi, numpy array of interpolated dos - - :note: output units are in Ry! 
- """ - from numpy import array, real, imag - - f = open(dospath+'/complex.dos', 'r') - text = f.readline() # dummy readin of header, may be replaced later - npot = int(f.readline().split()[0]) - iemax = int(f.readline().split()[0]) - lmax = int(f.readline().split()[0]) - - dosnew_all_atoms = [] - dos_all_atoms = [] - - for i1 in range(npot): - #print('Reading potential',i1) - # Read header (not used) - for iheader in range(3): - text = f.readline() - - # extract EF - ef = float(f.readline().split()[7]) - - # some more dummy lines - for iheader in range(5,9+1): - text = f.readline() - - # now header is done. start reading DOS - # Read dos: (total dos stored at DOS(LMAX+1,IE)) - dos_l_cmplx = [] - for ie in range(iemax): - tmpline = f.readline().replace('(','').replace(')','').replace(',','').split() - ez = float(tmpline[0])+1j*float(tmpline[1]) - dostmp_complex = [[tmpline[len(tmpline)-2], tmpline[len(tmpline)-1]]] - dostmp_complex += [[tmpline[iline], tmpline[iline+1]] for iline in range(2,len(tmpline)-2,2)] - dostmp = [ez]+[float(ds[0])+1j*float(ds[1]) for ds in dostmp_complex] - dos_l_cmplx.append(dostmp) - dos_l_cmplx = array(dos_l_cmplx) - dos_l = imag(dos_l_cmplx.copy()) - dos_l[:,0] = real(dos_l_cmplx.copy()[:,0]) - dos_all_atoms.append(dos_l) - - # Compute and write out corrected dos at new (middle) energy points: - dosnew = [] - ez = dos_l_cmplx[:,0] - for ie in range(1, iemax-1): - deltae = real(ez[ie+1] - ez[ie]) - eim = imag(ez[ie]) - enew = real(ez[ie]) # Real quantity - - tmpdos = [enew] - for ll in range(1,lmax+3): - t = (dos_l_cmplx[ie-1, ll]-dos_l_cmplx[ie+1, ll])*0.5*(0.0+eim*1j)/deltae - #print ie+1, ll, dos_l_cmplx[ie, ll], deltae, eim, t, shape(dos_l_cmplx[ie]), lmax - #tmpdos.append(dos_l_cmplx[ie, ll] + 0.5*(dos_l_cmplx[ie-1, ll]-dos_l_cmplx[ie+1, ll])*(0.+1j*eim)/deltae) - tmpdos.append(dos_l_cmplx[ie, ll]+t) - tmpdos = array(tmpdos) - # build imaginary part (factor -1/2pi is already included) - tmpdos = array([real(tmpdos[0])]+[imag(ds) for 
ds in tmpdos[1:]]) - dosnew.append(tmpdos) - - # save to big array with all atoms - dosnew_all_atoms.append(dosnew) - - if i1 != npot: - text = f.readline() # dummy line - - dosnew_all_atoms = array(dosnew_all_atoms) - dos_all_atoms = array(dos_all_atoms) - - # close complex.dos file - f.close() - - if return_original: - return ef, dos_all_atoms, dosnew_all_atoms - else: - return ef, dosnew_all_atoms - -def get_ef_from_potfile(potfile): - """ - extract fermi energy from potfile - """ - f = open(potfile) - txt = f.readlines() - f.close() - ef = float(txt[3].split()[1]) - return ef - diff --git a/aiida_kkr/tools/common_workfunctions.py b/aiida_kkr/tools/common_workfunctions.py index 6c5fceca..1596abfd 100644 --- a/aiida_kkr/tools/common_workfunctions.py +++ b/aiida_kkr/tools/common_workfunctions.py @@ -1,18 +1,13 @@ -#!/usr/bin/env python2 # -*- coding: utf-8 -*- """ Here workfunctions and normal functions using aiida-stuff (typically used within workfunctions) are collected. """ -if __name__=='__main__': - from aiida import is_dbenv_loaded, load_dbenv - if not is_dbenv_loaded(): - load_dbenv() from aiida.common.exceptions import InputValidationError from aiida.work import workfunction as wf from aiida.orm import DataFactory -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams #define aiida structures from DataFactory of aiida ParameterData = DataFactory('parameter') @@ -223,7 +218,7 @@ def test_and_get_codenode(codenode, expected_code_type, use_exceptions=False): return code -def get_inputs_kkr(code, remote, options, label='', description='', parameters=None, serial=False): +def get_inputs_kkr(code, remote, options, label='', description='', parameters=None, serial=False, imp_info=None): """ Get the input for a voronoi calc. Wrapper for KkrProcess setting structure, code, options, label, description etc. 
@@ -235,10 +230,12 @@ def get_inputs_kkr(code, remote, options, label='', description='', parameters=N KkrProcess = KkrCalculation.process() # then reuse common inputs setter - inputs = get_inputs_common(KkrProcess, code, remote, None, options, label, description, parameters, serial) + inputs = get_inputs_common(KkrProcess, code, remote, None, options, label, + description, parameters, serial, imp_info) return inputs - + + def get_inputs_kkrimporter(code, remote, options, label='', description='', parameters=None, serial=False): """ @@ -249,7 +246,8 @@ def get_inputs_kkrimporter(code, remote, options, label='', description='', para KkrProcess = KkrCalculation.process() # then reuse common inputs setter - inputs = get_inputs_common(KkrProcess, code, remote, None, options, label, description, parameters, serial) + inputs = get_inputs_common(KkrProcess, code, remote, None, options, label, + description, parameters, serial) return inputs @@ -264,16 +262,35 @@ def get_inputs_voronoi(code, structure, options, label='', description='', param VoronoiProcess = VoronoiCalculation.process() # then reuse common inputs setter all options - inputs = get_inputs_common(VoronoiProcess, code, None, structure, options, label, description, params, serial) + inputs = get_inputs_common(VoronoiProcess, code, None, structure, options, label, + description, params, serial) return VoronoiProcess, inputs -def get_inputs_common(process, code, remote, structure, options, label, description, params, serial): +def get_inputs_kkrimp(code, options, label='', description='', parameters=None, serial=False, imp_info=None, host_GF=None, imp_pot=None): + """ + Get the input for a kkrimp calc. + Wrapper for KkrimpProcess setting structure, code, options, label, description etc. + :param code: a valid KKRimpcode installation (e.g. 
input from Code.get_from_string('codename@computername')) + TBD + """ + + from aiida_kkr.calculations.kkrimp import KkrimpCalculation + KkrimpProcess = KkrimpCalculation.process() + + # then reuse common inputs setter + inputs = get_inputs_common(KkrimpProcess, code, None, None, options, label, + description, parameters, serial, imp_info, host_GF, imp_pot) + + return inputs + + +def get_inputs_common(process, code, remote, structure, options, label, description, params, serial, imp_info=None, host_GF=None, imp_pot=None): """ Base function common in get_inputs_* functions for different codes """ - inputs = process.get_inputs_template() + inputs = process.get_builder() if structure: inputs.structure = structure @@ -286,27 +303,33 @@ def get_inputs_common(process, code, remote, structure, options, label, descript if params: inputs.parameters = params + + if not options: + options = {} - for key, val in options.iteritems(): - if val==None: - #leave them out, otherwise the dict schema won't validate - continue - else: - inputs._options[key] = val + #for key, val in options.iteritems(): + # if val==None: + # #leave them out, otherwise the dict schema won't validate + # continue + # else: + # inputs.options[key] = val if description: - inputs['_description'] = description + inputs.description = description else: - inputs['_description'] = '' + inputs.description = '' if label: - inputs['_label'] = label + inputs.label = label else: - inputs['_label'] = '' + inputs.label = '' if serial: - inputs._options.withmpi = False # for now - inputs._options.resources = {"num_machines": 1} + options['withmpi'] = False # for now + options['resources'] = {"num_machines": 1} + + if options: + inputs.options = options ''' options = { "max_wallclock_seconds": int, @@ -323,6 +346,16 @@ def get_inputs_common(process, code, remote, structure, options, label, descript "prepend_text": unicode, "append_text": unicode} ''' + + # for kkrimp calculations + if imp_info is not None: + 
inputs.impurity_info = imp_info + + if host_GF is not None: + inputs.host_Greenfunction_folder = host_GF + + if imp_pot is not None: + inputs.impurity_potential = imp_pot return inputs @@ -360,8 +393,8 @@ def generate_inputcard_from_structure(parameters, structure, input_filename, par from aiida.common.constants import elements as PeriodicTableElements from numpy import array - from aiida_kkr.tools.kkr_params import kkrparams - from aiida_kkr.tools.common_functions import get_Ang2aBohr, get_alat_from_bravais + from masci_tools.io.kkr_params import kkrparams + from masci_tools.io.common_functions import get_Ang2aBohr, get_alat_from_bravais from aiida_kkr.calculations.voro import VoronoiCalculation #list of globally used constants @@ -437,8 +470,9 @@ def generate_inputcard_from_structure(parameters, structure, input_filename, par if isvoronoi: from numpy import where mask_replace_Bi_Pb = where(charges==83) - charges[mask_replace_Bi_Pb] = 82 - print('WARNING: Bi potential not available, using Pb instead!!!') + if len(mask_replace_Bi_Pb[0])>0: + charges[mask_replace_Bi_Pb] = 82 + print('WARNING: Bi potential not available, using Pb instead!!!') ###################################### @@ -575,9 +609,9 @@ def structure_from_params(parameters): :returns: success, boolean to determine if structure creatoin was successful :returns: structure, an aiida StructureData object """ - from aiida_kkr.tools.common_functions import get_aBohr2Ang + from masci_tools.io.common_functions import get_aBohr2Ang from aiida.common.constants import elements as PeriodicTableElements - from aiida_kkr.tools.kkr_params import kkrparams + from masci_tools.io.kkr_params import kkrparams from numpy import array #check input @@ -898,4 +932,4 @@ def vca_check(structure, parameters): calc.submit() print("submitted calculation; calc=Calculation(uuid='{}') # ID={}".format(calc.uuid, calc.dbnode.pk)) #""" -''' \ No newline at end of file +''' diff --git a/aiida_kkr/tools/kkr_params.py 
b/aiida_kkr/tools/kkr_params.py deleted file mode 100644 index 01bc6cd5..00000000 --- a/aiida_kkr/tools/kkr_params.py +++ /dev/null @@ -1,1190 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" -In this module you find the kkrparams class that helps defining the KKR input parameters -Also some defaults for the parameters are defined -""" -#use print('message') instead of print 'message' in python 2.7 as well: -from __future__ import print_function -# redefine raw_input for python 3/2.7 compatilbility -from sys import version_info -if version_info[0] >= 3: - def raw_input(msg): - return input(msg) - - -__copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH," - "IAS-1/PGI-1, Germany. All rights reserved.") -__license__ = "MIT license, see LICENSE.txt file" -__version__ = "0.6" -__contributors__ = u"Philipp Rüßmann" - -# This defines the default parameters for KKR used in the aiida plugin: -__kkr_default_params__ = {"LMAX": 3, # lmax-cutoff - "INS": 1, # use shape corrections (full potential) - "KSHAPE": 2, # basically the same information as INS (KSHAPE=2*INS should always hold!) - "NSPIN": 2, # spin-polarized calculation (but by default not automatically initialized with external field) - "RMAX": 10., # Madelung sum real-space cutoff - "GMAX": 100., # Madelung sum reciprocal-space cutoff - "RCLUSTZ": 2.3 # size of screening cluster (in alat units) - } - - - -class kkrparams(object): - """ - Class for creating and handling the parameter input for a KKR calculation - Optional keyword arguments are passed to init and stored in values dictionary. 
- - Example usage: - params = kkrparams(LMAX=3, BRAVAIS=array([[1,0,0], [0,1,0], [0,0,1]])) - - Alternatively values can be set afterwards either individually with - params.set_value('LMAX', 3) - or multiple keys at once with - params.set_multiple_values(EMIN=-0.5, EMAX=1) - - Other useful functions: - - print the description of a keyword: params.get_description([key]) where [key] is a string for a keyword in params.values - - print a list of mandatory keywords: params.get_all_mandatory() - - print a list of keywords that are set including their value: params.get_set_values() - - Note: KKR-units (e.g. atomic units with energy in Ry, length in a_Bohr) are assumed - except for the keys'', '', 'ZPERIODL', and 'ZPERIODR' which should be given in Ang. units! - """ - - def __init__(self, **kwargs): - """ - Initialize class instance with containing the attribute values that also have - a format, mandatory flags (defaults for KKRcode, changed for example via params_type='voronoi' keyword) and a description. - """ - if 'params_type' in kwargs: - self.__params_type = kwargs.pop('params_type') - else: - #parameter are set for kkr or voronoi code? 
(changes mandatory flags) - self.__params_type = 'kkr' #default value, also possible: 'voronoi', 'kkrimp' - valid_types = ['kkr', 'voronoi', 'kkrimp'] - if self.__params_type not in valid_types: - raise ValueError("params_type can only be one of {} but got {}".format(valid_types, self.__params_type)) - - # initialize keywords dict - if self.__params_type == 'kkrimp': - keyw = self._create_keywords_dict_kkrimp(**kwargs) - else: - keyw = self._create_keywords_dict(**kwargs) - - #values of keywords: - self.values = {} - #formatting info - self.__format = {} - #mandatory flag - self._mandatory = {} - # description of each key - self.__description = {} - - for key in keyw: - self.values[key] = keyw[key][0] - self.__format[key] = keyw[key][1] - self._mandatory[key] = keyw[key][2] - self.__description[key] = keyw[key][3] - - # update mandatory set for voronoi, kkrimp cases - self._update_mandatory() - - - @classmethod - def get_KKRcalc_parameter_defaults(self, silent=False): - """ - set defaults (defined in header of this file) and returns dict, kkrparams_version - """ - p = kkrparams() - for key, val in __kkr_default_params__.items(): - p.set_value(key,val,silent=silent) - return dict(p.get_set_values()), __version__ - - - def get_dict(self, group=None, subgroup=None): - """ - Returns values dictionary. - - Prints values belonging to a certain group only if the 'group' argument - is one of the following: 'lattice', 'chemistry', 'accuracy', - 'external fields', 'scf cycle', 'other' - - Additionally the subgroups argument allows to print only a subset of - all keys in a certain group. 
The following subgroups are available: - in 'lattice' group: '2D mode', 'shape functions' - in 'chemistry' group: 'Atom types', 'Exchange-correlation', 'CPA mode', - '2D mode' - in 'accuracy' group: 'Valence energy contour', 'Semicore energy contour', - 'CPA mode', 'Screening clusters', 'Radial solver', - 'Ewald summation', 'LLoyd' - """ - out_dict = self.values - - #check for grouping - group_searchstrings = {'lattice':'Description of lattice', - 'chemistry':'Chemistry', - 'external fields':'External fields:', - 'accuracy':'Accuracy', - 'scf cycle':'Self-consistency control:', - 'other':['Running and test options', 'Name of potential and shapefun file']} - subgroups_all = {'lattice':['2D mode', 'shape functions'], - 'chemistry':['Atom types', 'Exchange-correlation', 'CPA mode', '2D mode'], - 'accuracy':['Valence energy contour', 'Semicore energy contour', - 'CPA mode', 'Screening clusters', 'Radial solver', - 'Ewald summation', 'LLoyd']} - if group in ['lattice', 'chemistry', 'accuracy', 'external fields', 'scf cycle', 'other']: - print('Returning only values belonging to group %s'%group) - tmp_dict = {} - for key in out_dict.keys(): - desc = self.__description[key] - key_in_group = False - if group_searchstrings[group] != 'other': - if group_searchstrings[group] in desc: - key_in_group = True - else: - if group_searchstrings[group][0] in desc or group_searchstrings[group][1] in desc: - key_in_group = True - if key_in_group: - tmp_dict[key] = self.values[key] - - #check for subgrouping and overwrite tmp_dict accordingly - if group in ['lattice', 'chemistry', 'accuracy']: - if subgroup in subgroups_all[group]: - print('Restrict keys additionally to subgroup %s'%subgroup) - tmp_dict2 = {} - for key in tmp_dict.keys(): - desc = self.__description[key] - key_in_group = False - if subgroup in desc: - key_in_group = True - if key_in_group: - tmp_dict2[key] = self.values[key] - tmp_dict = tmp_dict2 - - # overwrite out_dict with tmp_dict - out_dict = tmp_dict - - return 
out_dict - - - def _get_type_from_string(self, fmtstr): - """Helper function of get_type""" - if 'f' in fmtstr or 'e' in fmtstr: - keytype = float - elif 'i' in fmtstr: - keytype = int - elif 'l' in fmtstr: - keytype = bool - elif 's' in fmtstr: - keytype = str - else: - print('Error: type of keyvalue not found:', fmtstr) - raise TypeError('Type not found for format string: {}'.format(fmtstr)) - return keytype - - - def get_type(self, key): - """Extract expected type of 'key' from format info""" - try: - fmtstr = self.__format[key] - except KeyError: - fmtstr = None - if fmtstr is not None: - # simple format or complex pattern - simplefmt = True - if fmtstr.count('%') > 1: - simplefmt = False - if simplefmt: - keytype = self._get_type_from_string(fmtstr) - else: - fmtlist = fmtstr.replace('\n','').replace(' ','').split('%')[1:] - keytype = [] - for fmtstr in fmtlist: - keytype.append(self._get_type_from_string(fmtstr)) - return keytype - else: - return None - - - def _check_valuetype(self, key): - """Consistency check if type of value matches expected type from format info""" - - # check if entry is numpy array and change to list automatically: - try: - tmpval = self.values[key].flatten().tolist() - except: - tmpval = self.values[key] - tmptype = type(tmpval) - - # get type of value - if tmptype == list: - valtype = [] - for val in range(len(tmpval)): - valtype.append(type(tmpval[val])) - else: - valtype = tmptype - #print(key, valtype, self.get_type(key)) - - # check if type matches format info - cmptypes = self.get_type(key) - success = True - if cmptypes is not None: - #print(key, type(valtype), valtype, cmptypes) - changed_type_automatically = False - if valtype == int and cmptypes == float: - changed_type_automatically = True - self.values[key] = float(self.values[key]) - elif type(valtype) == list: - for ival in range(len(valtype)): - if valtype[ival] == int and cmptypes == float: - changed_type_automatically = True - self.values[key][ival] = 
float(self.values[key][ival]) - elif valtype != cmptypes and tmpval is not None: - success = False - print('Error: type of value does not match expected type for ', key, self.values[key], cmptypes) - raise TypeError('type of value does not match expected type for key={}; value={}; expected type={}'.format(key, self.values[key], cmptypes)) - - if changed_type_automatically: - print('Warning: filling value of "%s" with integer but expects float. Converting automatically and continue'%key) - - return success - - - def get_value(self, key): - """Gets value of keyword 'key'""" - if key not in self.values.keys(): - print('Error key ({}) not found in values dict! {}'.format(key, self.values)) - raise KeyError - else: - # deal with special cases of runopt and testopt (lists of codewords) - if key in ['RUNOPT', 'TESTOPT'] and self.values[key] is None: - return [] - else: - return self.values[key] - - - def set_value(self, key, value, silent=False): - """Sets value of keyword 'key'""" - if value is None: - if not silent: - print('Warning setting value None is not permitted!') - print('Use remove_value funciton instead! Ignore keyword {}'.format(key)) - else: - self.values[key] = value - self._check_valuetype(key) - - - def remove_value(self, key): - """Removes value of keyword 'key', i.e. resets to None""" - self.values[key] = None - - - def set_multiple_values(self, **kwargs): - """Set multiple values (in example value1 and value2 of keywords 'key1' and 'key2') given as key1=value1, key2=value2""" - for key in kwargs: - key2 = key - if key not in self.values.keys(): - key2 = '<'+key+'>' - #print('setting', key2, kwargs[key]) - self.set_value(key2, kwargs[key]) - - - def get_set_values(self): - """Return a list of all keys/values that are set (i.e. 
not None)""" - set_values = [] - added = 0 - for key in self.values.keys(): - if self.values[key] is not None: - set_values.append([key, self.values[key]]) - added += 1 - if added == 0: - print('No values set') - return set_values - - - def get_all_mandatory(self): - """Return a list of mandatory keys""" - self._update_mandatory() - mandatory_list = [] - for key in self.values.keys(): - if self.is_mandatory(key): - mandatory_list.append(key) - return mandatory_list - - - def is_mandatory(self, key): - """Returns mandatory flag (True/False) for keyword 'key'""" - return self._mandatory[key] - - - def get_description(self, key): - """Returns description of keyword 'key'""" - return self.__description[key] - - - def _create_keywords_dict(self, **kwargs): - """ - Creates KKR inputcard keywords dictionary and fills entry if value is given in **kwargs - - entries of keyword dictionary are: 'keyword', [value, format, keyword_mandatory, description] - - where - - - 'value' can be a single entry or a list of entries - - 'format' contains formatting info - - 'keyword_mandatory' is a logical stating if keyword needs to be defined to run a calculation - - 'description' is a string containing human redable info about the keyword - """ - - default_keywords = dict([# complete list of keywords, detault all that are not mandatory to None - # lattice - ('ALATBASIS', [None, '%f', True, 'Description of lattice: Length unit in Bohr radii usually conventional lattice parameter']), - ('BRAVAIS', [None, '%f %f %f\n%f %f %f\n%f %f %f', True, 'Description of lattice: Bravais vectors in units of [ALATBASIS]']), - ('NAEZ', [None, '%i', True, 'Description of lattice: Number of sites in unit cell']), - ('', [None, '%f %f %f', True, 'Description of lattice: Positions of sites in unit cell']), - ('CARTESIAN', [None, '%l', False, 'Description of lattice: Interpret the basis vector coordinates as reduced (w. 
respect to bravais) or as cartesian (in lattice constant units)']), - ('INTERFACE', [None, '%l', False, 'Description of lattice, 2D mode: needs to be TRUE for 2D calculation']), - ('', [None, '%i', False, 'Description of lattice, 2D mode: Number of basis sites forming the half-infinite lattice to the lower (=left) part of the slab.']), - ('', [None, '%f %f %f', False, 'Description of lattice, 2D mode: Positions of sites forming the basis sites of the half-infinite lattice to the lower (=left) part of the slab.']), - ('ZPERIODL', [None, '%f %f %f', False, 'Description of lattice, 2D mode: Lattice vector describing the periodicity perpendicular to the slab-plane for the half-infinite lattice to the lower (=left) part of the slab (plays the role of the 3rd Bravais vector for this half-infinite lattice). The vectors are periodically repeated by the ZPERIODL vector.']), - ('', [None, '%i', False, 'Description of lattice, 2D mode: Number of basis sites forming the half-infinite lattice to the upper (=right) part of the slab.']), - ('', [None, '%f %f %f', False, 'Description of lattice, 2D mode: Positions of sites forming the basis sites of the half-infinite lattice to the upper (=right) part of the slab.']), - ('ZPERIODR', [None, '%f %f %f', False, 'Description of lattice, 2D mode: Lattice vector describing the periodicity perpendicular to the slab-plane for the half-infinite lattice to the upper (=right) part of the slab (plays the role of the 3rd Bravais vector for this half-infinite lattice). The vectors are periodically repeated by the ZPERIODR vector.']), - ('KSHAPE', [None, '%i', False, 'Description of lattice, shape functions: 0 for ASA ([INS]=0), 2 for full potential ([INS]=1)']), - ('', [None, '%i', False, 'Description of lattice, shape functions: Indexes which shape function from the shape-function file to use in which atom. 
Default is that each atom has its own shape function.']), - # chemistry - ('', [None, '%f', True, 'Chemistry, Atom types: Nuclear charge per atom. Negative value signals to use value read in from the potential file.']), - ('NSPIN', [None, '%i', True, 'Chemistry, Atom types: Number of spin directions in potential. Values 1 or 2']), - ('KVREL', [None, '%i', False, 'Chemistry, Atom types: Relativistic treatment of valence electrons. Takes values 0 (Schroedinger), 1 (Scalar relativistic), 2 (Dirac ; works only in ASA mode)']), - ('', [None, '%f', False, 'Chemistry, Atom types: Spin-orbit coupling scaling per atom. Takes values between 0. (no spin-orbit) and 1. (full spin-orbit). Works only in combination with the Juelich spin orbit solver (runoption NEWSOSOL)']), - ('KEXCOR', [None, '%i', False, 'Chemistry, Exchange-correlation: Type of exchange correlation potential. Takes values 0 (LDA, Moruzzi-Janak-Williams), 1 (LDA, von Barth-Hedin), 2 (LDA, Vosko-Wilk-Nussair), 3 (GGA, Perdew-Wang 91), 4 (GGA, PBE), 5 (GGA, PBEsol)']), - ('LAMBDA_XC', [None, '%f', False, 'Chemistry, Exchange-correlation: Scale the magnetic part of the xc-potential and energy. Takes values between 0. (fully suppressed magnetisc potential) and 1. (normal magnetic potential).']), - ('NAT_LDAU', [None, '%i', False, 'Chemistry, Exchange-correlation: Numer of atoms where LDA+U will be used']), - ('LDAU_PARA', [None, '%i %i %f %f %f', False, 'Chemistry, Exchange-correlation: For each atom where LDA+U should be used, the entries are: [atom type] [angular mom. 
to apply LDA+U] [Ueff] [Jeff] [Eref] where [atom type] is between 1...[NATYP].']), - ('KREADLDAU', [None, '%i', False, "Chemistry, Exchange-correlation: Takes values 0 or 1; if [KREADLDAU]=1 then read previously calculated LDA+U matrix elements from file 'ldaupot'."]), - ('NATYP', [None, '%i', False, 'Chemistry, CPA mode: Number of atom types; CPA is triggered by setting [NATYP]>[NAEZ].']), - ('', [None, '%i', False, 'Chemistry, CPA mode: Takes values 1 < [] < [NAEZ] Assigns the position (given by []) where the atom-dependent read-in potential is situated. E.g., if the 3rd-in-the-row potential should be positioned at the 2nd vector, then the 3rd entry of the list should have the value 2.']), - ('', [None, '%f', False, 'Chemistry, CPA mode: Takes values 0. < [] < 1. Assigns the alloy-concentration corresponding to the atom-dependent read-in potential. Together with the variable , assigns the number and concentration of the atom-dependent potentials residing at each site form 1 to [NAEZ]. The sum of concentrations at each site should equal 1.']), - ('', [None, '%i', False, 'Chemistry, 2D mode: Controls the type of t-matrix at the lower (=left) half-crystal sites in case of embedding as these are given in the left-decimation file (i.e., changes the order compared to the one in the left-decimation file).']), - ('', [None, '%i', False, 'Chemistry, 2D mode: Controls the type of t-matrix at the upper (=right) half-crystal sites in case of embedding as these are given in the right-decimation file (i.e., changes the order compared to the one in the right-decimation file).']), - # external fields - ('LINIPOL', [None, '%l', False, 'External fields: If TRUE, triggers an external magn. field per atom in the first iteration.']), - ('HFIELD', [None, '%f', False, 'External fields: Value of an external magnetic field in the first iteration. 
Works only with LINIPOL, XINIPOL']), - ('XINIPOL', [None, '%i', False, 'External fields: Integer multiplying the HFIELD per atom']), - ('VCONST', [None, '%f', False, 'External fields: Constant potential shift in the first iteration.']), - # accuracy - ('LMAX', [None, '%i', True, 'Accuracy: Angular momentum cutoff']), - ('BZDIVIDE', [None, '%i %i %i', False, 'Accuracy: Maximal Brillouin zone mesh. Should not violate symmetry (e.g cubic symmetry implies i1=i2=i3; terragonal symmetry in xy implies i1=i2; i1=i2=i3 is always safe.)']), - ('EMIN', [None, '%f', False, 'Accuracy, Valence energy contour: Lower value (in Ryd) for the energy contour']), - ('EMAX', [None, '%f', False, 'Accuracy, Valence energy contour: Maximum value (in Ryd) for the DOS calculation Controls also [NPT2] in some cases']), - ('TEMPR', [None, '%f', False, 'Accuracy, Valence energy contour: Electronic temperature in K.']), - ('NPT1', [None, '%i', False, 'Accuracy, Valence energy contour: Number of energies in the 1st part of the rectangular contour ("going up").']), - ('NPT2', [None, '%i', False, 'Accuracy, Valence energy contour: Number of energies in the 2nd part of the rectangular contour ("going right").']), - ('NPT3', [None, '%i', False, 'Accuracy, Valence energy contour: Number of energies in the 3rd part of the rectangular contour (Fermi smearing part).']), - ('NPOL', [None, '%i', False, 'Accuracy, Valence energy contour: Number of Matsubara poles For DOS calculations, set [NPOL]=0']), - ('EBOTSEMI', [None, '%f', False, 'Accuracy, Semicore energy contour: Bottom of semicore contour in Ryd.']), - ('EMUSEMI', [None, '%f', False, 'Accuracy, Semicore energy contour: Top of semicore contour in Ryd.']), - ('TKSEMI', [None, '%f', False, 'Accuracy, Semicore energy contour: "Temperature" in K controlling height of semicore contour.']), - ('NPOLSEMI', [None, '%i', False, 'Accuracy, Semicore energy contour: Control of height of semicore contour: Im z = (2 * [NPOLSEMI] * pi * kB * [TKSEMI] ) with 
kB=0.6333659E-5']), - ('N1SEMI', [None, '%i', False, 'Accuracy, Semicore energy contour: Number of energies in first part of semicore contour ("going up").']), - ('N2SEMI', [None, '%i', False, 'Accuracy, Semicore energy contour: Number of energies in second part of semicore contour ("going right").']), - ('N3SEMI', [None, '%i', False, 'Accuracy, Semicore energy contour: Number of energies in third part of semicore contour ("going down").']), - ('FSEMICORE', [None, '%f', False, 'Accuracy, Semicore energy contour: Initial normalization factor for semicore states (approx. 1.).']), - ('CPAINFO', [None, '%f %i', False, 'Accuracy, CPA mode: CPA-error max. tolerance and max. number of CPA-cycle iterations.']), - ('RCLUSTZ', [None, '%f', False, 'Accuracy, Screening clusters: Radius of screening clusters in units of [ALATBASIS], default is 11 Bohr radii.']), - ('RCLUSTXY', [None, '%f', False, 'Accuracy, Screening clusters: If [RCLUSTXY] does not equal [RCLUSTZ] then cylindrical clusters are created with radius [RCLUSTXY] and height [RCLUSTZ].']), - ('', [None, '%f', False, 'Accuracy, Screening clusters: Muffin tin radius in Bohr radii for each site forming screening clusters. Negative value signals automatic calculation by the code.']), - ('NLEFTHOS', [None, '%i', False, 'Accuracy, Screening clusters 2D mode: The vectors [] are repeated i=1,...,[NLEFTHOS] times, shifted by i*[ZPERIODL], for the later formation of screening clusters.']), - ('', [None, '%f', False, 'Accuracy, Screening clusters 2D mode: Muffin-tin radius in Bohr radii for each site forming screening clusters in the lower (=left) half-crystal. 
Negative value signals automatic calculation by the code.']), - ('NRIGHTHO', [None, '%i', False, 'Accuracy, Screening clusters 2D mode: The vectors [] are repeated i=1,...,[NRIGHTHO] times, shifted by i*[ZPERIODR], for the later formation of screening clusters.']), - ('', [None, '%f', False, 'Accuracy, Screening clusters 2D mode: Muffin-tin radius in Bohr radii for each site forming screening clusters in the upper (=right) half-crystal. Negative value signals automatic calculation by the code.']), - ('INS', [None, '%i', False, 'Accuracy, Radial solver: Takes values 0 for ASA and 1 for full potential Must be 0 for Munich Dirac solver ([KREL]=2)']), - ('ICST', [None, '%i', False, 'Accuracy, Radial solver: Number of iterations in the radial solver']), - ('R_LOG', [None, '%f', False, 'Accuracy, Radial solver: Radius up to which log-rule is used for interval width. Used in conjunction with runopt NEWSOSOL']), - ('NPAN_LOG', [None, '%i', False, 'Accuracy, Radial solver: Number of intervals from nucleus to [R_LOG] Used in conjunction with runopt NEWSOSOL']), - ('NPAN_EQ', [None, '%i', False, 'Accuracy, Radial solver: Number of intervals from [R_LOG] to muffin-tin radius Used in conjunction with runopt NEWSOSOL']), - ('NCHEB', [None, '%i', False, 'Accuracy, Radial solver: Number of Chebyshev polynomials per interval Used in conjunction with runopt NEWSOSOL']), - ('', [None, '%f', False, 'Accuracy, Radial solver: Full potential limit per atom (in Bohr radii); at points closer to the nucleus, the potential is assumed spherical. Negative values indicate to use values from potential file. Values larger than the muffin tin indicate to use the muffin tin radius.']), - ('RMAX', [None, '%f', True, 'Accuracy, Ewald summation for Madelung potential: Max. radius in [ALATBASIS] for real space Ewald sum']), - ('GMAX', [None, '%f', True, 'Accuracy, Ewald summation for Madelung potential: Max. 
radius in 2*pi/[ALATBASIS] for reciprocal space Ewald sum']), - ('', [None, '%i', False, "Accuracy, LLoyd's formula: Set to 1 in order to use Lloyd's formula"]), - ('', [None, '(%f, %f)', False, "Accuracy, LLoyd's formula: Energy difference for derivative calculation in Lloyd's formula"]), - ('', [None, '%e', False, 'Accuracy, Virtual atoms: For distance between scattering-centers smaller than [], free GF is set to zero. Units are Bohr radii.']), - ('', [None, '%f', False, 'Accuracy: Muffin tin radium in Bohr radii for each atom site. This sets the value of RMT used internally in the KKRcode. Needs to be smaller than the touching RMT of the cells. In particular for structure relaxations this should be kept constant.']), - # scf cycle - ('NSTEPS', [None, '%i', False, 'Self-consistency control: Max. number of self-consistency iterations. Is reset to 1 in several cases that require only 1 iteration (DOS, Jij, write out GF).']), - ('IMIX', [None, '%i', False, "Self-consistency control: Mixing scheme for potential. 0 means straignt (linear) mixing, 3 means Broyden's 1st method, 4 means Broyden's 2nd method, 5 means Anderson's method"]), - ('STRMIX', [None, '%f', False, 'Self-consistency control: Linear mixing parameter Set to 0. 
if [NPOL]=0']), - ('ITDBRY', [None, '%i', False, 'Self-consistency control: how many iterations to keep in the Broyden/Anderson mixing scheme.']), - ('FCM', [None, '%f', False, 'Self-consistency control: Factor for increased linear mixing of magnetic part of potential compared to non-magnetic part.']), - ('BRYMIX', [None, '%f', False, 'Self-consistency control: Parameter for Broyden mixing.']), - ('QBOUND', [None, '%e', False, 'Self-consistency control: Lower limit of rms-error in potential to stop iterations.']), - #code options - ('RUNOPT', [None, '%s%s%s%s%s%s%s%s', False, 'Running and test options: 8-character keywords in a row without spaces between them']), - ('TESTOPT', [None, '%s%s%s%s%s%s%s%s\n%s%s%s%s%s%s%s%s', False, 'Running and test options: optional 8-character keywords in a row without spaces between them plus a secod row of the same.']), - #file names - ('FILES', [None, '%s', False, 'Name of potential and shapefun file (list of two strings, empty string will set back to default of the one file that is supposed to be changed)']), - # special options - ('JIJRAD', [None, '%f', False, 'Radius in alat which defines the cutoff for calcultion of Jij pairs']), - ('JIJRADXY', [None, '%f', False, 'use a cylindical cluster in which Jij pairs are searched for']), - ('JIJSITEI', [None, '%i', False, 'allow for the selection of specific sites in i in the unit cell, which should be considered in the calculation (default: all sites)']), - ('JIJSITEJ', [None, '%i', False, 'allow for the selection of specific sites in j in the unit cell, which should be considered in the calculation (default: all sites)']) - ]) - - for key in kwargs: - key2 = key - if key not in default_keywords.keys(): - key2 = '<'+key+'>' - if self.__params_type=='kkrimp': - if key=='KEXCORE': - key2 = 'XC' - if key=='R_LOG': - key2 = 'RADIUS_LOGPANELS' - if key=='STRMIX': - key2 = 'MIXFAC' - if key=='RUNOPT': - key2 = 'RUNFLAG' - if key=='TESTOPT': - key2 = 'TESTFLAG' - if key=='NSTEPS': - key2 = 
'SCFSTEPS' - default_keywords[key2][0] = kwargs[key] - - return default_keywords - - - def _update_mandatory(self): - """Check if mandatory flags need to be updated if certain keywords are set""" - # initialize all mandatory flags to False and update list afterwards - for key in self.values.keys(): - self._mandatory[key] = False - - runopts = [] - if self.values.get('RUNOPT', None) is not None: - for runopt in self.values['RUNOPT']: - runopts.append(runopt.strip()) - - #For a KKR calculation these keywords are always mandatory: - mandatory_list = ['ALATBASIS', 'BRAVAIS', 'NAEZ', '', 'NSPIN', 'LMAX', 'RMAX', 'GMAX', ''] - - if self.values.get('NPOL', None) is not None and self.values['NPOL'] != 0: - mandatory_list += ['EMIN'] - #Mandatory in 2D - if self.values.get('INTERFACE', None): - mandatory_list += ['', '', 'ZPERIODL', '', '', 'ZPERIODR'] - #Mandatory in LDA+U - if 'NAT_LDAU' in self.values.keys() and 'LDAU' in runopts: - mandatory_list += ['NAT_LDAU', 'LDAU_PARA'] - #Mandatory in CPA - if self.values.get('NATYP', None) is not None and self.values['NATYP'] > self.values['NAEZ']: - mandatory_list += ['NATYP', '', ''] - #Mandatory in SEMICORE - if 'EBOTSEMI' in self.values.keys() and 'SEMICORE' in runopts: - mandatory_list += ['EBOTSEMI', 'EMUSEMI', 'TKSEMI', 'NPOLSEMI', 'N1SEMI', 'N2SEMI', 'N3SEMI', 'FSEMICORE'] - if self.values['INS'] == 1 and 'WRITEALL' not in runopts: - mandatory_list += [''] - - for key in mandatory_list: - self._mandatory[key] = True - - # overwrite if mandatory list needs to be changed (determinded from value of self.__params_type): - if self.__params_type == 'voronoi': - self._update_mandatory_voronoi() - if self.__params_type == 'kkrimp': - self._update_mandatory_kkrimp() - - - def _check_mandatory(self): - """Check if all mandatory keywords are set""" - self._update_mandatory() - for key in self.values.keys(): - if self._mandatory[key] and self.values[key] is None: - print('Error not all mandatory keys are set!') - set_of_mandatory = 
set(self.get_all_mandatory()) - set_of_keys = set([key[0] for key in self.get_set_values()]) - print(set_of_mandatory-set_of_keys, 'missing') - raise ValueError("Missing mandatory key(s): {}".format(set_of_mandatory-set_of_keys)) - - - def _check_array_consistency(self): - """Check all keys in __listargs if they match their specification (mostly 1D array, except for special cases e.g. )""" - from numpy import array, ndarray - - vec3_entries = ['', '', '', 'ZPERIODL', 'ZPERIODR'] - - #success = [True] - for key in self.__listargs.keys(): - if self.values[key] is not None: - tmpsuccess = True - #print('checking', key, self.values[key], self.__listargs[key]) - if type(self.values[key]) not in [list, ndarray]: - self.values[key] = array([self.values[key]]) - cmpdims = (self.__listargs[key], ) - if key in vec3_entries: - cmpdims = (self.__listargs[key], 3) - # automatically convert if naez==1 and only 1D array is given - if self.__listargs[key] == 1 and len(array(self.values[key]).shape) == 1 and key not in ['ZPERIODL', 'ZPERIODR']: - print('Warning: expected 2D array for %s but got 1D array, converting automatically'%key) - self.values[key] = array([self.values[key]]) - tmpdims = array(self.values[key]).shape - if tmpdims[0] != cmpdims[0]: - tmpsuccess = False - if len(tmpdims)==2: - if tmpdims[1] != cmpdims[1]: - tmpsuccess = False - #success.append(tmpsuccess) - - if not tmpsuccess: - print('check consistency:', key, self.values[key], cmpdims, tmpdims, tmpsuccess) - raise TypeError('Error: array input not consistent for key {}'.format(key)) - - - def _check_input_consistency(self, set_lists_only=False): - """Check consistency of input, to be done before wrinting to inputcard""" - from numpy import array - - # first check if all mandatory values are there - if not set_lists_only: - self._check_mandatory() - - # lists of array arguments - if self.__params_type != 'kkrimp': - keywords = self.values - naez = keywords['NAEZ'] - if keywords['NATYP'] is not None: - natyp = 
keywords['NATYP'] - else: - natyp = keywords['NAEZ'] - if keywords[''] is not None: - nlbasis = keywords[''] - else: - nlbasis = 1 - if keywords[''] is not None: - nrbasis = keywords[''] - else: - nrbasis = 1 - - listargs = dict([['', naez], ['', nlbasis], ['', nrbasis], ['', natyp], - ['', natyp], ['', natyp], ['', natyp], ['', natyp], - ['', nlbasis], ['', nrbasis], ['XINIPOL', natyp], ['', natyp], - ['', nlbasis], ['', nrbasis], ['', natyp], ['BZDIVIDE', 3], - ['', nrbasis], ['ZPERIODL', 3], ['', nrbasis], ['ZPERIODR', 3], - ['LDAU_PARA', 5], ['CPAINFO', 2], ['', 2], ['FILES', 2], ['', natyp]]) - # deal with special stuff for voronoi: - if self.__params_type == 'voronoi': - listargs[''] = natyp - self.update_to_voronoi() - special_formatting = ['BRAVAIS', 'RUNOPT', 'TESTOPT', 'FILES'] - else: - special_formatting = ['RUNFLAG', 'TESTFLAG'] - listargs = dict([['HFIELD', 2]]) - - self.__special_formatting = special_formatting - self.__listargs = listargs - - # ruturn after setting __special_formatting and __listargs lists - if set_lists_only: - return - - # check for consistency of array arguments - self._check_array_consistency() - - if self.__params_type != 'kkrimp': - # some special checks - bulkmode = False - set_values = [key[0] for key in self.get_set_values()] - if 'INTERFACE' not in set_values or self.values['INTERFACE']: - bulkmode = True - - bravais = array(self.values['BRAVAIS']) - if bulkmode and sum(bravais[2]**2)==0: - print("Error: 'BRAVAIS' matches 2D calculation but 'INTERFACE' is not set to True!") - raise ValueError - - # check if KSHAPE and INS are consistent and add missing values automatically - # WARNING: KSHAPE should be 2*INS !!! 
- if 'INS' not in set_values and 'KSHAPE' in set_values: - self.set_value('INS', self.get_value('KSHAPE')/2) - print("setting INS automatically with KSHAPE value ({})".format(self.get_value('KSHAPE')/2)) - elif 'INS' in set_values and 'KSHAPE' not in set_values: - self.set_value('KSHAPE', self.get_value('INS')*2) - print("setting KSHAPE automatically with INS value ({})".format(self.get_value('INS')*2)) - elif 'INS' in set_values and 'KSHAPE' in set_values: - ins = self.get_value('INS') - kshape = self.get_value('KSHAPE') - if (ins!=0 and kshape==0) or (ins==0 and kshape!=0): - print("Error: values of 'INS' and 'KSHAPE' are both found but are inconsistent (should be 0/0 or 1/2)") - raise ValueError('INS,KSHAPE mismatch') - - - - def fill_keywords_to_inputfile(self, is_voro_calc=False, output='inputcard'): - """ - Fill new inputcard with keywords/values - automatically check for input consistency - if is_voro_calc==True change mandatory list to match voronoi code, default is KKRcode - """ - from numpy import array - - # first check input consistency - if is_voro_calc: - self.__params_type = 'voronoi' - - # check for inconsistencies in input before writing file - self._check_input_consistency() - - - - #rename for easy reference - keywords = self.values - keyfmts = self.__format - - if self.__params_type != 'kkrimp': - sorted_keylist = [#run/testopts - 'RUNOPT', 'TESTOPT', - #lattice: - 'ALATBASIS', 'BRAVAIS', 'NAEZ', 'CARTESIAN', '', - 'INTERFACE', '', '', 'ZPERIODL', '', '', 'ZPERIODR', - 'KSHAPE', '', - # chemistry - 'NSPIN', 'KVREL', 'KEXCOR', 'LAMBDA_XC', - 'NAT_LDAU', 'LDAU_PARA', 'KREADLDAU', - '', '', - 'NATYP', '', '', - '', '', - # external fields - 'LINIPOL', 'HFIELD', 'XINIPOL', 'VCONST', - # accuracy - 'LMAX', 'BZDIVIDE', 'EMIN', 'EMAX', 'TEMPR', 'NPT1', 'NPT2', 'NPT3', 'NPOL', - 'EBOTSEMI', 'EMUSEMI', 'TKSEMI', 'NPOLSEMI', 'N1SEMI', 'N2SEMI', 'N3SEMI', 'FSEMICORE', - 'CPAINFO', - 'RCLUSTZ', 'RCLUSTXY', - '', 'NLEFTHOS', '', 'NRIGHTHO', '', - 'INS', 
'ICST', - 'R_LOG', 'NPAN_LOG', 'NPAN_EQ', 'NCHEB', '', - 'RMAX', 'GMAX', '', '', '', - # scf cycle - 'NSTEPS', 'IMIX', 'STRMIX', 'ITDBRY', 'FCM', 'BRYMIX', 'QBOUND', - #file names - 'FILES'] - else: - sorted_keylist = ['RUNFLAG', 'TESTFLAG', 'INS', 'KVREL', 'NSPIN', 'SCFSTEPS', - 'IMIX', 'ITDBRY', 'MIXFAC', 'BRYMIX', 'QBOUND', 'XC', 'ICST', - 'SPINORBIT', 'NCOLL', 'NPAN_LOGPANELFAC', 'RADIUS_LOGPANELS', - 'RADIUS_MIN', 'NPAN_LOG', 'NPAN_EQ', 'NCHEB', 'HFIELD', - 'CALCORBITALMOMENT', 'CALCFORCE', 'CALCJIJMAT'] - - #add everything that was forgotten in sorted_keylist above - for key in keywords.keys(): - if key not in sorted_keylist: - sorted_keylist += [key] - - # ensure high enough precision in inputcard writeout - for key in keyfmts.keys(): - keyfmts[key] = keyfmts[key].replace('%f', '%21.14f') - - - # write all set keys to file - tmpl = '' - for key in sorted_keylist: - if keywords[key] is not None: - #print(key) - if (not key in self.__listargs.keys()) and (not key in self.__special_formatting): - tmpfmt = (keyfmts[key]).replace('%l', '%s') - try: - repltxt = tmpfmt%(keywords[key]) - except: - #print(key, tmpfmt, keywords[key]) - repltxt = '' - for i in range(len(tmpfmt)): - repltxt += ' ' + tmpfmt[i]%(keywords[key][i]) - tmpl += '%s= %s\n'%(key, repltxt) - elif key == 'BRAVAIS': - self.values[key] = array(self.values[key]) - tmpl += ('BRAVAIS\n'+self.__format[key]+'\n')%(self.values[key][0, 0], self.values[key][0, 1], self.values[key][0, 2], - self.values[key][1, 0], self.values[key][1, 1], self.values[key][1, 2], - self.values[key][2, 0], self.values[key][2, 1], self.values[key][2, 2]) - elif key == 'RUNOPT': - runops = keywords[key] - tmpl += 'RUNOPT\n' - for iop in range(len(runops)): - repltxt = runops[iop] - nblanks = 8 - len(repltxt) - if nblanks < 0: - print('WARNING for replacement of RUNOPTION %s: too long?'%repltxt) - print('RUNOPT %s is ignored and was not set!'%repltxt) - else: - repltxt = repltxt+' '*nblanks - tmpl += repltxt - tmpl += '\n' - elif 
key == 'TESTOPT': - testops = keywords[key] - tmpl += 'TESTOPT\n' - for iop in range(len(testops)): - repltxt = testops[iop] - nblanks = 8 - len(repltxt) - if nblanks < 0: - print('WARNING for replacement of TESTOPTION %s: too long?'%repltxt) - print('TESTOPT %s is ignored and was not set!'%repltxt) - else: - repltxt = repltxt+' '*nblanks - tmpl += repltxt - if iop==8: - tmpl += '\n' - tmpl += '\n' - elif key == 'XINIPOL': - tmpl += '%s='%key - for ival in range(len(self.values[key])): - tmpl += (' %s'%self.__format[key])%self.values[key][ival] - tmpl += '\n' - elif key == 'FILES': - files_changed = 0 - if self.values[key][0]=='': - self.values[key][0]='potential' - else: - files_changed += 1 - if self.values[key][1]=='': - self.values[key][1]='shapefun' - else: - files_changed += 1 - if files_changed>0: - print('Warning: Changing file name of potential file to "%s" and of shapefunction file to "%s"'%(self.values[key][0], self.values[key][1])) - tmpl += 'FILES\n' - tmpl += '\n' - tmpl += '%s\n'%self.values[key][0] - tmpl += '\n' - tmpl += '%s\n'%self.values[key][1] - tmpl += '\n' - elif self.__params_type == 'kkrimp' and key == 'RUNFLAG' or key == 'TESTFLAG': # for kkrimp - ops = keywords[key] - tmpl += key+'=' - for iop in range(len(ops)): - repltxt = ops[iop] - tmpl += ' ' + repltxt - tmpl += '\n' - elif key in self.__listargs.keys(): - if key in ['', '', '']: # RBASIS needs special formatting since three numbers are filled per line - tmpl += '%s\n'%key - for ival in range(self.__listargs[key]): - tmpl += (self.__format[key]+'\n')%(self.values[key][ival][0], self.values[key][ival][1], self.values[key][ival][2]) - elif key in ['CPAINFO', '']: - tmpl += '%s= '%key - tmpl += (self.__format[key]+'\n')%(self.values[key][0], self.values[key][1]) - elif key in ['BZDIVIDE', 'ZPERIODL', 'ZPERIODR']: - tmpl += '%s= '%key - tmpl += (self.__format[key]+'\n')%(self.values[key][0], self.values[key][1], self.values[key][2]) - elif key in ['LDAU_PARA']: - tmpl += '%s= '%key - 
tmpl += (self.__format[key]+'\n')%(self.values[key][0], self.values[key][1], self.values[key][2], self.values[key][3], self.values[key][4]) - elif self.__params_type == 'kkrimp' and key in ['HFIELD']: # for kkrimp - tmpl += '%s= '%key - tmpl += (self.__format[key]+'\n')%(self.values[key][0], self.values[key][1]) - else: - #print(key, self.__listargs[key], len(self.values[key])) - tmpl += '%s\n'%key - for ival in range(self.__listargs[key]): - tmpl += (self.__format[key]+'\n')%(self.values[key][ival]) - else: - print('Error trying to write keyword %s but writing failed!'%key) - raise ValueError - - # to make inputcard more readable insert some blank lines after certain keys - if self.__params_type == 'kkrimp': - breaklines = ['TESTFLAG', 'NSPIN', 'QBOUND', 'NCHEB', 'HFIELD'] - else: - breaklines = ['TESTOPT', 'CARTESIAN', '', 'ZPERIODL', 'ZPERIODR', '', - 'KREADLDAU', '', '', '', '', 'VCONST', - 'BZDIVIDE', 'FSEMICORE', 'CPAINFO', 'RCLUSTXY', '', '', - 'ICST', '', 'GMAX', '', 'QBOUND'] - if key in breaklines: - tmpl += "\n" - - - # finally write to file - open(output, 'w').write(tmpl) - - - def read_keywords_from_inputcard(self, inputcard='inputcard'): - """ - Read list of keywords from inputcard and extract values to keywords dict - - :example usage: p = kkrparams(); p.read_keywords_from_inputcard('inputcard') - :note: converts '', '', 'ZPERIODL', and 'ZPERIODR' automatically to Ang. units! 
- """ - from numpy import shape, array - from aiida_kkr.tools.common_functions import get_aBohr2Ang - - # some print statements with debug info - debug = False - - if debug: print('start reading {}'.format(inputcard)) - - txt = open(inputcard, 'r').readlines() - keywords = self.values - keyfmts = self.__format - - #TODO loop over known keywords and fill with values found in inputcard - # first read array dimensions - read_first = ['NAEZ', 'NATYP', '', ''] - read_already = [] - for key in read_first: - valtxt = self._find_value(key, txt, debug=debug) - if valtxt is None: # try to read key without '<', '>' - valtxt = self._find_value(key.replace('<','').replace('>',''), txt, debug=debug) - # now set value in kkrparams - if valtxt is not None: - value = self.get_type(key)(valtxt) - self.set_value(key, value) - read_already.append(key) - - # then set self.__special_formatting and self.__listargs in _check_input_consistency - # needs NAEZ, NATYP, NLBASIS, NRBASIS to be set to get array dimensions correct - self._check_input_consistency(set_lists_only=True) - - # try to read keywords from inputcard and fill self.values - for key in keywords: - if key not in read_already: - item, num = 1, 1 # starting column and number of columns that are read in - - if keyfmts[key].count('%')>1: - num = keyfmts[key].count('%') - - if key not in self.__special_formatting: - # determine if more than one line is read in - if key in self.__listargs and key not in ['ZPERIODL', 'ZPERIODR', 'BZDIVIDE']: - lines = range(1,self.__listargs[key]+1) - else: - lines = [1] - else: # special formatting keys - if key=='RUNOPT': - lines = [1] - num = 8 - keyfmts[key] = '%s%s%s%s%s%s%s%s' - elif key=='TESTOPT': - lines = [1, 2] - num = 8 - keyfmts[key] = '%s%s%s%s%s%s%s%s' - elif key=='BRAVAIS': - lines = [1, 2, 3] - num = 3 - keyfmts[key] = '%f %f %f' - elif key=='BZDIVIDE': - lines = [1] - num = 3 - keyfmts[key] = '%f' - elif key=='FILES': - lines = [2, 4] - num = 1 - keyfmts[key] = '%s' - # read in all 
lines for this key - values = [] - for iline in lines: - valtxt = self._find_value(key, txt, iline, item, num, debug=debug) - if valtxt is not None: - # first deal with run and testopts (needs to spearate keys) - if key=='RUNOPT' or key=='TESTOPT': - if type(valtxt) != list: - valtxt = [valtxt] - valtxt_tmp = [] - for itmp in valtxt: - if len(itmp)>8: - Nsplitoff = int(len(itmp)/8) - for ii in range(Nsplitoff): - itmp_splitoff = itmp[ii*8:(ii+1)*8] - valtxt_tmp.append(itmp_splitoff) - itmp_splitoff = itmp[Nsplitoff*8:] - valtxt_tmp.append(itmp_splitoff) - else: - valtxt_tmp.append(itmp) - valtxt =valtxt_tmp - # then continue with valtxt - if type(valtxt)==list: - tmp = [] - for itmp in range(len(valtxt)): - tmptype = self.get_type(key)[itmp] - if tmptype==float and ('d' in valtxt[itmp] or 'D' in valtxt[itmp]): - valtxt[itmp] = valtxt[itmp].replace('d', 'e').replace('D','e') - tmp.append(tmptype(valtxt[itmp])) - else: - tmptype = self.get_type(key) - if tmptype==float and ('d' in valtxt or 'D' in valtxt): - valtxt = valtxt.replace('d', 'e').replace('D','e') - if tmptype==bool: - if valtxt.upper() in ['F', 'FALSE', '.FALSE.', 'NO', '0']: - valtxt = "" # only empty string evaluates to False!!! 
- else: - valtxt = "True" - tmp = tmptype(valtxt) - values.append(tmp) - if len(values)==1: - values = values[0] - - if key=='TESTOPT': # flatten list - if shape(values)[0]==2 and type(values[0])==list: - tmp = [] - for itmp in values: - for ii in itmp: - tmp.append(ii) - values = tmp - - # finally set values in kkrparams object - if values != []: - self.set_value(key, values) - - # finally check if some input of the old style was given and read it in - natyp = self.get_value('NATYP') - if natyp is None: - natyp = self.get_value('NAEZ') - - # look for old RBASIS input style - if self.get_value('') is None: - rbasis = [] - for iatom in range(natyp): - rbasis.append([float(i) for i in self._find_value('RBASIS', txt, 1+iatom, 1, 3, debug=debug)]) - self.set_value('', rbasis) - - # look for old atominfo input style - atominfo_c = self._find_value('ATOMINFOC', txt, 2, debug=debug) - if atominfo_c is None: - atominfo_c = False - else: - atominfo_c = True - atominfo = self._find_value('ATOMINFO', txt, 2, debug=debug) - if atominfo is None: - atominfo = False - else: - atominfo = True - tmp = [] - if atominfo_c: - for iatom in range(natyp): - tmp.append(self._find_value('ATOMINFOC', txt, 2+iatom, 1, 14, debug=debug)) - elif atominfo: - for iatom in range(natyp): - tmp.append(self._find_value('ATOMINFO', txt, 2+iatom, 1, 12, debug=debug)) - if atominfo_c or atominfo: - tmp = array(tmp) - cls_list = [int(i) for i in tmp[:,6]] - self.set_multiple_values(ZATOM=[float(i) for i in tmp[:,0]], SHAPE=[int(i) for i in tmp[:,8]], RMTREF=[float(i) for i in tmp[:,11]]) - if atominfo_c: - self.set_value('SITE', [int(i) for i in tmp[:,12]]) - self.set_value('', [float(i) for i in tmp[:,13]]) - else: - cls_list = range(1, natyp+1) - - # look for old left/right basis input style - if self.get_value('INTERFACE'): - leftbasis = self._find_value('LEFTBASIS', txt, debug=debug) - if leftbasis is None: - leftbasis = False - else: - leftbasis = True - nlbasis = self.get_value('') - rightbasis = 
self._find_value('RIGHBASIS', txt, debug=debug) # RIGHBASIS is no typo!! - if rightbasis is None: - rightbasis = False - else: - rightbasis = True - nrbasis = self.get_value('') - if leftbasis: - tmp = [] - for iatom in range(nlbasis): - tmp.append(self._find_value('LEFTBASIS', txt, 1+iatom, 1, 5, debug=debug)) - tmp = array(tmp) - self.set_multiple_values(RBLEFT=[[float(i[j]) for j in range(3)] for i in tmp[:,0:3]], KAOEZL=[int(i) for i in tmp[:,3]]) - tmp2 = [] - for icls in tmp[:,3]: - rmtref = self.get_value('')[cls_list.index(int(icls))] - tmp2.append(rmtref) - self.set_value('', tmp2) - if rightbasis: - tmp = [] - for iatom in range(nrbasis): - tmp.append(self._find_value('RIGHBASIS', txt, 1+iatom, 1, 5, debug=debug)) - tmp = array(tmp) - self.set_multiple_values(RBRIGHT=[[float(i[j]) for j in range(3)] for i in tmp[:,0:3]], KAOEZR=[int(i) for i in tmp[:,3]]) - tmp2 = [] - for icls in tmp[:,3]: - rmtref = self.get_value('')[cls_list.index(int(icls))] - tmp2.append(rmtref) - self.set_value('', tmp2) - - # convert RBLEFT etc. from alat units to Ang. 
units (this is assumed in generate_inputcard) - rbl = self.get_value('') - rbr = self.get_value('') - zper_l = self.get_value('ZPERIODL') - zper_r = self.get_value('ZPERIODR') - alat2ang = self.get_value('ALATBASIS') * get_aBohr2Ang() - if rbl is not None: self.set_value('', array(rbl)*alat2ang) - if rbr is not None: self.set_value('', array(rbr)*alat2ang) - if zper_l is not None: self.set_value('ZPERIODL', array(zper_l)*alat2ang) - if zper_r is not None: self.set_value('ZPERIODR', array(zper_r)*alat2ang) - - if debug: print('extracted parameters: {}'.format(self.get_set_values())) - - - def _find_value(self, charkey, txt, line=1, item=1, num=1, debug=False): - """ - Search charkey in txt and return value string - - parameter, input :: charkey string that is search in txt - parameter, input :: txt text that is searched (output of readlines) - parameter, input, optional :: line index in which line to start reading after key was found - parameter, input, optional :: item index which column is read - parameter, input, optional :: num number of column that are read - - returns :: valtxt string or list of strings depending on num setting - """ - if debug: print('find_value: {}'.format(charkey)) - try: - iline = [ii for ii in range(len(txt)) if charkey in txt[ii]][0] - except IndexError: - iline = None - if iline is not None: - txtline = txt[iline] - chkeq = charkey+'=' - if chkeq in txtline: - valtxt = txtline.split(chkeq)[1].split()[item-1:item-1+num] - else: - nextline = txt[iline+line] - startpos = txtline.index(charkey) - valtxt = nextline[startpos:].split()[item-1:item-1+num] - if debug: print('find_value found {}'.format(valtxt)) - if num == 1: - return valtxt[0] - else: - return valtxt - else: - return None - - - # redefine _update_mandatory for voronoi code - def _update_mandatory_voronoi(self): - """Change mandatory flags to match requirements of voronoi code""" - # initialize all mandatory flags to False and update list afterwards - for key in 
self.values.keys(): - self._mandatory[key] = False - - runopts = [] - if self.values['RUNOPT'] is not None: - for runopt in self.values['RUNOPT']: - runopts.append(runopt.strip()) - - #For a KKR calculation these keywords are always mandatory: - mandatory_list = ['ALATBASIS', 'BRAVAIS', 'NAEZ', '', 'NSPIN', 'LMAX', 'RCLUSTZ', ''] - - #Mandatory in 2D - if self.values['INTERFACE']: - mandatory_list += ['', '', 'ZPERIODL', '', '', 'ZPERIODR'] - #Mandatory in CPA - if self.values['NATYP'] is not None and self.values['NATYP'] > self.values['NAEZ']: - mandatory_list += ['NATYP', '', ''] - - for key in mandatory_list: - self._mandatory[key] = True - - - # redefine _update_mandatory for kkrim code - def _update_mandatory_kkrimp(self): - """Change mandatory flags to match requirements of kkrimp code""" - # initialize all mandatory flags to False and update list afterwards - for key in self.values.keys(): - self._mandatory[key] = False - - runopts = [] - if self.values.get('RUNOPT', None) is not None: - for runopt in self.values['RUNOPT']: - runopts.append(runopt.strip()) - - #For a KKR calculation these keywords are always mandatory: - mandatory_list = [] - - for key in mandatory_list: - self._mandatory[key] = True - - - def get_missing_keys(self, use_aiida=False): - """Find list of mandatory keys that are not yet set""" - setlist = dict(self.get_set_values()).keys() - manlist = self.get_all_mandatory() - missing = [] - autoset_list = ['BRAVAIS', '', '', 'ALATBASIS', 'NAEZ', '', 'EMIN', 'RCLUSTZ'] - if self.__params_type == 'voronoi': - autoset_list = ['BRAVAIS', '', '', 'ALATBASIS', 'NAEZ'] - for key in manlist: - if key not in setlist: - if not use_aiida: - missing.append(key) - else: - if key not in autoset_list: - missing.append(key) - return missing - - - def update_to_voronoi(self): - """ - Update parameter settings to match voronoi specification. 
- Sets self.__params_type and calls _update_mandatory_voronoi() - """ - self.__params_type = 'voronoi' - self._update_mandatory_voronoi() - - - def update_to_kkrimp(self): - """ - Update parameter settings to match kkrimp specification. - Sets self.__params_type and calls _update_mandatory_kkrimp() - """ - self.__params_type = 'kkrimp' - self._update_mandatory_kkrimp() - - - def _create_keywords_dict_kkrimp(self, **kwargs): - """ - Like create_keywords_dict but for changed keys of impurity code - """ - - default_keywords = dict([# complete list of keywords, detault all that are not mandatory to None - # chemistry - ('NSPIN', [None, '%i', False, 'Chemistry, Atom types: Number of spin directions in potential. Values 1 or 2']), - ('KVREL', [None, '%i', False, 'Chemistry, Atom types: Relativistic treatment of valence electrons. Takes values 0 (Schroedinger), 1 (Scalar relativistic), 2 (Dirac ; works only in ASA mode)']), - ('XC', [None, '%s', False, 'Chemistry, Exchange-correlation: Type of exchange correlation potential. Takes values 0 (LDA, Moruzzi-Janak-Williams), 1 (LDA, von Barth-Hedin), 2 (LDA, Vosko-Wilk-Nussair), 3 (GGA, Perdew-Wang 91), 4 (GGA, PBE), 5 (GGA, PBEsol)']), - # external fields - ('HFIELD', [None, '%f %i', False, 'External fields: Value of an external magnetic field in the first iteration. Works only with LINIPOL, XINIPOL']), - # accuracy - ('INS', [None, '%i', False, 'Accuracy, Radial solver: Takes values 0 for ASA and 1 for full potential Must be 0 for Munich Dirac solver ([KREL]=2)']), - ('ICST', [None, '%i', False, 'Accuracy, Radial solver: Number of iterations in the radial solver']), - ('RADIUS_LOGPANELS', [None, '%f', False, 'Accuracy, Radial solver: Radius up to which log-rule is used for interval width. 
Used in conjunction with runopt NEWSOSOL']), - ('NPAN_LOG', [None, '%i', False, 'Accuracy, Radial solver: Number of intervals from nucleus to [R_LOG] Used in conjunction with runopt NEWSOSOL']), - ('NPAN_EQ', [None, '%i', False, 'Accuracy, Radial solver: Number of intervals from [R_LOG] to muffin-tin radius Used in conjunction with runopt NEWSOSOL']), - ('NCHEB', [None, '%i', False, 'Accuracy, Radial solver: Number of Chebyshev polynomials per interval Used in conjunction with runopt NEWSOSOL']), - ('NPAN_LOGPANELFAC', [None, '%i', False, 'Accuracy, Radial solver: division factor logpanel']), - ('RADIUS_MIN', [None, '%i', False, 'Accuracy, Radial solver: ']), - ('NCOLL', [None, '%i', False, 'Accuracy, Radial solver: use nonco_angles solver (1/0)']), - ('SPINORBIT', [None, '%i', False, 'Accuracy, Radial solver: use SOC solver (1/0)']), - # scf cycle - ('SCFSTEPS', [None, '%i', False, 'Self-consistency control: Max. number of self-consistency iterations. Is reset to 1 in several cases that require only 1 iteration (DOS, Jij, write out GF).']), - ('IMIX', [None, '%i', False, "Self-consistency control: Mixing scheme for potential. 0 means straignt (linear) mixing, 3 means Broyden's 1st method, 4 means Broyden's 2nd method, 5 means Anderson's method"]), - ('MIXFAC', [None, '%f', False, 'Self-consistency control: Linear mixing parameter Set to 0. 
if [NPOL]=0']), - ('ITDBRY', [None, '%i', False, 'Self-consistency control: how many iterations to keep in the Broyden/Anderson mixing scheme.']), - ('BRYMIX', [None, '%f', False, 'Self-consistency control: Parameter for Broyden mixing.']), - ('QBOUND', [None, '%e', False, 'Self-consistency control: Lower limit of rms-error in potential to stop iterations.']), - #code options - ('RUNFLAG', [None, '%s', False, 'Running and test options: lmdos , GBULKtomemory, LDA+U , SIMULASA']), - ('TESTFLAG', [None, '%s', False, 'Running and test options: tmatnew, noscatteringmoment']), - ('CALCFORCE', [None, '%i', False, 'Calculate forces']), - ('CALCJIJMAT', [None, '%i', False, 'Calculate Jijmatrix']), - ('CALCORBITALMOMENT', [None, '%i', False, 'Calculate orbital moment (SOC solver only, 0/1)']), - ]) - - for key in kwargs: - key2 = key - if key not in default_keywords.keys(): - key2 = '<'+key+'>' - default_keywords[key2][0] = kwargs[key] - - return default_keywords - - - \ No newline at end of file diff --git a/aiida_kkr/tools/kkrparser_functions.py b/aiida_kkr/tools/kkrparser_functions.py deleted file mode 100644 index 814e99db..00000000 --- a/aiida_kkr/tools/kkrparser_functions.py +++ /dev/null @@ -1,820 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- -""" -Created on Thu Dec 7 10:09:51 2017 - -@author: ruess - -Note: -Here I collect all functions needed to parse the output of a KKR calculation. 
-These functions do not need aiida and are therefore separated from the actual -parser file where parse_kkr_outputfile is called -""" - - -from aiida_kkr.tools.common_functions import (search_string, get_version_info, get_Ry2eV, angles_to_vec, - get_corestates_from_potential, get_highest_core_state) - - -def parse_array_float(outfile, searchstring, splitinfo, replacepair=None): - from numpy import array - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = 0 - res = [] - while itmp>=0: - itmp = search_string(searchstring, tmptxt) - if itmp>=0: - tmpval = tmptxt.pop(itmp) - if replacepair is not None: - tmpval = tmpval.replace(replacepair[0], replacepair[1]) - if splitinfo[0]==1: - tmpval = float(tmpval.split(splitinfo[1])[splitinfo[2]]) - elif splitinfo[0]==2: - tmpval = float(tmpval.split(splitinfo[1])[splitinfo[2]].split()[splitinfo[3]]) - else: - raise ValueError("splitinfo[0] has to be either 1 or 2") - res.append(tmpval) - res = array(res) - return res - - -def get_rms(outfile, outfile2): - res = parse_array_float(outfile, 'average rms-error', [2, '=', 1, 0], ['D', 'E']) - res2 = parse_array_float(outfile2, 'rms-error for atom', [2, '=', 1, 0], ['D', 'E']) - niter = len(res) # number of iterations - natoms = int(len(res2)/niter) # number of atoms in system, needed to take only atom resolved rms of last iteration - return res, res2[-natoms:] - - -def get_neutr(outfile): - res = parse_array_float(outfile, 'charge neutrality in unit cell', [1, '=', 1]) - return res - - -def get_magtot(outfile): - res = parse_array_float(outfile, 'TOTAL mag. 
moment in unit cell', [1, '=', 1]) - return res - - -def get_EF(outfile): - res = parse_array_float(outfile, 'E FERMI', [2, 'FERMI', 1, 0]) - return res - - -def get_DOS_EF(outfile): - res = parse_array_float(outfile, 'DOS(E_F)', [1, '=', 1]) - return res - - -def get_Etot(outfile): - res = parse_array_float(outfile, 'TOTAL ENERGY in ryd.', [1, ':', 1]) - return res - - -def find_warnings(outfile): - from numpy import array - f = open(outfile) - tmptxt = f.readlines() - tmptxt_caps = [txt.upper() for txt in tmptxt] - f.close() - itmp = 0 - res = [] - while itmp>=0: - itmp = search_string('WARNING', tmptxt_caps) - if itmp>=0: - tmpval = tmptxt_caps.pop(itmp) - tmpval = tmptxt.pop(itmp) - res.append(tmpval.strip()) - return array(res) - - -def extract_timings(outfile): - from numpy import array - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = 0 - res = [] - search_keys = ['main0', - 'main1a - tbref', - 'main1a ', # two spaces to differentiate from following key - 'main1b - calctref13', - 'main1b ', # two spaces! - 'main1c - serial part', - 'main1c ',# two spaces! 
- 'main2', - 'Time in Iteration'] - while itmp>=0: - tmpvals = [] - for isearch in search_keys: - itmp = search_string(isearch, tmptxt) - if itmp>=0: - tmpval = [isearch, float(tmptxt.pop(itmp).split()[-1])] - tmpvals.append(tmpval) - if len(tmpvals)>0: - res.append(tmpvals) - #print(res) - res = array(res[0]) - #print(dict(res)) - return dict(res) - - -def get_charges_per_atom(outfile_000): - res1 = parse_array_float(outfile_000, 'charge in wigner seitz', [1, '=', 1]) - # these two are not in output of DOS calculation (and are then ignored) - res2 = parse_array_float(outfile_000, 'nuclear charge', [2, 'nuclear charge', 1, 0]) - res3 = parse_array_float(outfile_000, 'core charge', [1, '=', 1]) - return res1, res2, res3 - - -def get_single_particle_energies(outfile_000): - """ - extracts single particle energies from outfile_000 (output.000.txt) - returns the valence contribution of the single particle energies - """ - from numpy import array - f = open(outfile_000) - tmptxt = f.readlines() - f.close() - itmp = 0 - res = [] - while itmp>=0: - itmp = search_string('band energy per atom', tmptxt) - if itmp>=0: - tmpval = float(tmptxt.pop(itmp).split()[-1]) - res.append(tmpval) - return array(res) - - -def get_econt_info(outfile_0init): - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - - itmp = search_string('E min', tmptxt) - emin = float(tmptxt[itmp].split('min')[1].split('=')[1].split()[0]) - - itmp = search_string('Temperature', tmptxt) - tempr = float(tmptxt[itmp].split('Temperature')[1].split('=')[1].split()[0]) - - itmp = search_string('Number of energy points', tmptxt) - Nepts = int(tmptxt[itmp].split(':')[1].split()[0]) - - doscalc = search_string('Density-of-States calculation', tmptxt) - if doscalc == -1: - # npol - itmp = search_string('poles =', tmptxt) - Npol = int(tmptxt[itmp].split('=')[1].split()[0]) - # npt1, npt2, npt3 - itmp = search_string('contour:', tmptxt) - tmp = tmptxt[itmp].replace(',','').split(':')[1].split() - N1 = 
int(tmp[2]) - N2 = int(tmp[5]) - N3 = int(tmp[8]) - else: - Npol, N1, N2, N3 = 0, 0, Nepts, 0 - - return emin, tempr, Nepts, Npol, N1, N2, N3 - - -def get_core_states(potfile): - from numpy import array - ncore, energies, lmoments = get_corestates_from_potential(potfile=potfile) - emax, lmax, descr_max = [], [], [] - for ipot in range(len(ncore)): - if ncore[ipot] > 0: - lvalmax, energy_max, descr = get_highest_core_state(ncore[ipot], energies[ipot], lmoments[ipot]) - else: - lvalmax, energy_max, descr = None, None, 'no core states' - emax.append(energy_max) - lmax.append(lvalmax) - descr_max.append(descr) - return array(ncore), array(emax), array(lmax), array(descr_max) - - -def get_alatinfo(outfile_0init): - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('Lattice constants :', tmptxt) - alat = float(tmptxt[itmp].split(':')[1].split('=')[1].split()[0]) - twopialat = float(tmptxt[itmp].split(':')[1].split('=')[2].split()[0]) - return alat, twopialat - - -def get_scfinfo(outfile_0init, outfile_000, outfile): - f = open(outfile_000) - tmptxt = f.readlines() - f.close() - - itmp = search_string('ITERATION :', tmptxt) - tmpval = tmptxt[itmp].split(':')[1].split() - niter = int(tmpval[0]) - nitermax = int(tmpval[3]) - - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp1 = search_string('SCF ITERATION CONVERGED', tmptxt) - itmp2 = search_string('NUMBER OF SCF STEPS EXHAUSTED', tmptxt) - if itmp1>=0: - converged = True - else: - converged = False - if itmp2>=0: - nmax_reached = True - else: - nmax_reached = False - - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('STRMIX FCM QBOUND', tmptxt) - tmpval = tmptxt[itmp+1].split() - strmix = float(tmpval[0]) - fcm = float(tmpval[1]) - qbound = float(tmpval[2]) - tmpval = tmptxt[itmp+4].split() - brymix = float(tmpval[0]) - itmp = search_string('IMIX IGF ICC', tmptxt) - imix = int(tmptxt[itmp+1].split()[0]) - idtbry = 
int(tmptxt[itmp+4].split()[0]) - - mixinfo = [imix, strmix, qbound, fcm, idtbry, brymix] - - return niter, nitermax, converged, nmax_reached, mixinfo - - -def get_kmeshinfo(outfile_0init, outfile_000): - """ - Extract kmesh info from output.0.txt and output.000.txt - """ - # first get info from output.0.txt - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - nkmesh = [] - itmp = search_string('number of different k-meshes', tmptxt) - nkmesh.append( int(tmptxt[itmp].split(':')[1].split()[0]) ) - itmp = search_string('k-mesh NofKs', tmptxt) - nofks, nkx, nky, nkz = [],[],[],[] - for ik in range(nkmesh[0]): - tmpval = tmptxt[itmp+2+ik].split() - nofks.append(int(tmpval[1])) - nkx.append(int(tmpval[2])) - nky.append(int(tmpval[3])) - nkz.append(int(tmpval[4])) - - tmpdict = {'number_of_kpts':nofks, 'n_kx':nkx, 'n_ky':nky, 'n_kz':nkz} - nkmesh.append(tmpdict) - - #next get kmesh_ie from output.000.txt - f = open(outfile_000) - tmptxt = f.readlines() - f.close() - kmesh_ie = [] - itmp = 0 - while itmp>=0: - itmp = search_string('KMESH =', tmptxt) - if itmp>=0: - tmpval = int(tmptxt.pop(itmp).split()[-1]) - kmesh_ie.append(tmpval) - - return nkmesh, kmesh_ie - - -def get_symmetries(outfile_0init): - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - try: - itmp = search_string('symmetries found for this lattice:', tmptxt) - nsym = int(tmptxt[itmp].split(':')[1].split()[0]) - except IndexError: - itmp = search_string('< FINDGROUP > : Finding symmetry operations', tmptxt) - tmptxt2 = tmptxt[itmp:] - itmp = search_string('found for this lattice:', tmptxt2) - nsym = int(tmptxt2[itmp].split(':')[1].split()[0]) - itmp = search_string('symmetries will be used', tmptxt) - nsym_used = int(tmptxt[itmp].split()[3]) - itmp = search_string('', tmptxt) - tmpdict = {} - for isym in range(nsym_used): - tmpval = tmptxt[itmp+5+isym].replace('0-', '0 -').replace('1-', '1 -').split() # bugfix for -120 degree euler angle - desc = tmpval[1] - inversion = int(tmpval[2]) 
- euler = [float(tmpval[3]), float(tmpval[4]), float(tmpval[5])] - unitary = int(tmpval[6].replace('T', '1').replace('F', '0')) - tmpdict[desc] = {'has_inversion':inversion, 'is_unitary':unitary, 'euler_angles':euler} - desc = tmpdict - return nsym, nsym_used, desc - - -def get_ewald(outfile_0init): - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('setting bulk Madelung coefficients', tmptxt) - if itmp>=0: - info = '3D' - else: - info = '2D' - if info == '3D': - itmp = search_string('< LATTICE3D >', tmptxt) - tmpval = tmptxt[itmp+7].split()[2:] - rsum = float(tmpval[2]), int(tmpval[0]), int(tmpval[1]) - tmpval = tmptxt[itmp+8].split()[2:] - gsum = float(tmpval[2]), int(tmpval[0]), int(tmpval[1]) - else: - itmp = search_string('< LATTICE2D >', tmptxt) - tmpval = tmptxt[itmp+13].split()[2:] - rsum = float(tmpval[2]), int(tmpval[0]), int(tmpval[1]) - tmpval = tmptxt[itmp+14].split()[2:] - gsum = float(tmpval[2]), int(tmpval[0]), int(tmpval[1]) - return rsum, gsum, info - - -def get_nspin(outfile_0init): - """ - extract NSPIN value from output.0.txt - """ - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('NSPIN', tmptxt) - nspin = int(tmptxt[itmp+1].split()[0]) - return nspin - - -def get_natom(outfile_0init): - """ - extract NATYP value from output.0.txt - """ - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('NATYP', tmptxt) - natom = int(tmptxt[itmp+1].split()[0]) - return natom - - -def use_newsosol(outfile_0init): - """ - extract NEWSOSOL info from output.0.txt - """ - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - itmp = search_string('NEWSOSOL', tmptxt) - newsosol = False - if itmp>=0: - newsosol = True - return newsosol - - -def get_spinmom_per_atom(outfile, natom, nonco_out_file=None): - """ - Extract spin moment information from outfile and nonco_angles_out (if given) - """ - from numpy import array - f = open(outfile) - tmptxt = 
f.readlines() - f.close() - itmp = 0 - result = [] - while itmp >= 0: - itmp = search_string('m_spin', tmptxt) - if itmp>=0: - tmpline = tmptxt.pop(itmp) - tmparray = [] - for iatom in range(natom): - tmpline = tmptxt.pop(itmp) - tmparray.append(float(tmpline.split()[3])) - result.append(tmparray) - - # if the file is there, i.e. NEWSOSOL is used, then extract also direction of spins (angles theta and phi) - if nonco_out_file is not None and result != []: - from numpy import loadtxt - from numpy import shape - angles = loadtxt(nonco_out_file) - if len(shape(angles))==1: - angles = array([angles]) - vec = angles_to_vec(result[-1], angles[:,0], angles[:,1]) - else: - vec, angles = [],[] - - return array(result), vec, angles - - -def get_orbmom(outfile, natom): - """ - read orbmom info from outfile and return array (iteration, atom)=orbmom - """ - from numpy import array - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = 0 - result = [] - while itmp >= 0: - itmp = search_string('m_spin', tmptxt) - if itmp>=0: - tmpline = tmptxt.pop(itmp) - tmparray = [] - for iatom in range(natom): - tmpline = tmptxt.pop(itmp) - tmparray.append(float(tmpline.split()[4])) - result.append(tmparray) - - return array(result)#, vec, angles - - -def get_lattice_vectors(outfile_0init): - """ - read direct and reciprocal lattice vectors in internal units (useful for qdos generation) - """ - f = open(outfile_0init) - tmptxt = f.readlines() - f.close() - vecs, rvecs = [], [] - tmpvecs = [] - for search_txt in ['a_1: ', 'a_2: ', 'a_3: ', 'b_1: ', 'b_2: ', 'b_3: ']: - itmp = search_string(search_txt, tmptxt) - if itmp>=0: - tmpvec = tmptxt[itmp].split(':')[1].split() - tmpvecs.append([float(tmpvec[0]), float(tmpvec[1]), float(tmpvec[1])]) - if search_txt in ['a_3: ', 'b_3: '] and itmp<0: - # reset vecs for 2D case - tmpvecs[0] = tmpvecs[0][:2] - tmpvecs[1] = tmpvecs[1][:2] - if search_txt=='a_3: ': - vecs = tmpvecs - tmpvecs = [] - elif search_txt=='b_3: ': - rvecs = tmpvecs - 
return vecs, rvecs - - -def parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file, outfile_2='output.2.txt', skip_readin=False): - """ - Parser method for the kkr outfile. It returns a dictionary with results - """ - # scaling factors etc. defined globally - Ry2eV = get_Ry2eV() - doscalc = False - - # collection of parsing error messages - msg_list = [] - - try: - code_version, compile_options, serial_number = get_version_info(outfile) - tmp_dict = {} - tmp_dict['code_version'] = code_version - tmp_dict['compile_options'] = compile_options - tmp_dict['calculation_serial_number'] = serial_number - out_dict['code_info_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: Version Info" - msg_list.append(msg) - - try: - nspin = get_nspin(outfile_0init) - natom = get_natom(outfile_0init) - newsosol = use_newsosol(outfile_0init) - out_dict['nspin'] = nspin - out_dict['number_of_atoms_in_unit_cell'] = natom - out_dict['use_newsosol'] = newsosol - except: - msg = "Error parsing output of KKR: nspin/natom" - msg_list.append(msg) - - try: - result = find_warnings(outfile) - tmp_dict = {} - tmp_dict['number_of_warnings'] = len(result) - tmp_dict['warnings_list'] = result - out_dict['warnings_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: search for warnings" - msg_list.append(msg) - - try: - result = extract_timings(timing_file) - out_dict['timings_group'] = result - out_dict['timings_unit'] = 'seconds' - except: - msg = "Error parsing output of KKR: timings" - msg_list.append(msg) - - try: - emin, tempr, Nepts, Npol, N1, N2, N3 = get_econt_info(outfile_0init) - tmp_dict = {} - tmp_dict['emin'] = emin - tmp_dict['emin_unit'] = 'Rydberg' - tmp_dict['number_of_energy_points'] = Nepts - tmp_dict['temperature'] = tempr - tmp_dict['temperature_unit'] = 'Kelvin' - tmp_dict['npol'] = Npol - tmp_dict['n1'] = N1 - tmp_dict['n2'] = N2 - tmp_dict['n3'] = N3 - out_dict['energy_contour_group'] = 
tmp_dict - if Npol == 0: - doscalc = True - except: - msg = "Error parsing output of KKR: energy contour" - msg_list.append(msg) - - try: - ncore, emax, lmax, descr_max = get_core_states(potfile_out) - tmp_dict = {} - tmp_dict['number_of_core_states_per_atom'] = ncore - tmp_dict['energy_highest_lying_core_state_per_atom'] = emax - tmp_dict['energy_highest_lying_core_state_per_atom_unit'] = 'Rydberg' - tmp_dict['descr_highest_lying_core_state_per_atom'] = descr_max - out_dict['core_states_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: core_states" - msg_list.append(msg) - - try: - alat, twopioveralat = get_alatinfo(outfile_0init) - out_dict['alat_internal'] = alat - out_dict['two_pi_over_alat_internal'] = twopioveralat - out_dict['alat_internal_unit'] = 'a_Bohr' - out_dict['two_pi_over_alat_internal_unit'] = '1/a_Bohr' - except: - msg = "Error parsing output of KKR: alat, 2*pi/alat" - msg_list.append(msg) - - try: - nkmesh, kmesh_ie = get_kmeshinfo(outfile_0init, outfile_000) - tmp_dict = {} - tmp_dict['number_different_kmeshes'] = nkmesh[0] - tmp_dict['number_kpoints_per_kmesh'] = nkmesh[1] - tmp_dict['kmesh_energypoint'] = kmesh_ie - out_dict['kmesh_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: kmesh" - msg_list.append(msg) - - try: - nsym, nsym_used, desc = get_symmetries(outfile_0init) - tmp_dict = {} - tmp_dict['number_of_lattice_symmetries'] = nsym - tmp_dict['number_of_used_symmetries'] = nsym_used - tmp_dict['symmetry_description'] = desc - out_dict['symmetries_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: symmetries" - msg_list.append(msg) - - if not doscalc: # in case of dos calculation no ewald summation is done - try: - rsum, gsum, info = get_ewald(outfile_0init) - tmp_dict = {} - tmp_dict['ewald_summation_mode'] = info - tmp_dict['rsum_cutoff'] = rsum[0] - tmp_dict['rsum_number_of_vectors'] = rsum[1] - tmp_dict['rsum_number_of_shells'] = rsum[2] - tmp_dict['rsum_cutoff_unit'] = 'a_Bohr' - 
tmp_dict['gsum_cutoff'] = gsum[0] - tmp_dict['gsum_number_of_vectors'] = gsum[1] - tmp_dict['gsum_number_of_shells'] = gsum[2] - tmp_dict['gsum_cutoff_unit'] = '1/a_Bohr' - out_dict['ewald_sum_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: ewald summation for madelung poterntial" - msg_list.append(msg) - - try: - bv, recbv = get_lattice_vectors(outfile_0init) - out_dict['direct_bravais_matrix'] = bv - out_dict['reciprocal_bravais_matrix'] = recbv - out_dict['direct_bravais_matrix_unit'] = 'alat' - out_dict['reciprocal_bravais_matrix_unit'] = '2*pi / alat' - except: - msg = "Error parsing output of KKR: lattice vectors (direct/reciprocal)" - msg_list.append(msg) - - # this is skipped for qdos run for example - if not skip_readin: - tmp_dict = {} # used to group convergence info (rms, rms per atom, charge neutrality) - # also initialize convegence_group where all info stored for all iterations is kept - out_dict['convergence_group'] = tmp_dict - try: - result, result_atoms_last = get_rms(outfile, outfile_000) - tmp_dict['rms'] = result[-1] - tmp_dict['rms_all_iterations'] = result - tmp_dict['rms_per_atom'] = result_atoms_last - tmp_dict['rms_unit'] = 'unitless' - out_dict['convergence_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: rms-error" - msg_list.append(msg) - - try: - result = get_neutr(outfile) - tmp_dict['charge_neutrality'] = result[-1] - out_dict['convergence_group']['charge_neutrality_all_iterations'] = result - tmp_dict['charge_neutrality_unit'] = 'electrons' - out_dict['convergence_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: charge neutrality" - msg_list.append(msg) - - tmp_dict = {} # used to group magnetism info (spin and orbital moments) - try: - result = get_magtot(outfile) - if len(result)>0: - tmp_dict['total_spin_moment'] = result[-1] - out_dict['convergence_group']['total_spin_moment_all_iterations'] = result - tmp_dict['total_spin_moment_unit'] = 'mu_Bohr' - 
out_dict['magnetism_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: total magnetic moment" - msg_list.append(msg) - - try: - if nspin>1: - if not newsosol: - #reset automatically to None to turn off reading of nonco angles file - nonco_out_file = None - - result, vec, angles = get_spinmom_per_atom(outfile, natom, nonco_out_file) - if len(result)>0: - tmp_dict['spin_moment_per_atom'] = result[-1,:] - if newsosol: - tmp_dict['spin_moment_vector_per_atom'] = vec[:] - tmp_dict['spin_moment_angles_per_atom'] = angles[:] - tmp_dict['spin_moment_angles_per_atom_unit'] = 'degree' - out_dict['convergence_group']['spin_moment_per_atom_all_iterations'] = result[:,:] - tmp_dict['spin_moment_unit'] = 'mu_Bohr' - out_dict['magnetism_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: spin moment per atom" - msg_list.append(msg) - - # add orbital moments to magnetis group in parser output - try: - if nspin>1 and newsosol: - #TODO orbital moment full vectors - # so far the KKR code writes only the component of the orbital moment - # parallel to the spin moment, thus vec and angles are returned empty - # by construction. 
This might change in the future - #result, vec, angles = get_orbmom(outfile, natom, nonco_angles_orbmom) - # so for now return only result= array containing all iterations, all atoms, orbital moment parallel to spin quantization axis - result = get_orbmom(outfile, natom) - if len(result)>0: - tmp_dict['total_orbital_moment'] = sum(result[-1,:]) - tmp_dict['orbital_moment_per_atom'] = result[-1,:] - #tmp_dict['orbital_moment_vector_per_atom'] = vec[-1,:] - #tmp_dict['orbital_moment_angles_per_atom'] = angles[-1,:] - out_dict['convergence_group']['orbital_moment_per_atom_all_iterations'] = result[:,:] - tmp_dict['orbital_moment_unit'] = 'mu_Bohr' - #tmp_dict['orbital_moment_angles_per_atom_unit'] = 'degree' - out_dict['magnetism_group'] = tmp_dict - except: - msg = "Error parsing output of KKR: orbital moment" - msg_list.append(msg) - - try: - result = get_EF(outfile) - out_dict['fermi_energy'] = result[-1] - out_dict['fermi_energy_units'] = 'Ry' - out_dict['convergence_group']['fermi_energy_all_iterations'] = result - out_dict['convergence_group']['fermi_energy_all_iterations_units'] = 'Ry' - except: - msg = "Error parsing output of KKR: EF" - msg_list.append(msg) - - try: - result = get_DOS_EF(outfile) - out_dict['dos_at_fermi_energy'] = result[-1] - out_dict['convergence_group']['dos_at_fermi_energy_all_iterations'] = result - except: - msg = "Error parsing output of KKR: DOS@EF" - msg_list.append(msg) - - try: - result = get_Etot(outfile) - out_dict['energy'] = result[-1]*Ry2eV - out_dict['energy_unit'] = 'eV' - out_dict['total_energy_Ry'] = result[-1] - out_dict['total_energy_Ry_unit'] = 'Rydberg' - out_dict['convergence_group']['total_energy_Ry_all_iterations'] = result - except: - msg = "Error parsing output of KKR: total energy" - msg_list.append(msg) - - try: - result = get_single_particle_energies(outfile_000) - out_dict['single_particle_energies'] = result*Ry2eV - out_dict['single_particle_energies_unit'] = 'eV' - except: - msg = "Error parsing output of 
KKR: single particle energies" - msg_list.append(msg) - - try: - result_WS, result_tot, result_C = get_charges_per_atom(outfile_000) - niter = len(out_dict['convergence_group']['rms_all_iterations']) - natyp = int(len(result_tot)/niter) - out_dict['total_charge_per_atom'] = result_tot[-natyp:] - out_dict['charge_core_states_per_atom'] = result_C[-natyp:] - # this check deals with the DOS case where output is slightly different - if len(result_WS) == len(result_C): - out_dict['charge_valence_states_per_atom'] = result_WS[-natyp:]-result_C[-natyp:] - out_dict['total_charge_per_atom_unit'] = 'electron charge' - out_dict['charge_core_states_per_atom_unit'] = 'electron charge' - out_dict['charge_valence_states_per_atom_unit'] = 'electron charge' - except: - msg = "Error parsing output of KKR: charges" - msg_list.append(msg) - - try: - try: - niter, nitermax, converged, nmax_reached, mixinfo = get_scfinfo(outfile_0init, outfile_000, outfile) - except IndexError: - niter, nitermax, converged, nmax_reached, mixinfo = get_scfinfo(outfile_0init, outfile_2, outfile) - out_dict['convergence_group']['number_of_iterations'] = niter - out_dict['convergence_group']['number_of_iterations_max'] = nitermax - out_dict['convergence_group']['calculation_converged'] = converged - out_dict['convergence_group']['nsteps_exhausted'] = nmax_reached - out_dict['convergence_group']['imix'] = mixinfo[0] - out_dict['convergence_group']['strmix'] = mixinfo[1] - out_dict['convergence_group']['qbound'] = mixinfo[2] - out_dict['convergence_group']['fcm'] = mixinfo[3] - out_dict['convergence_group']['idtbry'] = mixinfo[4] - out_dict['convergence_group']['brymix'] = mixinfo[5] - except: - msg = "Error parsing output of KKR: scfinfo" - msg_list.append(msg) - - #convert arrays to lists - from numpy import ndarray - for key in out_dict.keys(): - if type(out_dict[key])==ndarray: - out_dict[key] = list(out_dict[key]) - elif type(out_dict[key])==dict: - for subkey in out_dict[key].keys(): - if 
type(out_dict[key][subkey])==ndarray: - out_dict[key][subkey] = (out_dict[key][subkey]).tolist() - - - # return output with error messages if there are any - if len(msg_list)>0: - return False, msg_list, out_dict - else: - return True, [], out_dict - -def check_error_category(err_cat, err_msg, out_dict): - """ - Check if parser error of the non-critical category (err_cat != 1) are - actually consistent and may be discarded. - - :param err_cat: the error-category of the error message to be investigated - :param err_msg: the error-message - :param out_dict: the dict of results obtained from the parser function - - :returns: True/False if message is an error or warning - """ - # check special cases: - # 1. nonco_angle_file not present, but newsosol==False anyways - if 'NONCO_ANGLES_OUT' in err_msg: - if "use_newsosol" in out_dict.keys(): - if out_dict["use_newsosol"]: - return True - else: - return False - else: - return True - - # default behavior - if err_cat == 1: - return True - else: - return False - - - -#""" -if __name__=='__main__': - print('run test') - path0 = '../../../development/calc_import_test/' - outfile = path0+'output.2.txt' - outfile_0init = path0+'output.0.txt' - outfile_000 = path0+'output.000.txt' - outfile_2 = path0+'output.2.txt' - timing_file = path0+'out_timing.000.txt' - potfile_out = path0+'potential' - nonco_out_file = path0+'nonco_angle_out.dat' - print(get_lattice_vectors(outfile_0init)) - #print('test_path: {}'.format(path0)) - #out_dict = {} - #success, msg_list, out_dict = parse_kkr_outputfile(out_dict, outfile, outfile_0init, outfile_000, timing_file, potfile_out, nonco_out_file, outfile_2) - #out_dict['parser_warnings'] = msg_list - #print(success) - #print(msg_list) -#""" \ No newline at end of file diff --git a/aiida_kkr/tools/tools_kkrimp.py b/aiida_kkr/tools/tools_kkrimp.py index 444a2408..fc5ceadd 100644 --- a/aiida_kkr/tools/tools_kkrimp.py +++ b/aiida_kkr/tools/tools_kkrimp.py @@ -190,7 +190,7 @@ def _get_econt_info(self, 
out_log): * 'epts', list of complex valued energy points * 'weights', list of complex valued weights for energy integration """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string from numpy import array f = open(out_log) tmptxt = f.readlines() @@ -218,7 +218,7 @@ def _get_scfinfo(self, file): :returns: niter (int), nitermax (int), converged (bool), nmax_reached (bool), mixinfo (dict) :note: mixinfo contains information on mixing scheme and mixing factor used in the calculation """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(file) tmptxt = f.readlines() f.close() @@ -266,7 +266,7 @@ def _get_newsosol(self, file): :param file: absolute path to out_log.000.txt of KKRimp calculation :returns: True(False) if SOC solver is (not) used """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(file) tmptxt = f.readlines() f.close() @@ -285,7 +285,7 @@ def _get_natom(self, file): :param file: file that is parsed to find number of atoms :returns: natom (int), number of atoms in impurity cluster """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(file) tmptxt = f.readlines() f.close() @@ -294,14 +294,41 @@ def _get_natom(self, file): return natom - def _get_magtot(self, file): + def _get_magtot(self, file, natom): """ - Extract total magnetic moment ofall atoms in imp. cluster + Extract total magnetic moment of all atoms in imp. cluster, + magnetic moment for each atom in the cluster and magn. moment + for all atoms and all iterations of the calculation :param file: file that is parsed to find magnetic moments - :returns: list of total magnetic moments of all atoms - """ - #TODO implement - return [] + :param natom: number of atoms in the cluster + :returns: magn. 
moment for all atoms in the cluster for the last iteration (saved in z-comp. of 3d vector) + magn. moment for all atoms in the cluster for all iterations (saved in z-comp. of 3d vector) + total magnetic moments of all atoms for last iteration + """ + from masci_tools.io.common_functions import search_string + import numpy as np + + f = open(file) + tmptxt = f.readlines() + f.close() + itmp = 0 + spinmom_all = [] + while itmp >= 0: + itmp = search_string('spin magnetic moment =', tmptxt) + if itmp >= 0: + spinmom_all.append(float(tmptxt.pop(itmp).split()[-1])) + # if no spin + spinmom = spinmom_all[len(spinmom_all)-natom:] + print (spinmom_all, natom, spinmom) + spinmom_vec = np.array([[0, 0, spinmom[0]]]) + spinmom_vec_all = np.array([[0, 0, spinmom_all[0]]]) + for i in range(1, natom): + spinmom_vec = np.append(spinmom_vec, [[0, 0, spinmom[i]]], axis=0) + for i in range(1, len(spinmom_all)): + spinmom_vec_all = np.append(spinmom_vec_all, [[0, 0, spinmom_all[i]]], axis=0) + magtot = sum(spinmom) + + return spinmom_vec, spinmom_vec_all, magtot def _extract_timings(self, outfile): @@ -310,7 +337,7 @@ def _extract_timings(self, outfile): :param outfile: timing file of the KKRimp run :returns: res (dict) timings in seconds, averaged over iterations """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(outfile) tmptxt = f.readlines() f.close() @@ -342,13 +369,13 @@ def _extract_timings(self, outfile): return res - def _get_nspin(self, file): + def _get_nspin(self, file, natom): """ Extract nspin from file :param file: file that is parsed :returns: 1 if calculation is paramagnetic, 2 otherwise """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(file) tmptxt = f.readlines() f.close() @@ -362,21 +389,52 @@ def _get_spinmom_per_atom(self, file, natom): Extract spin moment for all atoms :param file: file that is 
parsed :param natom: number of atoms in impurity cluster - :returns: spinmom_at (list), spin moments for all atoms - """ - #TODO implement - return spinmom_at + :returns: spinmom_at (array of spin moments for all atoms and the last iteration), + spinmom_at_all (array of spin moments for all atoms and iterations), + spinmom_at_tot (total spinmoment for the last iteration) + """ + import numpy as np + from math import sqrt + + f = open(file) + lines = f.readlines() + startline = len(lines) - natom + spinmom_at = np.array([lines[startline].split()]) + spinmom_at_all = np.array([lines[1].split()]) + for i in range(1, natom): + spinmom_at = np.append(spinmom_at, [lines[startline+i].split()], axis=0) + for j in range(2, len(lines)): + spinmom_at_all = np.append(spinmom_at_all, [lines[j].split()], axis=0) + spinmom_at_tot = 0 + for i in range(0, natom): + spinmom_at_tot += sqrt(float(spinmom_at[i][0])**2+float(spinmom_at[i][1])**2+float(spinmom_at[i][2])**2) + + return spinmom_at, spinmom_at_all, spinmom_at_tot def _get_orbmom_per_atom(self, file, natom): """ - Extract orbital moment for all atoms + Extract orbital moment for all atoms (orbmom_at: all atoms in last iteration, + orbmom_at_all: all atoms in all iterations). For each atom there are six values: + first -> x-component real part, second -> x-component imaginary part, + third -> y-component real part, ... sixth -> z-component imaginary part. 
:param file: file that is parsed :param natom: number of atoms in impurity cluster :returns: orbmom_at (list), orbital moments for all atoms """ - #TODO implement - return orbmom_at + import numpy as np + + f = open(file) + lines = f.readlines() + startline = len(lines) - natom + orbmom_at = np.array([lines[startline].split()]) + orbmom_at_all = np.array([lines[1].split()]) + for i in range(1, natom): + orbmom_at = np.append(orbmom_at, [lines[startline+i].split()], axis=0) + for j in range(2, len(lines)): + orbmom_at_all = np.append(orbmom_at_all, [lines[j].split()], axis=0) + + return orbmom_at, orbmom_at_all def _get_EF_potfile(self, potfile): @@ -398,7 +456,7 @@ def _get_Etot(self, file): :param file: file that is parsed :returns: Etot (list), values of the total energy in Ry for all iterations """ - from aiida_kkr.tools.common_functions import search_string + from masci_tools.io.common_functions import search_string f = open(file) tmptxt = f.readlines() f.close() @@ -447,8 +505,8 @@ def parse_kkrimp_outputfile(self, out_dict, file_dict): * 'out_spinmoms', the output spin moments file * 'out_orbmoms', the output orbital moments file """ - from aiida_kkr.tools.kkrparser_functions import get_rms, find_warnings, get_charges_per_atom, get_core_states - from aiida_kkr.tools.common_functions import get_version_info, get_Ry2eV + from masci_tools.io.parsers.kkrparser_functions import get_rms, find_warnings, get_charges_per_atom, get_core_states + from masci_tools.io.common_functions import get_version_info, get_Ry2eV Ry2eV = get_Ry2eV() msg_list = [] @@ -479,56 +537,46 @@ def parse_kkrimp_outputfile(self, out_dict, file_dict): msg = "Error parsing output of KKRimp: rms-error" msg_list.append(msg) - tmp_dict = {} # used to group magnetism info (spin and orbital moments) - try: - result = self._get_magtot(files['out_log']) - if len(result)>0: - tmp_dict['total_spin_moment'] = result[-1] - out_dict['convergence_group']['total_spin_moment_all_iterations'] = result - 
tmp_dict['total_spin_moment_unit'] = 'mu_Bohr' - out_dict['magnetism_group'] = tmp_dict - except: - msg = "Error parsing output of KKRimp: total magnetic moment" - msg_list.append(msg) - try: - nspin = self._get_nspin(files['out_log']) natom = self._get_natom(files['out_log']) + nspin = self._get_nspin(files['out_log'], natom) newsosol = self._get_newsosol(files['out_log']) out_dict['nspin'] = nspin out_dict['number_of_atoms_in_unit_cell'] = natom out_dict['use_newsosol'] = newsosol except: msg = "Error parsing output of KKRimp: nspin/natom" - msg_list.append(msg) - + msg_list.append(msg) + + + tmp_dict = {} # used to group magnetism info (spin and orbital moments) try: - if nspin>1: - #result, vec, angles = get_spinmom_per_atom(outfile, natom, nonco_out_file) - spinmom_atom, spinmom_atom_vec_all_iter, = self._get_spinmom_per_atom(files['out_spinmom'], natom) - if len(result)>0: - tmp_dict['spin_moment_per_atom'] = result[-1,:] - if newsosol: - tmp_dict['spin_moment_vector_per_atom'] = vec[:] - tmp_dict['spin_moment_angles_per_atom'] = angles[:] - tmp_dict['spin_moment_angles_per_atom_unit'] = 'degree' - out_dict['convergence_group']['spin_moment_per_atom_all_iterations'] = result[:,:] - tmp_dict['spin_moment_unit'] = 'mu_Bohr' - out_dict['magnetism_group'] = tmp_dict + if nspin>1 and newsosol: + spinmom_vec, spinmom_vec_all, magtot = self._get_spinmom_per_atom(files['out_spinmoms'], natom) + tmp_dict['total_spin_moment'] = magtot + out_dict['convergence_group']['spin_moment_per_atom'] = spinmom_vec + out_dict['convergence_group']['spin_moment_per_atom_all_iterations'] = spinmom_vec_all + tmp_dict['total_spin_moment_unit'] = 'mu_Bohr' + out_dict['magnetism_group'] = tmp_dict + elif nspin>1: + spinmom_vec, spinmom_vec_all, magtot = self._get_magtot(files['out_log'], natom) + tmp_dict['total_spin_moment'] = magtot + out_dict['convergence_group']['spin_moment_per_atom'] = spinmom_vec + out_dict['convergence_group']['spin_moment_per_atom_all_iterations'] = 
spinmom_vec_all + tmp_dict['total_spin_moment_unit'] = 'mu_Bohr' + out_dict['magnetism_group'] = tmp_dict except: msg = "Error parsing output of KKRimp: spin moment per atom" - msg_list.append(msg) + msg_list.append(msg) - # add orbital moments to magnetis group in parser output + # add orbital moments to magnetism group in parser output try: - if nspin>1 and newsosol: - orbmom_atom = self._get_orbmom_per_atom(files['out_orbmom'], natom) - if len(result)>0: - tmp_dict['total_orbital_moment'] = sum(result[-1,:]) - tmp_dict['orbital_moment_per_atom'] = result[-1,:] - out_dict['convergence_group']['orbital_moment_per_atom_all_iterations'] = result[:,:] - tmp_dict['orbital_moment_unit'] = 'mu_Bohr' - out_dict['magnetism_group'] = tmp_dict + if nspin>1 and newsosol and files['out_orbmoms'] is not None: + orbmom_atom, orbmom_atom_all = self._get_orbmom_per_atom(files['out_orbmoms'], natom) + tmp_dict['orbital_moment_per_atom'] = orbmom_atom + out_dict['convergence_group']['orbital_moment_per_atom_all_iterations'] = orbmom_atom_all + tmp_dict['orbital_moment_unit'] = 'mu_Bohr' + out_dict['magnetism_group'] = tmp_dict except: msg = "Error parsing output of KKRimp: orbital moment" msg_list.append(msg) @@ -543,7 +591,6 @@ def parse_kkrimp_outputfile(self, out_dict, file_dict): try: result = self._get_Etot(files['out_log']) - print(result) out_dict['energy'] = result[-1]*Ry2eV out_dict['energy_unit'] = 'eV' out_dict['total_energy_Ry'] = result[-1] @@ -679,7 +726,7 @@ def get_structure_data(structure): #import packages from aiida.common.constants import elements as PeriodicTableElements - from aiida_kkr.tools.common_functions import get_Ang2aBohr, get_alat_from_bravais + from masci_tools.io.common_functions import get_Ang2aBohr, get_alat_from_bravais import numpy as np #list of globally used constants @@ -793,7 +840,7 @@ def rotate_onto_z(structure, structure_array, vector): :return: rotated system, now the 'orient'-axis is aligned with the z-axis """ - from 
aiida_kkr.tools.common_functions import vec_to_angles + from masci_tools.io.common_functions import vec_to_angles import math import numpy as np @@ -851,7 +898,7 @@ def find_neighbors(structure, structure_array, i, radius, clust_shape='spherical """ #import packages - from aiida_kkr.tools.common_functions import get_Ang2aBohr, get_alat_from_bravais + from masci_tools.io.common_functions import get_Ang2aBohr, get_alat_from_bravais import numpy as np import math diff --git a/aiida_kkr/tools/voroparser_functions.py b/aiida_kkr/tools/voroparser_functions.py deleted file mode 100644 index b12086d3..00000000 --- a/aiida_kkr/tools/voroparser_functions.py +++ /dev/null @@ -1,361 +0,0 @@ -#!/usr/bin/python - -from __future__ import print_function -import sys - -from aiida_kkr.tools.common_functions import (get_corestates_from_potential, - get_highest_core_state, search_string, - get_version_info, get_Ry2eV, - get_ef_from_potfile) -from aiida_kkr.tools.kkrparser_functions import get_core_states - - -# redefine raw_input for python 3/2.7 compatilbility -if sys.version_info[0] >= 3: - def raw_input(msg): - return input(msg) - - - -def get_valence_min(outfile='out_voronoi'): - """Construct minimum of energy contour (between valence band bottom and core states)""" - from scipy import array - txt = open(outfile).readlines() - searchstr = 'All other states are above' - valence_minimum = array([float(line.split(':')[1].split()[0]) for line in txt if searchstr in line]) - return valence_minimum - - -def check_voronoi_output(potfile, outfile, delta_emin_safety=0.1): - """Read output from voronoi code and create guess of energy contour""" - from scipy import zeros - #analyse core levels, minimum of valence band and their difference - ncore, ecore, lcore = get_corestates_from_potential(potfile=potfile) - e_val_min = get_valence_min(outfile=outfile) - - #print a table that summarizes the result - e_core_max = zeros(len(ncore)) - print('pot Highest core-level low. val. 
state diff') - for ipot in range(len(ncore)): - if ncore[ipot] > 0: - lval, emax, descr = get_highest_core_state(ncore[ipot], ecore[ipot], lcore[ipot]) - e_core_max[ipot] = emax - print('%3i %2s %10.6f %6.2f %6.2f'%(ipot+1, descr, emax, e_val_min[ipot], e_val_min[ipot]-emax)) - else: - print('%3i << no core states >>'%(ipot+1)) - # set to some large negative number for check to not give false positive in case of empty cells - e_core_max[ipot] = -1000 - - #get hint for energy integration: - emin_guess = e_val_min.min() - delta_emin_safety # in Ry - - return emin_guess, e_core_max.max() - - -def parse_voronoi_output(out_dict, outfile, potfile, atominfo, radii, inputfile): - """ - Parse output of voronoi calculation and return (success, error_messages_list, out_dict) - """ - # for collection of error messages: - msg_list = [] - - try: - code_version, compile_options, serial_number = get_version_info(outfile) - tmp_dict = {} - tmp_dict['code_version'] = code_version - tmp_dict['compile_options'] = compile_options - tmp_dict['calculation_serial_number'] = serial_number - out_dict['code_info_group'] = tmp_dict - except: - msg = "Error parsing output of voronoi: Version Info" - msg_list.append(msg) - - try: - emin, e_core_max = check_voronoi_output(potfile, outfile) - out_dict['emin'] = emin - out_dict['emin_units'] = 'Ry' - diff_emin_ef = emin - get_ef_from_potfile(potfile) - out_dict['emin_minus_efermi_Ry'] = diff_emin_ef - out_dict['emin_minus_efermi'] = diff_emin_ef * get_Ry2eV() - out_dict['emin_minus_efermi_Ry_units'] = 'Ry' - out_dict['emin_minus_efermi_units'] = 'eV' - except: - msg = "Error parsing output of voronoi: 'EMIN'" - msg_list.append(msg) - - # parse - try: - ncore, emax, lmax, descr_max = get_core_states(potfile) - tmp_dict = {} - tmp_dict['number_of_core_states_per_atom'] = ncore - tmp_dict['energy_highest_lying_core_state_per_atom'] = emax - tmp_dict['energy_highest_lying_core_state_per_atom_unit'] = 'Rydberg' - 
tmp_dict['descr_highest_lying_core_state_per_atom'] = descr_max - out_dict['core_states_group'] = tmp_dict - except: - msg = "Error parsing output of voronoi: core_states" - msg_list.append(msg) - - try: - Ncls, natom, results = get_cls_info(outfile) - clsinfo = [] - tmpdict_all = {} - for icls in range(natom): - tmpdict = {} - tmpdict['iatom'] = results[icls][0] - tmpdict['refpot'] = results[icls][1] - tmpdict['rmt_ref'] = results[icls][2] - tmpdict['tb_cluster_id'] = results[icls][3] - tmpdict['sites'] = results[icls][4] - clsinfo.append(tmpdict) - tmpdict_all['cluster_info_atoms'] = clsinfo - tmpdict_all['number_of_clusters'] = Ncls - out_dict['cluster_info_group'] = tmpdict_all - except: - msg = "Error parsing output of voronoi: Cluster Info" - msg_list.append(msg) - - try: - out_dict['start_from_jellium_potentials'] = startpot_jellium(outfile) - except: - msg = "Error parsing output of voronoi: Jellium startpot" - msg_list.append(msg) - - try: - natyp, naez, shapes = get_shape_array(outfile, atominfo) - out_dict['shapes'] = shapes - except: - msg = "Error parsing output of voronoi: SHAPE Info" - msg_list.append(msg) - - try: - Vtot, results = get_volumes(outfile) - tmp_dict = {} - tmp_dict['volume_total'] = Vtot - tmpdict_all = [] - for icls in range(naez): - tmpdict = {} - tmpdict['iatom'] = results[icls][0] - tmpdict['v_atom'] = results[icls][1] - tmpdict_all.append(tmpdict) - tmp_dict['volume_atoms'] = tmpdict_all - tmp_dict['volume_unit'] = 'alat^3' - out_dict['volumes_group'] = tmp_dict - except: - msg = "Error parsing output of voronoi: Volume Info" - msg_list.append(msg) - - try: - results = get_radii(naez, radii) - tmpdict_all = [] - for icls in range(naez): - tmpdict = {} - tmpdict['iatom'] = results[icls][0] - tmpdict['rmt0'] = results[icls][1] - tmpdict['rout'] = results[icls][2] - tmpdict['dist_nn'] = results[icls][4] - tmpdict['rmt0_over_rout'] = results[icls][3] - tmpdict['rout_over_dist_nn'] = results[icls][5] - tmpdict_all.append(tmpdict) - 
tmpdict_all.append({'radii_units':'alat'}) - out_dict['radii_atoms_group'] = tmpdict_all - except: - msg = "Error parsing output of voronoi: radii.dat Info" - msg_list.append(msg) - - try: - results = get_fpradius(naez, atominfo) - out_dict['fpradius_atoms'] = results - out_dict['fpradius_atoms_unit'] = 'alat' - except: - msg = "Error parsing output of voronoi: full potential radius" - msg_list.append(msg) - - try: - result = get_alat(inputfile) - out_dict['alat'] = result - out_dict['alat_unit'] = 'a_Bohr' - except: - msg = "Error parsing output of voronoi: alat" - msg_list.append(msg) - - try: - result = get_radial_meshpoints(potfile) - out_dict['radial_meshpoints'] = result - except: - msg = "Error parsing output of voronoi: radial meshpoints" - msg_list.append(msg) - - # some consistency checks comparing lists with natyp/naez numbers - #TODO implement checks - - #convert arrays to lists - from numpy import ndarray - for key in out_dict.keys(): - if type(out_dict[key])==ndarray: - out_dict[key] = list(out_dict[key]) - elif type(out_dict[key])==dict: - for subkey in out_dict[key].keys(): - if type(out_dict[key][subkey])==ndarray: - out_dict[key][subkey] = list(out_dict[key][subkey]) - - - # return output with error messages if there are any - if len(msg_list)>0: - return False, msg_list, out_dict - else: - return True, [], out_dict - - -def startpot_jellium(outfile): - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = search_string('JELLSTART POTENTIALS', tmptxt) - if itmp ==-1: - return False - else: - return True - - -def get_volumes(outfile): - f = open(outfile) - tmptxt = f.readlines() - f.close() - - itmp = search_string('Total volume (alat^3)', tmptxt) - if itmp>=0: - Vtot = float(tmptxt.pop(itmp).split()[-1]) - - itmp = 0 - results = [] - while itmp>=0: - itmp = search_string(' Volume(alat^3) :', tmptxt) - if itmp>=0: - tmpstr = tmptxt.pop(itmp) - tmpstr = tmpstr.split() - tmpstr = [int(tmpstr[2]), float(tmpstr[5])] - results.append(tmpstr) - 
return Vtot, results - - -def get_cls_info(outfile): - f = open(outfile) - tmptxt = f.readlines() - f.close() - itmp = 0 - Ncls = 0 - Natom = 0 - cls_all = [] - results = [] - while itmp>=0: - itmp = search_string('CLSGEN_TB: Atom', tmptxt) - if itmp>=0: - tmpstr = tmptxt.pop(itmp) - tmpstr = tmpstr.split() - tmp = [int(tmpstr[2]), int(tmpstr[4]), float(tmpstr[6]), int(tmpstr[8]), int(tmpstr[10])] - results.append(tmp) - if int(tmpstr[8]) not in cls_all: - Ncls += 1 - cls_all.append(int(tmpstr[8])) - Natom += 1 - return Ncls, Natom, results - - -def get_shape_array(outfile, atominfo): - f = open(outfile) - txt = f.readlines() - f.close() - #naez/natyp number of items either one number (=ishape without cpa or two =[iatom, ishape] with CPA) - # read in naez and/or natyp and then find ishape array (1..natyp[=naez without CPA]) - itmp = search_string('NAEZ= ', txt) - if itmp>=0: - tmp = txt[itmp] - ipos = tmp.find('NAEZ=') - naez = int(tmp[ipos+5:].split()[0]) - else: - naez = -1 - itmp = search_string('NATYP= ', txt) - if itmp>=0: - tmp = txt[itmp] - ipos = tmp.find('NATYP=') - natyp = int(tmp[ipos+6:].split()[0]) - else: - natyp = -1 - - # consistency check - if naez==-1 and natyp>0: - naez = natyp - elif natyp==-1 and naez>0: - natyp = naez - elif natyp==-1 and naez==-1: - raise ValueError('Neither NAEZ nor NATYP found in %s'%outfile) - - # read shape index from atominfo file - f = open(atominfo) - tmptxt = f.readlines() - f.close() - - itmp = search_string('', tmptxt) + 1 - ishape = [] - for iatom in range(natyp): - txt = tmptxt[itmp+iatom] - if natyp>naez: #CPA option - ishape.append(int(txt.split()[1])) - else: - ishape.append(int(txt.split()[0])) - - return natyp, naez, ishape - - -def get_radii(naez, radii): - f = open(radii) - txt = f.readlines() - f.close() - results = [] - for iatom in range(naez): - # IAT Rmt0 Rout Ratio(%) dist(NN) Rout/dist(NN) (%) - # 1 0.5000001547 0.7071070000 70.71 1.0000003094 70.71 - tmpline = txt[3+iatom].split() - tmpline = 
[int(tmpline[0]), float(tmpline[1]), float(tmpline[2]), float(tmpline[3]), float(tmpline[4]), float(tmpline[5])] - results.append(tmpline) - return results - - -def get_fpradius(naez, atominfo): - f = open(atominfo) - txt = f.readlines() - f.close() - itmp = search_string('', txt) + 1 - results = [] - for iatom in range(naez): - #ZAT LMXC KFG FAC - # 0.00 1 3 3 0 0 1 1 1 1. 199 2.3166000 0.4696902 - tmpline = float(txt[itmp+iatom].split()[-1]) - results.append(tmpline) - return results - - -def get_alat(inpfile): - f = open(inpfile) - txt = f.readlines() - f.close() - itmp = search_string('ALATBASIS', txt) - result = float(txt[itmp].split('ALATBASIS')[1].split('=')[1].split()[0]) - return result - - -def get_radial_meshpoints(potfile): - f = open(potfile) - txt = f.readlines() - f.close() - itmp = 0 - result = [] - while itmp >= 0: - itmp = search_string('exc:', txt) - if itmp >= 0: - txt.pop(itmp)# remove header line - tmp = txt.pop(itmp+3) # extract meshpoints - result.append(float(tmp)) - return result - diff --git a/aiida_kkr/workflows/check_magnetic_state.py b/aiida_kkr/workflows/check_magnetic_state.py index b1dcb1f0..498120ea 100644 --- a/aiida_kkr/workflows/check_magnetic_state.py +++ b/aiida_kkr/workflows/check_magnetic_state.py @@ -7,13 +7,12 @@ from aiida.orm import Code, DataFactory from aiida.work.workchain import WorkChain, while_, if_, ToContext -from aiida.work.run import submit, run +from aiida.work.launch import submit, run from aiida.work import workfunction as wf -from aiida.work.process_registry import ProcessRegistry from aiida.common.datastructures import calc_states from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.calculations.voro import VoronoiCalculation -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams __copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " diff --git a/aiida_kkr/workflows/check_para_convergence.py 
b/aiida_kkr/workflows/check_para_convergence.py index eb819dee..acb5c2f0 100644 --- a/aiida_kkr/workflows/check_para_convergence.py +++ b/aiida_kkr/workflows/check_para_convergence.py @@ -9,11 +9,10 @@ from aiida.work.workchain import WorkChain, while_, if_, ToContext from aiida.work.run import submit, run from aiida.work import workfunction as wf -from aiida.work.process_registry import ProcessRegistry from aiida.common.datastructures import calc_states from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.calculations.voro import VoronoiCalculation -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams __copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " @@ -82,4 +81,4 @@ def define(cls, spec): #spec.dynamic_output() - \ No newline at end of file + diff --git a/aiida_kkr/workflows/dos.py b/aiida_kkr/workflows/dos.py index b1053f37..0782fc14 100644 --- a/aiida_kkr/workflows/dos.py +++ b/aiida_kkr/workflows/dos.py @@ -13,10 +13,9 @@ from aiida.orm import Code, DataFactory, load_node from aiida.work.workchain import WorkChain, if_, ToContext -from aiida.work.run import submit +from aiida.work.launch import submit from aiida.work import workfunction as wf -from aiida.work.process_registry import ProcessRegistry -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams from aiida_kkr.tools.common_workfunctions import test_and_get_codenode, get_parent_paranode, update_params_wf, get_inputs_kkr from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.calculations.voro import VoronoiCalculation @@ -109,9 +108,8 @@ def start(self): """ init context and some parameters """ - self.report('INFO: started KKR dos workflow version {}\n' - 'INFO: Workchain node identifiers: {}' - ''.format(self._workflowversion, ProcessRegistry().current_calc_node)) + self.report('INFO: started KKR dos workflow version {}' + ''.format(self._workflowversion)) ####### 
init ####### @@ -136,8 +134,8 @@ def start(self): self.ctx.dos_params_dict = wf_dict.get('dos_params', self._wf_default['dos_params']) self.ctx.dos_kkrparams = None # is set in set_params_dos - self.ctx.description_wf = self.inputs.get('_description', self._wf_description) - self.ctx.label_wf = self.inputs.get('_label', self._wf_label) + self.ctx.description_wf = self.inputs.get('description', self._wf_description) + self.ctx.label_wf = self.inputs.get('label', self._wf_label) self.report('INFO: use the following parameter:\n' 'use_mpi: {}\n' @@ -288,7 +286,7 @@ def get_dos(self): label = 'KKR DOS calc.' dosdict = self.ctx.dos_params_dict - description = 'dos calculation using the following parameter set. emin= {}, emax= {}, nepts= {}, tempr={}, kmesh={}'.format(dosdict['emin'], dosdict['emax'], dosdict['nepts'], dosdict['tempr'], dosdict['kmesh']) + description = 'dos calc: emin= {}, emax= {}, nepts= {}, tempr={}, kmesh={}'.format(dosdict['emin'], dosdict['emax'], dosdict['nepts'], dosdict['tempr'], dosdict['kmesh']) code = self.inputs.kkr remote = self.inputs.remote_data params = self.ctx.dos_kkrparams @@ -301,7 +299,7 @@ def get_dos(self): # run the DOS calculation self.report('INFO: doing calculation') - dosrun = submit(KkrProcess, **inputs) + dosrun = self.submit(KkrProcess, **inputs) return ToContext(dosrun=dosrun) @@ -386,8 +384,8 @@ def parse_dosfiles(dospath): """ parse dos files to XyData nodes """ - from aiida_kkr.tools.common_functions import interpolate_dos - from aiida_kkr.tools.common_functions import get_Ry2eV + from masci_tools.io.common_functions import interpolate_dos + from masci_tools.io.common_functions import get_Ry2eV from aiida.orm import DataFactory XyData = DataFactory('array.xy') @@ -447,7 +445,7 @@ def create_dos_result_node(outputnode, dos_retrieved): dos_extracted = False outdict = {} - outdict['results_wf'] = outputnode.copy() + outdict['results_wf'] = outputnode if dos_extracted: outdict['dos_data'] = dosXyDatas[0] 
outdict['dos_data_interpol'] = dosXyDatas[1] @@ -461,5 +459,5 @@ def create_dos_result_node_minimal(outputnode): minimal if dosrun unsuccesful """ outdict = {} - outdict['results_wf'] = outputnode.copy() + outdict['results_wf'] = outputnode return outdict diff --git a/aiida_kkr/workflows/gf_writeout.py b/aiida_kkr/workflows/gf_writeout.py new file mode 100644 index 00000000..e9ddc5df --- /dev/null +++ b/aiida_kkr/workflows/gf_writeout.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +""" +In this module you find the base workflow for writing out the kkr_flexfiles and +some helper methods to do so with AiiDA +""" + + +from aiida.orm import Code, DataFactory, load_node +from aiida.work.workchain import WorkChain, ToContext, if_ +from masci_tools.io.kkr_params import kkrparams +from aiida_kkr.tools.common_workfunctions import test_and_get_codenode, get_parent_paranode, update_params_wf, get_inputs_kkr +from aiida_kkr.calculations.kkr import KkrCalculation +from aiida.orm.calculation.job import JobCalculation +from aiida.common.datastructures import calc_states +from aiida.orm import WorkCalculation +from aiida.common.exceptions import InputValidationError + + + +__copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " + "IAS-1/PGI-1, Germany. All rights reserved.") +__license__ = "MIT license, see LICENSE.txt file" +__version__ = "0.1" +__contributors__ = u"Fabian Bertoldo" + + +RemoteData = DataFactory('remote') +StructureData = DataFactory('structure') +ParameterData = DataFactory('parameter') +FolderData = DataFactory('folder') +KkrProcess = KkrCalculation.process() + + +class kkr_flex_wc(WorkChain): + """ + Workchain of a kkr_flex calculation to calculate the Green function with + KKR starting from the RemoteData node of a previous calculation (either Voronoi or KKR). 
+ + :param options_parameters: (ParameterData), Workchain specifications + :param remote_data: (RemoteData), mandatory; from a converged KKR calculation + :param kkr: (Code), mandatory; KKR code running the flexfile writeout + :param imp_info: ParameterData, mandatory: imp_info node specifying information + of the impurities in the system + + :return result_kkr_flex_wc: (ParameterData), Information of workflow results + like success, last result node, list with convergence behavior + """ + + _workflowversion = __version__ + _wf_label = 'kkr_flex_wc' + _wf_description = 'Workflow for a KKR flex calculation starting from RemoteData node of previous converged KKR calculation' + + + _options_default = {'queue_name' : '', # Queue name to submit jobs too + 'resources': {"num_machines": 1}, # resources to allowcate for the job + 'walltime_sec' : 60*60, # walltime after which the job gets killed (gets parsed to KKR)} + 'custom_scheduler_commands' : '', # some additional scheduler commands + 'use_mpi' : False} # execute KKR with mpi or without + + @classmethod + def get_wf_defaults(self): + """ + Print and return _wf_defaults dictionary. Can be used to easily create set of wf_parameters. 
+ returns _wf_defaults + """ + + print('Version of workflow: {}'.format(self._workflowversion)) + return self._options_default + + @classmethod + def define(cls, spec): + """ + Defines the outline of the workflow + """ + + # Take input of the workflow or use defaults defined above + super(kkr_flex_wc, cls).define(spec) + + spec.input("kkr", valid_type=Code, required=True) + spec.input("options_parameters", valid_type=ParameterData, required=False, + default=ParameterData(dict=cls._options_default)) + spec.input("remote_data", valid_type=RemoteData, required=True) + spec.input("imp_info", valid_type=ParameterData, required=True) + + # Here the structure of the workflow is defined + spec.outline( + cls.start, + if_(cls.validate_input)( + cls.set_params_flex, + cls.get_flex), # calculate host GF and kkr-flexfiles + cls.return_results) + + # ToDo: improve error codes + spec.exit_code(101, 'ERROR_INVALID_INPUT_IMP_INFO', + message="ERROR: the 'imp_info' input ParameterData node could not be used") + spec.exit_code(102, 'ERROR_INVALID_INPUT_KKR', + message="ERROR: the code you provided for kkr does not use the plugin kkr.kkr") + spec.exit_code(103, 'ERROR_INVALID_INPUT_REMOTE_DATA', + message="ERROR: No remote_data was provided as Input") + + # specify the outputs + #spec.output('remote_folder', valid_type=RemoteData) + spec.output('calculation_info', valid_type=ParameterData) + spec.output('GF_host_remote', valid_type=RemoteData) + + + + def start(self): + """ + init context and some parameters + """ + + self.report('INFO: started KKR flex workflow version {}' + ''.format(self._workflowversion)) + + ####### init ####### + # internal para / control para + self.ctx.abort = False + + # input both wf and options parameters + options_dict = self.inputs.options_parameters.get_dict() + + if options_dict == {}: + options_dict = self._options_default + self.report('INFO: using default options parameters') + + # set values, or defaults + # ToDo: arrange option assignment 
differently (look at scf.py from aiida-fleur) + self.ctx.use_mpi = options_dict.get('use_mpi', self._options_default['use_mpi']) + self.ctx.resources = options_dict.get('resources', self._options_default['resources']) + self.ctx.walltime_sec = options_dict.get('walltime_sec', self._options_default['walltime_sec']) + self.ctx.queue = options_dict.get('queue_name', self._options_default['queue_name']) + self.ctx.custom_scheduler_commands = options_dict.get('custom_scheduler_commands', self._options_default['custom_scheduler_commands']) + + self.ctx.description_wf = self.inputs.get('description', self._wf_description) + self.ctx.label_wf = self.inputs.get('label', self._wf_label) + + + self.report('INFO: use the following parameter:\n' + 'use_mpi: {}\n' + 'Resources: {}\n' + 'Walltime (s): {}\n' + 'queue name: {}\n' + 'scheduler command: {}\n' + 'description: {}\n' + 'label: {}\n'.format(self.ctx.use_mpi, self.ctx.resources, self.ctx.walltime_sec, + self.ctx.queue, self.ctx.custom_scheduler_commands, + self.ctx.description_wf, self.ctx.label_wf)) + + # return para/vars + self.ctx.successful = True + self.ctx.errors = [] + self.ctx.formula = '' + + + + def validate_input(self): + """ + Validate input + """ + + inputs = self.inputs + input_ok = True + + if not 'imp_info' in inputs: + input_ok = False + return self.exit_codes.ERROR_INVALID_INPUT_IMP_INFO + + if 'remote_data' in inputs: + input_ok = True + else: + input_ok = False + return self.exit_codes.ERROR_INVALID_REMOTE_DATA + + # extract correct remote folder of last calculation if input remote_folder node + # is not from KKRCalculation but kkr_scf_wc workflow + input_remote = self.inputs.remote_data + # check if input_remote has single KKRCalculation parent + parents = input_remote.get_inputs(node_type=JobCalculation) + nparents = len(parents) + if nparents!=1: + # extract parent workflow and get uuid of last calc from output node + parent_workflow = input_remote.inp.last_RemoteData + if not 
isinstance(parent_workflow, WorkCalculation): + raise InputValidationError("Input remote_data node neither output of a KKR calculation nor of kkr_scf_wc workflow") + parent_workflow_out = parent_workflow.out.output_kkr_scf_wc_ParameterResults + uuid_last_calc = parent_workflow_out.get_dict().get('last_calc_nodeinfo').get('uuid') + last_calc = load_node(uuid_last_calc) + if not isinstance(last_calc, KkrCalculation): + raise InputValidationError("Extracted last_calc node not of type KkrCalculation: check remote_data input node") + # overwrite remote_data node with extracted remote folder + output_remote = last_calc.out.remote_folder + self.inputs.remote_data = output_remote + + if 'kkr' in inputs: + try: + test_and_get_codenode(inputs.kkr, 'kkr.kkr', use_exceptions=True) + except ValueError: + error = ("The code you provided for kkr does not " + "use the plugin kkr.kkr") + self.ctx.errors.append(error) + input_ok = False + return self.exit_codes.ERROR_INVALID_INPUT_KKR + + # set self.ctx.input_params_KKR + self.ctx.input_params_KKR = get_parent_paranode(self.inputs.remote_data) + + if input_ok: + self.report('INFO: checking inputs successful') + + return input_ok + + + + def set_params_flex(self): + """ + Take input parameter node and change to input from wf_parameter and options + """ + + self.report('INFO: setting parameters ...') + + params = self.ctx.input_params_KKR + input_dict = params.get_dict() + para_check = kkrparams() + + # step 1: try to fill keywords + try: + for key, val in input_dict.iteritems(): + para_check.set_value(key, val, silent=True) + except: + error = 'ERROR: calc_parameters given are not consistent! Hint: did you give an unknown keyword?' 
+ self.ctx.errors.append(error) + self.control_end_wc(error) + + # step 2: check if all mandatory keys are there + label = '' + descr = '' + missing_list = para_check.get_missing_keys(use_aiida=True) + if missing_list != []: + kkrdefaults = kkrparams.get_KKRcalc_parameter_defaults()[0] + kkrdefaults_updated = [] + for key_default, val_default in kkrdefaults.items(): + if key_default in missing_list: + para_check.set_value(key_default, kkrdefaults.get(key_default), silent=True) + kkrdefaults_updated.append(key_default) + missing_list.remove(key_default) + if len(missing_list)>0: + error = 'ERROR: calc_parameters misses keys: {}'.format(missing_list) + self.ctx.errors.append(error) + self.control_end_wc(error) + else: + self.report('updated KKR parameter node with default values: {}'.format(kkrdefaults_updated)) + label = 'add_defaults_' + descr = 'added missing default keys, ' + + runopt = para_check.get_dict().get('RUNOPT', []) + #self.report(para_check.get_dict()) + if runopt == None: + runopt = [] + runopt = [i.strip() for i in runopt] + if 'KKRFLEX' not in runopt: + runopt.append('KKRFLEX') + + self.report('INFO: RUNOPT set to: {}'.format(runopt)) + para_check = update_params_wf(self.ctx.input_params_KKR, ParameterData(dict={'RUNOPT':runopt})) + + #construct the final param node containing all of the params + updatenode = ParameterData(dict=para_check.get_dict()) + updatenode.label = label+'KKRparam_flex' + updatenode.description = descr+'KKR parameter node extracted from parent parameters and wf_parameter and options input node.' + paranode_flex = update_params_wf(self.ctx.input_params_KKR, updatenode) + self.ctx.flex_kkrparams = paranode_flex + self.ctx.flex_runopt = runopt + + + + def get_flex(self): + """ + Submit a KKRFLEX calculation + """ + + label = 'KKRFLEX calc.' 
+ description = 'KKRFLEX calculation to write out host GF' + code = self.inputs.kkr + remote = self.inputs.remote_data + params = self.ctx.flex_kkrparams + imp_info = self.inputs.imp_info + options = {"max_wallclock_seconds": self.ctx.walltime_sec, + "resources": self.ctx.resources, + "queue_name": self.ctx.queue} + if self.ctx.custom_scheduler_commands: + options["custom_scheduler_commands"] = self.ctx.custom_scheduler_commands + inputs = get_inputs_kkr(code, remote, options, label, description, parameters=params, serial=(not self.ctx.use_mpi), imp_info=imp_info) + + # run the KKRFLEX calculation + self.report('INFO: doing calculation') + flexrun = self.submit(KkrProcess, **inputs) + + return ToContext(flexrun=flexrun) + + + def return_results(self): + """ + Return the results of the KKRFLEX calculation. + This should run through and produce output nodes even if everything failed, + therefore it only uses results from context. + """ + + # capture error of unsuccessful flexrun + calc_state = self.ctx.flexrun.get_state() + if calc_state != calc_states.FINISHED: + self.ctx.successful = False + error = ('ERROR: KKRFLEX calculation failed somehow it is ' + 'in state{}'.format(calc_state)) + self.ctx.errors.append(error) + + # create dict to store results of workflow output + outputnode_dict = {} + outputnode_dict['workflow_name'] = self.__class__.__name__ + outputnode_dict['workflow_version'] = self._workflowversion + outputnode_dict['use_mpi'] = self.ctx.use_mpi + outputnode_dict['resources'] = self.ctx.resources + outputnode_dict['walltime_sec'] = self.ctx.walltime_sec + outputnode_dict['queue'] = self.ctx.queue + outputnode_dict['custom_scheduler_commands'] = self.ctx.custom_scheduler_commands + outputnode_dict['successful'] = self.ctx.successful + outputnode_dict['pk_flexcalc'] = self.ctx.flexrun.pk + outputnode_dict['list_of_errors'] = self.ctx.errors + + outputnode = ParameterData(dict=outputnode_dict) + outputnode.label = 'kkr_flex_wc_results' + 
outputnode.description = '' + outputnode.store() + + # return the input remote_data folder as output node + #self.out('remote_data', self.inputs.remote_data) + # return ParameterData node containing information about previous calculation + self.out('calculation_info', outputnode) + # return retrieved data from kkrflex calculation + self.out('GF_host_remote', self.ctx.flexrun.out.remote_folder) + + self.report('INFO: created GF writeout result nodes') + +# self.report("INFO: create GF writeout results nodes: outputnode={}".format(outputnode)) +# try: +# self.report("INFO: create GF writeout results nodes. KKRFLEX calc retrieved node={}".format(self.ctx.flexrun.out.retrieved)) +# has_flexrun = True +# except AttributeError as e: +# self.report("ERROR: no KKRFLEX calc retrieved node found") +# self.report("Caught AttributeError {}".format(e)) +# has_flexrun = False + + #for link_name, node in outdict.iteritems(): + #self.report("INFO: storing node '{}' with link name '{}'".format(node, link_name)) + #self.report("INFO: node type: {}".format(type(node))) + #self.out(link_name, node) + + self.report("INFO: done with KKRFLEX GF writeout workflow!\n") +# self.report("Successful run: {}".format(has_flexrun)) + + + def control_end_wc(self, errormsg): + """ + Controled way to shutdown the workchain. 
will initalize the output nodes + """ + self.report('ERROR: shutting workchain down in a controlled way.\n') + self.ctx.successful = False + self.ctx.abort = True + self.report(errormsg) + self.return_results() + #self.abort(errormsg) diff --git a/aiida_kkr/workflows/kkr_imp.py b/aiida_kkr/workflows/kkr_imp.py new file mode 100644 index 00000000..3d56deba --- /dev/null +++ b/aiida_kkr/workflows/kkr_imp.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- +""" +In this module you find the total workflow for a kkr impurity calculation +and some helper methods to do so with AiiDA +""" + +from aiida.orm import Code, DataFactory, load_node +from aiida.work.workchain import WorkChain, ToContext, if_ +from aiida_kkr.calculations.voro import VoronoiCalculation +from masci_tools.io.kkr_params import kkrparams +from aiida_kkr.tools.common_workfunctions import test_and_get_codenode, neworder_potential_wf +from aiida_kkr.workflows.gf_writeout import kkr_flex_wc +from aiida_kkr.workflows.voro_start import kkr_startpot_wc +from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc +import numpy as np + +__copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " + "IAS-1/PGI-1, Germany. All rights reserved.") +__license__ = "MIT license, see LICENSE.txt file" +__version__ = "0.2" +__contributors__ = u"Fabian Bertoldo" +#TODO: generalize workflow to multiple impurities +#TODO: add additional checks for the input +#TODO: maybe work on a clearer outputnode structure + +RemoteData = DataFactory('remote') +StructureData = DataFactory('structure') +ParameterData = DataFactory('parameter') +SinglefileData = DataFactory('singlefile') +FolderData = DataFactory('folder') + + + +class kkr_imp_wc(WorkChain): + """ + Workchain of a kkrimp calculation starting either from scratch (with a structure + and impurity_info node), or with a converged host potential and impurity + startpotentials, ... to calculate the converged host-impurity potential of the system. 
+ + :param options_parameters: (ParameterData), Workchain specifications + :param wf_parameters: (ParameterData), specifications for the kkr impurity workflow + :param voro_aux_parameters: (ParameterData), specification for the auxiliary voronoi calculation for the impurity + :param kkrimpcode: (Code), mandatory: KKRimp code converging the host-imp-potential + :param kkrcode: (Code), mandatory: KKR code for calculation the host potential + :param vorocode: (Code), mandatory: Voronoi code to generate the impurity startpot + :param GF_remote_data: (RemoteData): remote folder of a previous kkrflex calculation containing the flexfiles ... + + :return result_kkr_imp_wc: (ParameterData), Information of workflow results + """ + + + _workflowversion = __version__ + _wf_label = 'kkr_imp_wc' + _wf_description = 'Workflow for a KKRimp calculation' + + + _options_default = {'queue_name' : '', # Queue name to submit jobs too + 'resources': {"num_machines": 1}, # resources to allowcate for the job + 'walltime_sec' : 60*60, # walltime after which the job gets killed (gets parsed to KKR)} + 'custom_scheduler_commands' : '', # some additional scheduler commands + 'use_mpi' : False} # execute KKR with mpi or without + + _wf_default = {'nspin': 1, # non-magnetic calculation, set nspin = 2 for magnetic case + 'kkr_runmax': 3, # Maximum number of kkr jobs/starts (defauld iterations per start) + 'threshold_aggressive_mixing': 5*10**-2, # threshold after which agressive mixing is used + 'convergence_criterion' : 3*10**-2, # Stop if charge denisty is converged below this value + 'mixreduce': 0.5, # reduce mixing factor by this factor if calculaito fails due to too large mixing + 'strmix': 0.03, # mixing factor of simple mixing + 'aggressive_mix': 3, # type of aggressive mixing (3: broyden's 1st, 4: broyden's 2nd, 5: generalized anderson) + 'aggrmix': 0.01, # mixing factor of aggressive mixing + 'nsteps': 20, # number of iterations done per KKR calculation + 'non_spherical': 1, # use 
non-spherical parts of the potential (0 if you don't want that) + 'broyden_number': 20, # number of potentials to 'remember' for Broyden's mixing + 'born_iter': 2, # number of Born iterations for the non-spherical calculation + 'mag_init' : False, # initialize and converge magnetic calculation + 'hfield' : [0.1, 10], # Ry # external magnetic field used in initialization step + 'init_pos' : None, # position in unit cell where magnetic field is applied [default (None) means apply to all] + 'r_cls' : 1.3, # alat # default cluster radius, is increased iteratively + 'calc_orbmom' : False, # defines of orbital moments will be calculated and written out + 'spinorbit' : False, # SOC calculation (True/False) + 'newsol' : False } # new SOC solver is applied + + _voro_aux_default = {'dos_params' : {'nepts': 61, # DOS params: number of points in contour + 'tempr': 200, # K # DOS params: temperature + 'emin': -1, # Ry # DOS params: start of energy contour + 'emax': 1, # Ry # DOS params: end of energy contour + 'kmesh': [50, 50, 50]}, # DOS params: kmesh for DOS calculation (typically higher than in scf contour) + 'num_rerun' : 4, # number of times voronoi+starting dos+checks is rerun to ensure non-negative DOS etc + 'fac_cls_increase' : 1.3, # alat # factor by which the screening cluster is increased each iteration (up to num_rerun times) + 'r_cls' : 1.3, # alat # default cluster radius, is increased iteratively + 'natom_in_cls_min' : 79, # minimum number of atoms in screening cluster + 'delta_e_min' : 1., # eV # minimal distance in DOS contour to emin and emax in eV + 'threshold_dos_zero' : 10**-3, #states/eV + 'check_dos': True, # logical to determine if DOS is computed and checked + 'delta_e_min_core_states' : 1.0, # Ry # minimal distance of start of energy contour to highest lying core state in Ry + 'lmax': 3, + 'gmax': 65., + 'rmax': 7., + 'rclustz': 2.5} + + + + @classmethod + def get_wf_defaults(self): + """ + Print and return _wf_defaults dictionary. 
Can be used to easily create + set of wf_parameters. + + returns _wf_defaults + """ + + print('Version of workflow: {}'.format(self._workflowversion)) + return self._options_default, self._wf_default, self._voro_aux_default + + + + @classmethod + def define(cls, spec): + """ + Defines the outline of the workflow + """ + + super(kkr_imp_wc, cls).define(spec) + + # define the inputs of the workflow + spec.input("vorocode", valid_type=Code, required=True) + spec.input("kkrimpcode", valid_type=Code, required=True) + spec.input("impurity_info", valid_type=ParameterData, required=True) + spec.input("kkrcode", valid_type=Code, required=False) + spec.input("remote_converged_host", valid_type=RemoteData, required=False) + spec.input("gf_remote", valid_type=RemoteData, required=False) + spec.input("options_parameters", valid_type=ParameterData, required=False) + spec.input("voro_aux_parameters", valid_type=ParameterData, required=False) + spec.input("wf_parameters", valid_type=ParameterData, required=False) + + + # structure of the workflow + spec.outline( + cls.start, # initialize workflow + if_(cls.validate_input)( # validate the input (if true, run_gf_writeout, else skip) + cls.run_gf_writeout), # write out the host GF + cls.run_voroaux, # calculate the auxiliary impurity potentials + cls.construct_startpot, # construct the host-impurity startpotential + cls.run_kkrimp_scf, # run the kkrimp_sub workflow to converge the host-imp startpot + cls.return_results) # check if the calculation was successful and return the result nodes + + + # define the possible exit codes + spec.exit_code(141, 'ERROR_INVALID_INPUT_CODE', + message="ERROR: one or more of the codes you provided do not " + "use the necessary plugins: kkr.voro, kkr.kkr, kkr.kkrimp") + spec.exit_code(142, 'ERROR_MISSING_KKRCODE', + message="ERROR: since GF writeout step has to be conducted, " + "'kkrcode' is needed as an input") + spec.exit_code(143, 'ERROR_MISSING_REMOTE', + message="ERROR: neither converged host 
remote nor GF writeout " + "remote is given as an input. One of them is needed to " + "proceed with this workflow!") + + + # define the outputs of the workflow + spec.output('workflow_info', valid_type=ParameterData) + spec.output('last_calc_output_parameters', valid_type=ParameterData) + spec.output('last_calc_info', valid_type=ParameterData) + + + + def start(self): + """ + Init context and some parameters + """ + + self.report('INFO: started KKR impurity workflow version {}' + ''.format(self._workflowversion)) + + # get input parameters + if 'options_parameters' in self.inputs: + options_dict = self.inputs.options_parameters.get_dict() + else: + options_dict = self._options_default + self.report('INFO: using default options') + if 'wf_parameters' in self.inputs: + wf_dict = self.inputs.wf_parameters.get_dict() + else: + wf_dict = self._wf_default + self.report('INFO: using default workflow parameters for KKRimp scf cycle') + if 'voro_aux_parameters' in self.inputs: + voro_aux_dict = self.inputs.voro_aux_parameters.get_dict() + else: + voro_aux_dict = self._voro_aux_default + self.report('INFO: using default workflow parameters for auxiliary voronoi calculation') + + + # set option parameters from input, or defaults + self.ctx.use_mpi = options_dict.get('use_mpi', self._options_default['use_mpi']) + self.ctx.resources = options_dict.get('resources', self._options_default['resources']) + self.ctx.walltime_sec = options_dict.get('walltime_sec', self._options_default['walltime_sec']) + self.ctx.queue = options_dict.get('queue_name', self._options_default['queue_name']) + self.ctx.custom_scheduler_commands = options_dict.get('custom_scheduler_commands', self._options_default['custom_scheduler_commands']) + self.ctx.options_params_dict = ParameterData(dict={'use_mpi': self.ctx.use_mpi, 'resources': self.ctx.resources, 'walltime_sec': self.ctx.walltime_sec, + 'queue_name': self.ctx.queue, 'custom_scheduler_commands': self.ctx.custom_scheduler_commands}) + + # set label 
and description of the workflow + self.ctx.description_wf = self.inputs.get('description', 'Workflow for a KKR impurity calculation starting from a host-impurity potential') + self.ctx.label_wf = self.inputs.get('label', 'kkr_imp_sub_wc') + + # set parameters for the auxiliary voronoi calculation + self.ctx.voro_dos_params = voro_aux_dict.get('dos_params', self._voro_aux_default['dos_params']) + self.ctx.voro_num_rerun = voro_aux_dict.get('num_rerun', self._voro_aux_default['num_rerun']) + self.ctx.voro_fac_cls_increase = voro_aux_dict.get('fac_cls_increase', self._voro_aux_default['fac_cls_increase']) + self.ctx.voro_r_cls = voro_aux_dict.get('r_cls', self._voro_aux_default['r_cls']) + self.ctx.voro_natom_in_cls_min = voro_aux_dict.get('natom_in_cls_min', self._voro_aux_default['natom_in_cls_min']) + self.ctx.voro_delta_e_min = voro_aux_dict.get('delta_e_min', self._voro_aux_default['delta_e_min']) + self.ctx.voro_threshold_dos_zero = voro_aux_dict.get('threshold_dos_zero', self._voro_aux_default['threshold_dos_zero']) + self.ctx.voro_check_dos = voro_aux_dict.get('check_dos', self._voro_aux_default['check_dos']) + self.ctx.voro_delta_e_min_core_states = voro_aux_dict.get('delta_e_min_core_states', self._voro_aux_default['delta_e_min_core_states']) + self.ctx.voro_lmax = voro_aux_dict.get('lmax', self._voro_aux_default['lmax']) + self.ctx.voro_gmax = voro_aux_dict.get('gmax', self._voro_aux_default['gmax']) + self.ctx.voro_rmax = voro_aux_dict.get('rmax', self._voro_aux_default['rmax']) + self.ctx.voro_rclustz = voro_aux_dict.get('rclustz', self._voro_aux_default['rclustz']) + # set up new parameter dict to pass to voronoi subworkflow later + self.ctx.voro_params_dict = ParameterData(dict={'queue_name': self.ctx.queue, 'resources': self.ctx.resources, 'walltime_sec': self.ctx.walltime_sec, + 'use_mpi': self.ctx.use_mpi, 'custom_scheduler_commands': self.ctx.custom_scheduler_commands, + 'dos_params': self.ctx.voro_dos_params, 'num_rerun': self.ctx.voro_num_rerun, + 
'fac_cls_increase': self.ctx.voro_fac_cls_increase, 'r_cls': self.ctx.voro_r_cls, + 'natom_in_cls_min': self.ctx.voro_natom_in_cls_min, 'delta_e_min': self.ctx.voro_delta_e_min, + 'threshold_dos_zero': self.ctx.voro_threshold_dos_zero, 'check_dos': self.ctx.voro_check_dos, + 'delta_e_min_core_states': self.ctx.voro_delta_e_min_core_states}) + + # set workflow parameters for the KKR impurity calculation + self.ctx.nspin = wf_dict.get('nspin', self._wf_default['nspin']) + self.ctx.nsteps = wf_dict.get('nsteps', self._wf_default['nsteps']) + self.ctx.kkr_runmax = wf_dict.get('kkr_runmax', self._wf_default['kkr_runmax']) + self.ctx.threshold_aggressive_mixing = wf_dict.get('threshold_aggressive_mixing', self._wf_default['threshold_aggressive_mixing']) + self.ctx.convergence_criterion = wf_dict.get('convergence_criterion', self._wf_default['convergence_criterion']) + self.ctx.mixreduce = wf_dict.get('mixreduce', self._wf_default['mixreduce']) + self.ctx.strmix = wf_dict.get('strmix', self._wf_default['strmix']) + self.ctx.aggressive_mix = wf_dict.get('aggressive_mix', self._wf_default['aggressive_mix']) + self.ctx.aggrmix = wf_dict.get('aggrmix', self._wf_default['aggrmix']) + self.ctx.non_spherical = wf_dict.get('non_spherical', self._wf_default['non_spherical']) + self.ctx.broyden_number = wf_dict.get('broyden_number', self._wf_default['broyden_number']) + self.ctx.born_iter = wf_dict.get('born_iter', self._wf_default['born_iter']) + self.ctx.mag_init = wf_dict.get('mag_init', self._wf_default['mag_init']) + self.ctx.hfield = wf_dict.get('hfield', self._wf_default['hfield']) + self.ctx.init_pos = wf_dict.get('init_pos', self._wf_default['init_pos']) + self.ctx.r_cls = wf_dict.get('r_cls', self._wf_default['r_cls']) + self.ctx.calc_orbmom = wf_dict.get('calc_orbmom', self._wf_default['calc_orbmom']) + self.ctx.spinorbit = wf_dict.get('spinorbit', self._wf_default['spinorbit']) + self.ctx.newsol = wf_dict.get('newsol', self._wf_default['newsol']) + # set up new 
parameter dict to pass to kkrimp subworkflow later + self.ctx.kkrimp_params_dict = ParameterData(dict={'nspin': self.ctx.nspin, 'nsteps': self.ctx.nsteps, 'kkr_runmax': self.ctx.kkr_runmax, + 'threshold_aggressive_mixing': self.ctx.threshold_aggressive_mixing, + 'convergence_criterion': self.ctx.convergence_criterion, 'mixreduce': self.ctx.mixreduce, + 'strmix': self.ctx.strmix, 'aggressive_mix': self.ctx.aggressive_mix, + 'aggrmix': self.ctx.aggrmix, 'non_spherical': self.ctx.non_spherical, + 'broyden_number': self.ctx.broyden_number, 'born_iter': self.ctx.born_iter, + 'mag_init': self.ctx.mag_init, 'hfield': self.ctx.hfield, 'init_pos': self.ctx.init_pos, + 'r_cls': self.ctx.r_cls, 'calc_orbmom': self.ctx.calc_orbmom, + 'spinorbit': self.ctx.spinorbit, 'newsol': self.ctx.newsol}) + + + # report the chosen parameters to the user + self.report('INFO: use the following parameter:\n' + '\nGeneral settings\n' + 'use mpi: {}\n' + 'resources: {}\n' + 'walltime (s): {}\n' + 'queue name: {}\n' + 'scheduler command: {}\n' + 'description: {}\n' + 'label: {}\n' + 'nspin: {}\n' + 'parameters for the voroaux calculation: {}\n' + 'parameters for the kkrimp scf: {}\n' + ''.format(self.ctx.use_mpi, self.ctx.resources, self.ctx.walltime_sec, + self.ctx.queue, self.ctx.custom_scheduler_commands, + self.ctx.description_wf, self.ctx.label_wf, + self.ctx.nspin, self.ctx.voro_params_dict.get_attrs(), + self.ctx.kkrimp_params_dict.get_attrs())) + + + + def validate_input(self): + """ + Validate the input and catch possible errors from the input + """ + + inputs = self.inputs + inputs_ok = True + + if 'kkrimpcode' and 'vorocode' in inputs: + try: + test_and_get_codenode(inputs.kkrimpcode, 'kkr.kkrimp', use_exceptions=True) + test_and_get_codenode(inputs.vorocode, 'kkr.voro', use_exceptions=True) + except ValueError: + inputs_ok = False + self.report(self.exit_codes.ERROR_INVALID_INPUT_CODE) + return self.exit_codes.ERROR_INVALID_INPUT_CODE + elif 'kkrcode' in inputs: + try: + 
test_and_get_codenode(inputs.kkrcode, 'kkr.kkr', use_exceptions=True) + except ValueError: + inputs_ok = False + self.report(self.exit_codes.ERROR_INVALID_INPUT_CODE) + return self.exit_codes.ERROR_INVALID_INPUT_CODE + + if 'impurity_info' in inputs: + self.report('INFO: found the following impurity info node in input: {}'.format(inputs.impurity_info.get_attrs())) + + if 'gf_remote' in inputs and 'remote_converged_host' in inputs: + self.report('INFO: both converged host remote (pid: {}) and GF writeout remote (pid: {}) found in input. ' + 'Converged host remote will not be used. Skip GF writeout step and ' + 'start workflow with auxiliary voronoi calculations.' .format(inputs.remote_converged_host.pk, inputs.gf_remote.pk)) + do_gf_calc = False + elif 'remote_converged_host' in inputs: + self.report('INFO: found converged host remote (pid: {}) in input. ' + 'Start workflow by calculating the host GF.'.format(inputs.remote_converged_host.pk)) + if 'kkrcode' in inputs: + do_gf_calc = True + else: + inputs_ok = False + self.report(self.exit_codes.ERROR_MISSING_KKRCODE) + return self.exit_codes.ERROR_MISSING_KKRCODE + elif 'gf_remote' in inputs: + self.report('INFO: found remote_data node (pid: {}) from previous KKRFLEX calculation (pid: {}) in input. ' + 'Skip GF writeout step and start workflow by auxiliary voronoi calculations.' + .format(inputs.gf_remote.pk, inputs.gf_remote.inp.remote_folder.pk)) + do_gf_calc = False + else: + inputs_ok = False + self.report(self.exit_codes.ERROR_MISSING_REMOTE) + return self.exit_codes.ERROR_MISSING_REMOTE + + self.ctx.do_gf_calc = do_gf_calc + self.report('INFO: validated input successfully: {}. 
Do GF writeout calc: {}.'.format(inputs_ok, self.ctx.do_gf_calc)) + + return do_gf_calc + + + + def run_gf_writeout(self): + """ + Run the gf_writeout workflow to calculate the host Green's function and the + KKR flexfiles using the converged host remote folder and the impurity info node + """ + + # collect inputs + kkrcode = self.inputs.kkrcode + imp_info = self.inputs.impurity_info + converged_host_remote = self.inputs.remote_converged_host + options = self.ctx.options_params_dict + + # set label and description of the calc + sub_label = 'GF writeout (conv. host pid: {}, imp_info pid: {})'.format(converged_host_remote.pk, imp_info.pk) + sub_description = 'GF writeout sub workflow for kkrimp_wc using converged host remote data (pid: {}) and impurity_info node (pid: {})'.format(converged_host_remote.pk, imp_info.pk) + + future = self.submit(kkr_flex_wc, label=sub_label, description=sub_description, kkr=kkrcode, options_parameters=options, + remote_data=converged_host_remote, imp_info=imp_info) + + self.report('INFO: running GF writeout (pid: {})'.format(future.pk)) + + return ToContext(gf_writeout=future, last_calc_gf=future) + + + + def run_voroaux(self): + """ + Perform a voronoi calculation for every impurity charge using the structure + from the converged KKR host calculation + """ + # TODO: generalize to multiple impurities + + # collect inputs + vorocode = self.inputs.vorocode + kkrcode = self.inputs.kkrcode + imp_info = self.inputs.impurity_info + voro_params = self.ctx.voro_params_dict + if self.ctx.do_gf_calc: + self.report('INFO: get converged host remote from inputs to extract structure for Voronoi calculation') + converged_host_remote = self.inputs.remote_converged_host + else: + self.report('INFO: get converged host remote from GF_host_calc and graph to extract structure for Voronoi calculation') + GF_host_calc_pk = self.inputs.gf_remote.inp.remote_folder.pk + GF_host_calc = load_node(GF_host_calc_pk) + converged_host_remote = 
GF_host_calc.inp.parent_calc_folder + calc_params = ParameterData(dict=kkrparams(NSPIN=self.ctx.nspin, LMAX=self.ctx.voro_lmax, GMAX=self.ctx.voro_gmax, + RMAX=self.ctx.voro_rmax, RCLUSTZ=self.ctx.voro_rclustz).get_dict()) + structure_host, voro_calc = VoronoiCalculation.find_parent_structure(converged_host_remote) + + # for every impurity, generate a structure and launch the voronoi workflow + # to get the auxiliary impurity startpotentials + self.ctx.voro_calcs = {} +# for i in range(2): + inter_struc = change_struc_imp_aux_wf(structure_host, imp_info) + sub_label = 'voroaux calc for Zimp: {} in host-struc'.format(imp_info.get_attr('Zimp')[0]) + sub_description = 'Auxiliary voronoi calculation for an impurity with charge ' + sub_description += '{} in the host structure from pid: {}'.format(imp_info.get_attr('Zimp')[0], converged_host_remote.pk) + + future = self.submit(kkr_startpot_wc, label=sub_label, description=sub_description, structure=inter_struc, + voronoi=vorocode, kkr=kkrcode, wf_parameters=voro_params, calc_parameters=calc_params) + + tmp_calcname = 'voro_aux_{}'.format(1) + self.ctx.voro_calcs[tmp_calcname] = future + self.report('INFO: running voro aux (Zimp= {}, pid: {})'.format(imp_info.get_attr('Zimp')[0], future.pk)) + + return ToContext(last_voro_calc=future) + + + + def construct_startpot(self): + """ + Take the output of GF writeout and the converged host potential as well as the + auxiliary startpotentials for the impurity to construct the startpotential for the + KKR impurity sub workflow + """ + + nspin = self.ctx.nspin + + # collect all nodes necessary to construct the startpotential + if self.ctx.do_gf_calc: + GF_host_calc_pk = self.ctx.gf_writeout.out.calculation_info.get_attr('pk_flexcalc') + self.report('GF_host_calc_pk: {}'.format(GF_host_calc_pk)) + GF_host_calc = load_node(GF_host_calc_pk) + converged_host_remote = self.inputs.remote_converged_host + else: + GF_host_calc_pk = self.inputs.gf_remote.inp.remote_folder.pk + 
self.report('GF_host_calc_pk: {}'.format(GF_host_calc_pk)) + GF_host_calc = load_node(GF_host_calc_pk) + converged_host_remote = GF_host_calc.inp.parent_calc_folder + voro_calc_remote = self.ctx.last_voro_calc.out.last_voronoi_remote + imp_info = self.inputs.impurity_info + + # prepare settings dict + potname_converged = 'potential' + potname_impvorostart = 'output.pot' + potname_imp = 'potential_imp' + if nspin < 2: + replacelist_pot2 = [[0,0]] + else: + replacelist_pot2 = [[0,0],[1,1]] + neworder_pot1 = [int(i) for i in np.loadtxt(GF_host_calc.out.retrieved.get_abs_path('scoef'), skiprows=1)[:,3]-1] + + settings_label = 'startpot_KKRimp for imp_info node {}'.format(imp_info.pk) + settings_description = 'starting potential for impurity info: {}'.format(imp_info) + settings = ParameterData(dict={'pot1': potname_converged, 'out_pot': potname_imp, 'neworder': neworder_pot1, + 'pot2': potname_impvorostart, 'replace_newpos': replacelist_pot2, 'label': settings_label, + 'description': settings_description}) + startpot_kkrimp = neworder_potential_wf(settings_node=settings, parent_calc_folder=converged_host_remote, + parent_calc_folder2=voro_calc_remote) + + # add starting potential for kkrimp calculation to context + self.ctx.startpot_kkrimp = startpot_kkrimp + + self.report('INFO: created startpotential (pid: {}) for the impurity calculation ' + 'by using information of the GF host calculation (pid: {}), the potential of the ' + 'converged host system (remote pid: {}) and the potential of the auxiliary voronoi ' + 'calculation (remote pid: {})'.format(startpot_kkrimp.pk, GF_host_calc_pk, converged_host_remote.pk, self.ctx.last_voro_calc.pk)) + + + + def run_kkrimp_scf(self): + """ + Uses both the previously generated host-impurity startpotential and the output from + the GF writeout workflow as inputs to run the kkrimp_sub workflow in order to + converge the host-impurity potential + """ + + # collect all necessary input nodes + kkrimpcode = self.inputs.kkrimpcode + 
startpot = self.ctx.startpot_kkrimp + kkrimp_params = self.ctx.kkrimp_params_dict + options = self.ctx.options_params_dict + imp_info = self.inputs.impurity_info + if self.ctx.do_gf_calc: + self.report('INFO: get GF remote from gf_writeout sub wf (pid: {})'.format(self.ctx.gf_writeout.pk)) + gf_remote = self.ctx.gf_writeout.out.GF_host_remote + else: + self.report('INFO: get GF remote from input node (pid: {})'.format(self.inputs.gf_remote.pk)) + gf_remote = self.inputs.gf_remote + + # set label and description + sub_label = 'kkrimp_sub scf wf (GF host remote: {}, imp_info: {})'.format(gf_remote.pk, self.inputs.impurity_info.pk) + sub_description = 'convergence of the host-impurity potential (pk: {}) using GF remote (pk: {})'.format(startpot.pk, gf_remote.pk) + + future = self.submit(kkr_imp_sub_wc, label=sub_label, description=sub_description, + kkrimp=kkrimpcode, options_parameters=options, impurity_info=imp_info, + host_imp_startpot=startpot, GF_remote_data=gf_remote, wf_parameters=kkrimp_params) + + self.report('INFO: running kkrimp_sub_wf (startpot: {}, GF_remote: {}, wf pid: {})'.format(startpot.pk, gf_remote.pk, future.pk)) + + return ToContext(kkrimp_scf_sub=future) + + + + def return_results(self): + """ + Return the results and create all of the output nodes + """ + + self.report('INFO: creating output nodes for the KKR impurity workflow ...') + + outputnode_dict = {} + outputnode_dict['workflow_name'] = self.__class__.__name__ + outputnode_dict['workflow_version'] = self._workflowversion + last_calc_pk = self.ctx.kkrimp_scf_sub.out.calculation_info.get_attr('last_calc_nodeinfo')['pk'] + last_calc_output_params = load_node(last_calc_pk).out.output_parameters + last_calc_info = self.ctx.kkrimp_scf_sub.out.calculation_info + if self.ctx.do_gf_calc: + outputnode_dict['used_subworkflows'] = {'gf_writeout': self.ctx.gf_writeout.pk, 'auxiliary_voronoi': self.ctx.last_voro_calc.pk, + 'kkr_imp_sub': self.ctx.kkrimp_scf_sub.pk} + else: + 
def change_struc_imp_aux_wf(struc, imp_info): # Note: works for single imp at center only!
    """
    Create a copy of the host structure in which the atom at the site given by
    'ilayer_center' of the impurity info node is replaced by the first
    impurity element of 'Zimp'.

    :param struc: host StructureData node
    :param imp_info: ParameterData node with keys 'ilayer_center' (site index)
                     and 'Zimp' (list of impurity atomic numbers)
    :return: new StructureData with the impurity substituted at the center site
    """
    from aiida.common.constants import elements as PeriodicTableElements
    # NOTE(fix): use items() instead of the Python-2-only iteritems() so the
    # function also works under Python 3 (identical behavior on Python 2)
    _atomic_numbers = {data['symbol']: num for num, data in PeriodicTableElements.items()}

    imp_dict = imp_info.get_dict()
    new_struc = StructureData(cell=struc.cell)
    for isite, site in enumerate(struc.sites):
        kind = struc.get_kind(site.kind_name)
        pos = site.position
        zatom = _atomic_numbers[kind.get_symbols_string()]
        # substitute the impurity element at the chosen center site
        if isite == imp_dict.get('ilayer_center'):
            zatom = imp_dict.get('Zimp')[0]
        symbol = PeriodicTableElements.get(zatom).get('symbol')
        new_struc.append_atom(position=pos, symbols=symbol)

    return new_struc
All rights reserved.") +__license__ = "MIT license, see LICENSE.txt file" +__version__ = "0.2" +__contributors__ = u"Fabian Bertoldo" + +#TODO: work on return results function +#TODO: edit inspect_kkrimp function +#TODO: get rid of create_scf_result node and create output nodes differently +#TOTO: check if calculation parameters from previous calculation have to be +# loaded (in validate input, compare to kkr workflow) +#TODO: maybe add decrease mixing factor option as in kkr_scf wc +#TODO: add option to check if the convergence is on track + + +RemoteData = DataFactory('remote') +StructureData = DataFactory('structure') +ParameterData = DataFactory('parameter') +SinglefileData = DataFactory('singlefile') +FolderData = DataFactory('folder') +KkrimpProcess = KkrimpCalculation.process() + + + +class kkr_imp_sub_wc(WorkChain): + """ + Workchain of a kkrimp self consistency calculation starting from the + host-impurity potential of the system. (Not the entire kkr_imp workflow!) + + :param options_parameters: (ParameterData), Workchain specifications + :param wf_parameters: (ParameterData), specifications for the calculation + :param host_imp_startpot: (RemoteData), mandatory; input host-impurity potential + :param kkrimp: (Code), mandatory; KKRimp code converging the host-imp-potential + :param GF_remote_data: (RemoteData), mandatory; remote folder of a previous + kkrflex calculation containing the flexfiles ... 
+ :param impurity_info: (ParameterData), Parameter node with information + about the impurity cluster + + :return result_kkr_imp_sub_wc: (ParameterData), Information of workflow results + like success, last result node, list with + convergence behavior + """ + + _workflowversion = __version__ + _wf_label = 'kkr_imp_sub_wc' + _wf_description = 'Workflow for a KKRimp self consistency calculation to converge a given host-impurity potential' + + + _options_default = {'queue_name' : '', # Queue name to submit jobs too + 'resources': {"num_machines": 1}, # resources to allowcate for the job + 'walltime_sec' : 60*60, # walltime after which the job gets killed (gets parsed to KKR)} + 'custom_scheduler_commands' : '', # some additional scheduler commands + 'use_mpi' : False} # execute KKR with mpi or without + + _wf_default = {'kkr_runmax': 5, # Maximum number of kkr jobs/starts (defauld iterations per start) + 'threshold_aggressive_mixing': 5*10**-2, # threshold after which agressive mixing is used + 'convergence_criterion' : 3*10**-2, # Stop if charge denisty is converged below this value + 'mixreduce': 0.5, # reduce mixing factor by this factor if calculaito fails due to too large mixing + 'strmix': 0.03, # mixing factor of simple mixing + 'aggressive_mix': 3, # type of aggressive mixing (3: broyden's 1st, 4: broyden's 2nd, 5: generalized anderson) + 'aggrmix': 0.01, # mixing factor of aggressive mixing + 'nsteps': 10, # number of iterations done per KKR calculation + 'nspin': 1, # NSPIN can either be 1 or 2 + 'non-spherical': 1, # use non-spherical parts of the potential (0 if you don't want that) + 'broyden-number': 20, # number of potentials to 'remember' for Broyden's mixing + 'born-iter': 2, # number of Born iterations for the non-spherical calculation + 'mag_init' : False, # initialize and converge magnetic calculation + 'hfield' : [0.1, 10], # Ry # external magnetic field used in initialization step + 'init_pos' : None, # position in unit cell where magnetic field 
is applied [default (None) means apply to all] + 'r_cls' : 1.3, # alat # default cluster radius, is increased iteratively + 'calc_orbmom' : False, # defines of orbital moments will be calculated and written out + 'spinorbit' : False, # SOC calculation (True/False) + 'newsol' : False, # new SOC solver is applied + 'mesh_params': { 'NPAN_LOG': 8, + 'NPAN_EQ': 5, + 'NCHEB': 7} +# # Some parameter for direct solver (same as in host code) +# 'NPAN_LOGPANELFAC': 2, +# 'RADIUS_LOGPANELS': 0.6, # where to set change of logarithmic to linear radial mesh +# 'RADIUS_MIN': -1, +# 'NPAN_LOG': 15, # number of panels in log mesh +# 'NPAN_EQ': 5, # number of panels in linear mesh +# 'NCHEB': 15 # number of chebychev polynomials in each panel (total number of points in radial mesh NCHEB*(NPAN_LOG+NPAN_EQ)) + } + + + + @classmethod + def get_wf_defaults(self): + """ + Print and return _wf_defaults dictionary. Can be used to easily create + set of wf_parameters. + + returns _wf_defaults + """ + + print('Version of workflow: {}'.format(self._workflowversion)) + return self._options_default, self._wf_default + + + + @classmethod + def define(cls, spec): + """ + Defines the outline of the workflow + """ + + super(kkr_imp_sub_wc, cls).define(spec) + + # Define the inputs of the workflow + spec.input("kkrimp", valid_type=Code, required=True) + spec.input("host_imp_startpot", valid_type=SinglefileData, required=True) + spec.input("GF_remote_data", valid_type=RemoteData, required=True) + spec.input("impurity_info", valid_type=ParameterData, required=False) + spec.input("options_parameters", valid_type=ParameterData, required=False, + default=ParameterData(dict=cls._options_default)) + spec.input("wf_parameters", valid_type=ParameterData, required=False, + default=ParameterData(dict=cls._wf_default)) + + # Here the structure of the workflow is defined + spec.outline( + cls.start, + cls.validate_input, + while_(cls.condition)( + cls.update_kkrimp_params, + cls.run_kkrimp, + 
cls.inspect_kkrimp), + cls.return_results) + + # exit codes + spec.exit_code(121, 'ERROR_NO_HOST_IMP_POT', + message="ERROR: No host-impurity potential found in the inputs") + spec.exit_code(122, 'ERROR_INVALID_INPUT_KKRIMP', + message="ERROR: The code you provided for KKRimp does not " + "use the plugin kkr.kkrimp") + spec.exit_code(123, 'ERROR_INVALID_HOST_IMP_POT', + message="ERROR: Unable to extract parent paremeter node of " + "input remote folder") + # probably not necessary + spec.exit_code(124, 'ERROR_NO_CALC_PARAMS', + message="ERROR: No calculation parameters provided") + + spec.exit_code(125, 'ERROR_SUB_FAILURE', + message="ERROR: Last KKRcalc in SUBMISSIONFAILED state!\nstopping now") + spec.exit_code(126, 'ERROR_MAX_STEPS_REACHED', + message="ERROR: Maximal number of KKR restarts reached. Exiting now!") + spec.exit_code(127, 'ERROR_SETTING_LAST_REMOTE', + message="ERROR: Last_remote could not be set to a previous succesful calculation") + spec.exit_code(127, 'ERROR_MISSING_PARAMS', + message="ERROR: There are still missing calculation parameters") + spec.exit_code(128, 'ERROR_PARAMETER_UPDATE', + message="ERROR: Parameters could not be updated") + spec.exit_code(129, 'ERROR_LAST_CALC_NOT_FINISHED', + message="ERROR: Last calculation is not in finished state") + + + # Define the outputs of the workflow + spec.output('calculation_info', valid_type=ParameterData) + spec.output('host_imp_pot', valid_type=SinglefileData) + + + def start(self): + """ + init context and some parameters + """ + self.report('INFO: started KKR impurity convergence workflow version {}' + ''.format(self._workflowversion)) + + ####### init ####### + + # internal para /control para + self.ctx.loop_count = 0 + self.ctx.last_mixing_scheme = 0 + self.ctx.calcs = [] + self.ctx.abort = False + # flags used internally to check whether the individual steps were successful + self.ctx.kkr_converged = False + self.ctx.kkr_step_success = False + self.ctx.kkr_higher_accuracy = False + # links 
to previous calculations + self.ctx.last_calc = None + self.ctx.last_params = None + self.ctx.last_remote = None + # link to previous host impurity potential + self.ctx.last_pot = None + # convergence info about rms etc. (used to determine convergence behavior) + self.ctx.last_rms_all = [] + self.ctx.rms_all_steps = [] + self.ctx.last_neutr_all = [] + self.ctx.neutr_all_steps = [] + + # input para + wf_dict = self.inputs.wf_parameters.get_dict() + options_dict = self.inputs.options_parameters.get_dict() + + if options_dict == {}: + options_dict = self._options_default + self.report('INFO: using default options') + + if wf_dict == {}: + wf_dict = self._wf_default + self.report('INFO: using default wf parameter') + + # set option parameters from input, or defaults + self.ctx.use_mpi = options_dict.get('use_mpi', self._options_default['use_mpi']) + self.ctx.resources = options_dict.get('resources', self._options_default['resources']) + self.ctx.walltime_sec = options_dict.get('walltime_sec', self._options_default['walltime_sec']) + self.ctx.queue = options_dict.get('queue_name', self._options_default['queue_name']) + self.ctx.custom_scheduler_commands = options_dict.get('custom_scheduler_commands', self._options_default['custom_scheduler_commands']) + + # set workflow parameters from input, or defaults + self.ctx.max_number_runs = wf_dict.get('kkr_runmax', self._wf_default['kkr_runmax']) + self.ctx.description_wf = self.inputs.get('description', 'Workflow for ' + 'a KKR impurity calculation' + 'starting from a host-impurity' + 'potential') + self.ctx.label_wf = self.inputs.get('label', 'kkr_imp_sub_wc') + self.ctx.strmix = wf_dict.get('strmix', self._wf_default['strmix']) + self.ctx.convergence_criterion = wf_dict.get('convergence_criterion', self._wf_default['convergence_criterion']) + self.ctx.mixreduce = wf_dict.get('mixreduce', self._wf_default['mixreduce']) + self.ctx.threshold_aggressive_mixing = wf_dict.get('threshold_aggressive_mixing', 
self._wf_default['threshold_aggressive_mixing']) + self.ctx.type_aggressive_mixing = wf_dict.get('aggressive_mix', self._wf_default['aggressive_mix']) + self.ctx.aggrmix = wf_dict.get('aggrmix', self._wf_default['aggrmix']) + self.ctx.nsteps = wf_dict.get('nsteps', self._wf_default['nsteps']) + self.ctx.nspin = wf_dict.get('nspin', self._wf_default['nspin']) + self.ctx.spherical = wf_dict.get('non-spherical', self._wf_default['non-spherical']) + self.ctx.broyden_num = wf_dict.get('broyden-number', self._wf_default['broyden-number']) + self.ctx.born_iter = wf_dict.get('born-iter', self._wf_default['born-iter']) + + # initial magnetization + self.ctx.mag_init = wf_dict.get('mag_init', self._wf_default['mag_init']) + self.ctx.hfield = wf_dict.get('hfield', self._wf_default['hfield']) + self.ctx.xinit = wf_dict.get('init_pos', self._wf_default['init_pos']) + self.ctx.mag_init_step_success = False + + # SOC + self.ctx.calc_orbmom = wf_dict.get('calc_orbmom', self._wf_default['calc_orbmom']) + self.ctx.spinorbit = wf_dict.get('spinorbit', self._wf_default['spinorbit']) + self.ctx.newsol = wf_dict.get('newsol', self._wf_default['newsol']) + self.ctx.mesh_params = wf_dict.get('mesh_params', self._wf_default['mesh_params']) + + + self.report('INFO: use the following parameter:\n' + '\nGeneral settings\n' + 'use mpi: {}\n' + 'max number of KKR runs: {}\n' + 'Resources: {}\n' + 'Walltime (s): {}\n' + 'queue name: {}\n' + 'scheduler command: {}\n' + 'description: {}\n' + 'label: {}\n' + '\nMixing parameter\n' + 'Straight mixing factor: {}\n' + 'Nsteps scf cycle: {}\n' + 'Nspin: {}\n' + 'threshold_aggressive_mixing: {}\n' + 'Aggressive mixing technique: {}\n' + 'Aggressive mixing factor: {}\n' + 'Mixing decrease factor if convergence fails: {}\n' + 'Convergence criterion: {}\n' + '\nAdditional parameter\n' + 'init magnetism in first step: {}\n' + 'init magnetism, hfield: {}\n' + 'init magnetism, init_pos: {}\n' + 'use new SOC solver: {}\n' + 'SOC calculation: {}\n' + 'write out 
orbital moments: {}\n' + ''.format(self.ctx.use_mpi, self.ctx.max_number_runs, + self.ctx.resources, self.ctx.walltime_sec, + self.ctx.queue, self.ctx.custom_scheduler_commands, + self.ctx.description_wf, self.ctx.label_wf, + self.ctx.strmix, self.ctx.nsteps, self.ctx.nspin, + self.ctx.threshold_aggressive_mixing, + self.ctx.type_aggressive_mixing, self.ctx.aggrmix, + self.ctx.mixreduce, self.ctx.convergence_criterion, + self.ctx.mag_init, self.ctx.hfield, self.ctx.xinit, + self.ctx.newsol, self.ctx.spinorbit, self.ctx.calc_orbmom) + ) + + # return para/vars + self.ctx.successful = False + self.ctx.rms = [] + self.ctx.neutr = [] + self.ctx.warnings = [] + self.ctx.errors = [] + self.ctx.formula = '' + + # for results table each list gets one entry per iteration that has been performed + self.ctx.KKR_steps_stats = {'success':[], + 'isteps':[], + 'imix':[], + 'mixfac':[], + 'qbound':[], + 'high_sett':[], + 'first_rms':[], + 'last_rms':[], + 'first_neutr':[], + 'last_neutr':[], + 'pk':[], + 'uuid':[]} + + + + def validate_input(self): + """ + validate input and catch possible errors from the input + """ + + inputs = self.inputs + inputs_ok = True + + if not 'host_imp_startpot' in inputs: + inputs_ok = False + return self.exit_codes.ERROR_NO_HOST_IMP_POT + + if 'kkr' in inputs: + try: + test_and_get_codenode(inputs.kkr, 'kkr.kkrimp', use_exceptions=True) + except ValueError: + inputs_ok = False + return self.exit_codes.ERROR_INVALID_INPUT_KKRIMP + + # set params and remote folder to input + self.ctx.last_remote = inputs.GF_remote_data + + # set starting potential + self.ctx.last_pot = inputs.host_imp_startpot + + # TBD!!! 
+ if 'wf_parameters' in inputs: + self.ctx.last_params = inputs.wf_parameters + else: + inputs_ok = False + self.report('ERROR: {}'.format(self.exit_codes.ERROR_NO_CALC_PARAMS)) + return self.exit_codes.ERROR_NO_CALC_PARAMS + + self.report('INFO: validated input successfully: {}'.format(inputs_ok)) + + + + def condition(self): + """ + check convergence condition + """ + + do_kkr_step = True + stopreason = '' + + #increment KKR runs loop counter + self.ctx.loop_count += 1 + + # check if previous calculation reached convergence criterion + if self.ctx.kkr_converged: + if not self.ctx.kkr_higher_accuracy: + do_kkr_step = do_kkr_step & True + else: + stopreason = 'KKR converged' + self.ctx.successful = True + do_kkr_step = False + else: + do_kkr_step = do_kkr_step & True + + # check if previous calculation is in SUBMISSIONFAILED state + if self.ctx.loop_count>1 and self.ctx.last_calc.get_state() == calc_states.SUBMISSIONFAILED: + return self.exit_codes.ERROR_SUB_FAILURE + + # next check only needed if another iteration should be done after validating convergence etc. 
(previous checks) + if do_kkr_step: + # check if maximal number of iterations has been reached + if self.ctx.loop_count <= self.ctx.max_number_runs: + do_kkr_step = do_kkr_step & True + else: + do_kkr_step = False +# self.report('ERROR: {}'.format(self.exit_codes.ERROR_MAX_STEPS_REACHED)) +# return self.exit_codes.ERROR_MAX_STEPS_REACHED + + self.report("INFO: done checking condition for kkr step (result={})".format(do_kkr_step)) + + if not do_kkr_step: + self.report("INFO: Stopreason={}".format(stopreason)) + +# self.report("INFO: kkr_higher_accuracy = {}".format(self.ctx.kkr_higher_accuracy)) + + return do_kkr_step + + + + def update_kkrimp_params(self): + """ + update set of KKR parameters (check for reduced mixing, change of + mixing strategy, change of accuracy setting) + """ + + decrease_mixing_fac = False + switch_agressive_mixing = False + switch_higher_accuracy= False + initial_settings = False + + # only do something other than simple mixing after first kkr run + if self.ctx.loop_count != 1: + # first determine if previous step was successful (otherwise try to find some rms value and decrease mixing to try again) + if not self.ctx.kkr_step_success: + decrease_mixing_fac = True + self.report("INFO: last KKR calculation failed. Trying decreasing mixfac") + + convergence_on_track = self.convergence_on_track() + + # check if calculation was on its way to converge + if not convergence_on_track: + decrease_mixing_fac = True + self.report("INFO: Last KKR did not converge. Trying decreasing mixfac") + # reset last_remote to last successful calculation + for icalc in range(len(self.ctx.calcs))[::-1]: + self.report("INFO: last calc success? 
{} {}".format(icalc, self.ctx.KKR_steps_stats['success'][icalc])) + if self.ctx.KKR_steps_stats['success'][icalc]: + self.ctx.last_remote = self.ctx.calcs[icalc].out.remote_folder + break # exit loop if last_remote was found successfully + else: + self.ctx.last_remote = None + # if no previous calculation was succesful take voronoi output + # or remote data from input (depending on the inputs) + self.report("INFO: Last_remote is None? {} {}".format(self.ctx.last_remote is None, 'structure' in self.inputs)) + if self.ctx.last_remote is None: + if 'structure' in self.inputs: + self.ctx.voronoi.out.last_voronoi_remote + else: + self.ctx.last_remote = self.inputs.remote_data + # check if last_remote has finally been set and abort if this is not the case + self.report("INFO: last_remote is still None? {}".format(self.ctx.last_remote is None)) + if self.ctx.last_remote is None: + error = 'ERROR: last_remote could not be set to a previous succesful calculation' + self.ctx.errors.append(error) + return self.exit_codes.ERROR_SETTING_LAST_REMOTE + + # check if mixing strategy should be changed + last_mixing_scheme = self.ctx.last_params.get_dict()['IMIX'] + if last_mixing_scheme is None: + last_mixing_scheme = 0 + + # TODO: problem with convergence on track has to be solved, just set as true for testing + convergence_on_track = True + if convergence_on_track: + last_rms = self.ctx.last_rms_all[-1] + if last_rms < self.ctx.threshold_aggressive_mixing and last_mixing_scheme == 0: + switch_agressive_mixing = True + self.report("INFO: rms low enough, switch to agressive mixing") + + # check if switch to higher accuracy should be done + if not self.ctx.kkr_higher_accuracy: + if self.ctx.kkr_converged: # or last_rms < self.ctx.threshold_switch_high_accuracy: + switch_higher_accuracy = True +# self.report("INFO: rms low enough, switch to higher accuracy settings") + else: + initial_settings = True + self.ctx.kkr_step_success = True + + if self.ctx.loop_count > 1: + last_rms = 
self.ctx.last_rms_all[-1] + + # if needed update parameters + if decrease_mixing_fac or switch_agressive_mixing or switch_higher_accuracy or initial_settings or self.ctx.mag_init: + if initial_settings: + label = 'initial KKR scf parameters' + description = 'initial parameter set for scf calculation' + else: + label = '' + description = '' + + # step 1: extract info from last input parameters and check consistency +# params = self.ctx.last_params +# input_dict = params.get_dict() + para_check = kkrparams(params_type='kkrimp') + para_check.get_all_mandatory() + self.report('INFO: get kkrimp keywords') + + # step 1.1: try to fill keywords + #for key, val in input_dict.iteritems(): + # para_check.set_value(key, val, silent=True) + + # init new_params dict where updated params are collected + new_params = {} + + # step 1.2: check if all mandatory keys are there and add defaults if missing + missing_list = para_check.get_missing_keys(use_aiida=True) + if missing_list != []: + kkrdefaults = kkrparams.get_KKRcalc_parameter_defaults()[0] + kkrdefaults_updated = [] + for key_default, val_default in kkrdefaults.items(): + if key_default in missing_list: + new_params[key_default] = kkrdefaults.get(key_default) + kkrdefaults_updated.append(key_default) + if len(kkrdefaults_updated)>0: + error = 'ERROR: Calc_parameters misses keys: {}'.format(missing_list) + self.ctx.errors.append(error) + self.report('ERROR: {}'.format(self.exit_codes.ERROR_MISSING_PARAMS)) + return self.exit_codes.ERROR_MISSING_PARAMS + else: + self.report('updated KKR parameter node with default values: {}'.format(kkrdefaults_updated)) + + # step 2: change parameter (contained in new_params dictionary) + last_mixing_scheme = para_check.get_value('IMIX') + if last_mixing_scheme is None: + last_mixing_scheme = 0 + + strmixfac = self.ctx.strmix + aggrmixfac = self.ctx.aggrmix + nsteps = self.ctx.nsteps + nspin = self.ctx.nspin + + # TODO: maybe add decrease mixing factor option as in kkr_scf wc + # step 2.1 
fill new_params dict with values to be updated + if decrease_mixing_fac: + if last_mixing_scheme == 0: + self.report('(strmixfax, mixreduce)= ({}, {})'.format(strmixfac, self.ctx.mixreduce)) + self.report('type(strmixfax, mixreduce)= {} {}'.format(type(strmixfac), type(self.ctx.mixreduce))) + strmixfac = strmixfac * self.ctx.mixreduce + self.ctx.strmix = strmixfac + label += 'decreased_mix_fac_str (step {})'.format(self.ctx.loop_count) + description += 'decreased STRMIX factor by {}'.format(self.ctx.mixreduce) + else: + self.report('(aggrmixfax, mixreduce)= ({}, {})'.format(aggrmixfac, self.ctx.mixreduce)) + self.report('type(aggrmixfax, mixreduce)= {} {}'.format(type(aggrmixfac), type(self.ctx.mixreduce))) + aggrmixfac = aggrmixfac * self.ctx.mixreduce + self.ctx.aggrmix = aggrmixfac + label += 'decreased_mix_fac_bry' + description += 'decreased AGGRMIX factor by {}'.format(self.ctx.mixreduce) + + if switch_agressive_mixing: + last_mixing_scheme = self.ctx.type_aggressive_mixing + label += ' switched_to_agressive_mixing' + description += ' switched to agressive mixing scheme (IMIX={})'.format(last_mixing_scheme) + + # add number of scf steps, spin + new_params['SCFSTEPS'] = nsteps + new_params['NSPIN'] = nspin + new_params['INS'] = self.ctx.spherical + + # add newsosol + if self.ctx.newsol: + new_params['TESTFLAG'] = ['tmatnew'] + else: + new_params['TESTFLAG'] = [] + + if self.ctx.spinorbit: + new_params['SPINORBIT'] = 1 + new_params['NCOLL'] = 1 + new_params['NCHEB'] = self.ctx.mesh_params['NCHEB'] + new_params['NPAN_LOG'] = self.ctx.mesh_params['NPAN_LOG'] + new_params['NPAN_EQ'] = self.ctx.mesh_params['NPAN_EQ'] + else: + new_params['SPINORBIT'] = 0 + new_params['NCOLL'] = 0 + + if self.ctx.calc_orbmom: + new_params['CALCORBITALMOMENT'] = 1 + else: + new_params['CALCORBITALMOMENT'] = 0 + + # set mixing schemes and factors + if last_mixing_scheme == 3 or last_mixing_scheme == 4: + new_params['ITDBRY'] = self.ctx.broyden_num + new_params['IMIX'] = 
last_mixing_scheme + new_params['MIXFAC'] = aggrmixfac + elif last_mixing_scheme == 0: + new_params['IMIX'] = last_mixing_scheme + new_params['MIXFAC'] = strmixfac + + # add mixing scheme to context + self.ctx.last_mixing_scheme = last_mixing_scheme + + + if switch_higher_accuracy: + self.ctx.kkr_higher_accuracy = True +# convergence_settings = self.ctx.convergence_setting_fine +# label += ' use_higher_accuracy' +# description += ' using higher accuracy settings goven in convergence_setting_fine' +# else: +# convergence_settings = self.ctx.convergence_setting_coarse + +# # slightly increase temperature if previous calculation was unsuccessful for the second time +# if decrease_mixing_fac and not self.convergence_on_track(): +# self.report('INFO: last calculation did not converge and convergence not on track. Try to increase temperature by 50K.') +# convergence_settings['tempr'] += 50. +# label += ' TEMPR+50K' +# description += ' with increased temperature of 50K' + + # add convergence settings + if self.ctx.loop_count == 1 or self.ctx.last_mixing_scheme == 0: + new_params['QBOUND'] = self.ctx.threshold_aggressive_mixing + else: + new_params['QBOUND'] = self.ctx.convergence_criterion + + # initial magnetization + if initial_settings and self.ctx.mag_init: + if self.ctx.hfield <= 0: + self.report('\nWARNING: magnetization initialization chosen but hfield is zero. Automatically change back to default value (hfield={})\n'.format(self._wf_default['hfield'])) + self.ctx.hfield = self._wf_default['hfield'] + new_params['HFIELD'] = self.ctx.hfield + elif self.ctx.mag_init and self.ctx.mag_init_step_success: # turn off initialization after first (successful) iteration + new_params['HFIELD'] = [0.0, 0] + elif not self.ctx.mag_init: + self.report("INFO: mag_init is False. 
Overwrite 'HFIELD' to '0.0' and 'LINIPOL' to 'False'.") + # reset mag init to avoid resinitializing + new_params['HFIELD'] = [0.0, 0] + + # set nspin to 2 if mag_init is used + if self.ctx.mag_init: + nspin_in = nspin + if nspin_in is None: + nspin_in = 1 + if nspin_in < 2: + self.report('WARNING: found NSPIN=1 but for maginit needs NPIN=2. Overwrite this automatically') + new_params['NSPIN'] = 2 + self.report('new_params: {}'.format(new_params)) + + # step 2.2 update values + try: + for key, val in new_params.iteritems(): + para_check.set_value(key, val, silent=True) + except: + error = 'ERROR: parameter update unsuccessful: some key, value pair not valid!' + self.ctx.errors.append(error) + self.report(error) + #return self.exit_codes.ERROR_PARAMETER_UPDATE + + # step 3: + self.report("INFO: update parameters to: {}".format(para_check.get_set_values())) + + #test + self.ctx.last_params = ParameterData(dict={}) + + updatenode = ParameterData(dict=para_check.get_dict()) + updatenode.label = label + updatenode.description = description + + paranode_new = updatenode #update_params_wf(self.ctx.last_params, updatenode) + self.ctx.last_params = paranode_new + else: + self.report("INFO: reuse old settings") + + self.report("INFO: done updating kkr param step") + + + + def run_kkrimp(self): + """ + submit a KKR impurity calculation + """ + self.report("INFO: setting up kkrimp calculation step {}".format(self.ctx.loop_count)) + + + label = 'KKRimp calculation step {} (IMIX={})'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme) + description = 'KKRimp calculation of step {}, using mixing scheme {}'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme) + code = self.inputs.kkrimp + params = self.ctx.last_params + host_GF = self.inputs.GF_remote_data + imp_pot = self.ctx.last_pot + + options = {"max_wallclock_seconds": self.ctx.walltime_sec, + "resources": self.ctx.resources, + "queue_name" : self.ctx.queue} + if self.ctx.custom_scheduler_commands: + 
options["custom_scheduler_commands"] = self.ctx.custom_scheduler_commands + if 'impurity_info' in self.inputs: + self.report('INFO: using impurity_info node as input for kkrimp calculation') + imp_info = self.inputs.impurity_info + label = 'KKRimp calculation step {} (IMIX={}, Zimp: {})'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme, imp_info.get_attr('Zimp')) + description = 'KKRimp calculation of step {}, using mixing scheme {}'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme) + inputs = get_inputs_kkrimp(code, options, label, description, params, not self.ctx.use_mpi, imp_info=imp_info, host_GF=host_GF, imp_pot=imp_pot) + else: + self.report('INFO: getting inpurity_info node from previous GF calculation') + label = 'KKRimp calculation step {} (IMIX={}, GF_remote: {})'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme, host_GF.pk) + description = 'KKRimp calculation of step {}, using mixing scheme {}'.format(self.ctx.loop_count, self.ctx.last_mixing_scheme) + inputs = get_inputs_kkrimp(code, options, label, description, params, not self.ctx.use_mpi, host_GF=host_GF, imp_pot=imp_pot) + + # run the KKR calculation + self.report('INFO: doing calculation') + kkrimp_run = self.submit(KkrimpProcess, **inputs) + + return ToContext(kkr=kkrimp_run, last_calc=kkrimp_run) + + + + def inspect_kkrimp(self): + """ + check for convergence and store some of the results of the last calculation to context + """ + + self.ctx.calcs.append(self.ctx.last_calc) + self.ctx.kkrimp_step_success = True + + # check calculation state + calc_state = self.ctx.last_calc.get_state() + if calc_state != calc_states.FINISHED: + self.ctx.kkrimp_step_success = False + self.report('ERROR: {}', self.exit_codes.ERROR_LAST_CALC_NOT_FINISHED) + return self.exit_codes.ERROR_LAST_CALC_NOT_FINISHED + + self.report("INFO: kkrimp_step_success: {}".format(self.ctx.kkrimp_step_success)) + + # get potential from last calculation + retrieved_path = self.ctx.kkr.out.retrieved.get_abs_path() 
# retrieved path + pot_path = retrieved_path+'/path/out_potential' + self.ctx.last_pot = SinglefileData(file=pot_path) + + # extract convergence info about rms etc. (used to determine convergence behavior) + try: + self.report("INFO: trying to find output of last_calc: {}".format(self.ctx.last_calc)) + last_calc_output = self.ctx.last_calc.out.output_parameters.get_dict() + found_last_calc_output = True + except: + found_last_calc_output = False + self.report("INFO: found_last_calc_output: {}".format(found_last_calc_output)) + + # try yo extract remote folder + try: + if self.convergence_on_track(): + self.ctx.last_remote = self.ctx.kkr.out.remote_folder + else: + self.ctx.last_remote = self.inputs.remote_data + except: + self.ctx.last_remote = None + self.ctx.kkrimp_step_success = False + + self.report("INFO: last_remote: {}".format(self.ctx.last_remote)) + + if self.ctx.kkrimp_step_success and found_last_calc_output: + # check convergence + self.ctx.kkr_converged = last_calc_output['convergence_group']['calculation_converged'] + # check rms + self.ctx.rms.append(last_calc_output['convergence_group']['rms']) + rms_all_iter_last_calc = list(last_calc_output['convergence_group']['rms_all_iterations']) + + # add lists of last iterations + self.ctx.last_rms_all = rms_all_iter_last_calc + if self.ctx.kkrimp_step_success and self.convergence_on_track(): + self.ctx.rms_all_steps += rms_all_iter_last_calc + else: + self.ctx.kkr_converged = False + + self.report("INFO: kkr_converged: {}".format(self.ctx.kkr_converged)) + self.report("INFO: rms: {}".format(self.ctx.rms)) + self.report("INFO: last_rms_all: {}".format(self.ctx.last_rms_all)) + + # turn off initial magnetization once one step was successful (update_kkr_params) used in + if self.ctx.mag_init and self.ctx.kkrimp_step_success: + self.ctx.mag_init_step_success = True + + # store some statistics used to print table in the end of the report + self.ctx.KKR_steps_stats['success'].append(self.ctx.kkr_step_success) + 
try: + isteps = self.ctx.last_calc.out.output_parameters.get_dict()['convergence_group']['number_of_iterations'] + except: + self.ctx.warnings.append('cound not set isteps in KKR_steps_stats dict') + isteps = -1 + + try: + first_rms = self.ctx.last_rms_all[0] + last_rms = self.ctx.last_rms_all[-1] + except: + self.ctx.warnings.append('cound not set first_rms, last_rms in KKR_steps_stats dict') + first_rms = -1 + last_rms = -1 + + if self.ctx.last_mixing_scheme == 0: + mixfac = self.ctx.strmix + elif self.ctx.last_mixing_scheme == 3 or self.ctx.last_mixing_scheme == 4: + mixfac = self.ctx.aggrmix + + if self.ctx.kkr_higher_accuracy: + qbound = self.ctx.convergence_criterion + else: + qbound = self.ctx.threshold_aggressive_mixing + + self.ctx.KKR_steps_stats['isteps'].append(isteps) + self.ctx.KKR_steps_stats['imix'].append(self.ctx.last_mixing_scheme) + self.ctx.KKR_steps_stats['mixfac'].append(mixfac) + self.ctx.KKR_steps_stats['qbound'].append(qbound) + self.ctx.KKR_steps_stats['high_sett'].append(self.ctx.kkr_higher_accuracy) + self.ctx.KKR_steps_stats['first_rms'].append(first_rms) + self.ctx.KKR_steps_stats['last_rms'].append(last_rms) + self.ctx.KKR_steps_stats['pk'].append(self.ctx.last_calc.pk) + self.ctx.KKR_steps_stats['uuid'].append(self.ctx.last_calc.uuid) + + self.report("INFO: done inspecting kkrimp results step") + + + + def convergence_on_track(self): + """ + Check if convergence behavior of the last calculation is on track (i.e. going down) + """ + + on_track = True + threshold = 5. 
# used to check condition if at least one of charnge_neutrality, rms-error goes down fast enough + + # first check if previous calculation was stopped due to reaching the QBOUND limit + try: + calc_reached_qbound = self.ctx.last_calc.out.output_parameters.get_dict()['convergence_group']['calculation_converged'] + except AttributeError: # captures error when last_calc dies not have an output node + calc_reached_qbound = False + except KeyError: # captures + calc_reached_qbound = False + + if self.ctx.kkrimp_step_success and not calc_reached_qbound: + first_rms = self.ctx.last_rms_all[0] + last_rms = self.ctx.last_rms_all[-1] + # use this trick to avoid division by zero + if last_rms == 0: + last_rms = 10**-16 + r = last_rms/first_rms + self.report("INFO: convergence check: first/last rms {}, {}".format(first_rms, last_rms)) + if r < 1: + self.report("INFO: convergence check: rms goes down") + on_track = True + elif r > threshold: + self.report("INFO: convergence check: rms goes up too fast, convergence is not expected") + on_track = False + elif len(self.ctx.last_rms_all) == 1: + self.report("INFO: convergence check: already converged after single iteration") + on_track = True + else: + self.report("INFO: convergence check: rms does not shrink fast enough, convergence is not expected") + on_track = False + elif calc_reached_qbound: + self.report("INFO: convergence check: calculation reached QBOUND") + on_track = True + else: + self.report("INFO: convergence check: calculation unsuccessful") + on_track = False + + self.report("INFO: convergence check result: {}".format(on_track)) + + return on_track + + + + def return_results(self): + """ + Return the results of the calculations + This should run through and produce output nodes even if everything failed, + therefore it only uses results from context. 
+ """ + + self.report("INFO: entering return_results") + + # try/except to capture as mnuch as possible (everything that is there even when workflow exits unsuccessfully) + # capture pk and uuids of last calc, params and remote + try: + last_calc_uuid = self.ctx.last_calc.uuid + last_calc_pk = self.ctx.last_calc.pk + last_params_uuid = self.ctx.last_params.uuid + last_params_pk = self.ctx.last_params.pk + last_remote_uuid = self.ctx.last_remote.uuid + last_remote_pk = self.ctx.last_remote.pk + except: + last_calc_uuid = None + last_calc_pk = None + last_params_uuid = None + last_params_pk = None + last_remote_uuid = None + last_remote_pk = None + + all_pks = [] + for calc in self.ctx.calcs: + try: + all_pks.append(calc.pk) + except: + self.ctx.warnings.append('cound not get pk of calc {}'.format(calc)) + + + # capture links to last parameter, calcualtion and output + try: + last_calc_out = self.ctx.kkr.out['output_parameters'] + last_calc_out_dict = last_calc_out.get_dict() + last_RemoteData = self.ctx.last_remote + last_InputParameters = self.ctx.last_params + except: + last_InputParameters = None + last_RemoteData = None + last_calc_out = None + last_calc_out_dict = {} + + # capture convergence info + try: + last_rms = self.ctx.rms[-1] + except: + last_rms = None + + # now collect results saved in results node of workflow + self.report("INFO: collect outputnode_dict") + outputnode_dict = {} + outputnode_dict['workflow_name'] = self.__class__.__name__ + outputnode_dict['workflow_version'] = self._workflowversion + outputnode_dict['material'] = self.ctx.formula + outputnode_dict['loop_count'] = self.ctx.loop_count + outputnode_dict['warnings'] = self.ctx.warnings + outputnode_dict['successful'] = self.ctx.successful + outputnode_dict['last_params_nodeinfo'] = {'uuid':last_params_uuid, 'pk':last_params_pk} + outputnode_dict['last_remote_nodeinfo'] = {'uuid':last_remote_uuid, 'pk':last_remote_pk} + outputnode_dict['last_calc_nodeinfo'] = {'uuid':last_calc_uuid, 
'pk':last_calc_pk} + outputnode_dict['pks_all_calcs'] = all_pks + outputnode_dict['errors'] = self.ctx.errors + outputnode_dict['convergence_value'] = last_rms + outputnode_dict['convergence_values_all_steps'] = array(self.ctx.rms_all_steps) + outputnode_dict['convergence_values_last_step'] = array(self.ctx.last_rms_all) + outputnode_dict['convergence_reached'] = self.ctx.kkr_converged + outputnode_dict['kkr_step_success'] = self.ctx.kkr_step_success + outputnode_dict['used_higher_accuracy'] = self.ctx.kkr_higher_accuracy + + # report the status + if self.ctx.successful: + self.report('STATUS: Done, the convergence criteria are reached.\n' + 'INFO: The charge density of the KKR calculation pk= {} ' + 'converged after {} KKR runs and {} iterations to {} \n' + ''.format(last_calc_pk, self.ctx.loop_count - 1, sum(self.ctx.KKR_steps_stats.get('isteps')), self.ctx.last_rms_all[-1])) + else: # Termination ok, but not converged yet... + if self.ctx.abort: # some error occured, donot use the output. + self.report('STATUS/ERROR: I abort, see logs and ' + 'erros/warning/hints in output_kkr_scf_wc_para') + else: + self.report('STATUS/WARNING: Done, the maximum number of runs ' + 'was reached or something failed.\n INFO: The ' + 'charge density of the KKR calculation pk= ' + 'after {} KKR runs and {} iterations is {} "me/bohr^3"\n' + ''.format(self.ctx.loop_count - 1, sum(self.ctx.KKR_steps_stats.get('isteps')), self.ctx.last_rms_all[-1])) + + # create results node + self.report("INFO: create results nodes") #: {}".format(outputnode_dict)) + outputnode_t = ParameterData(dict=outputnode_dict) + outputnode_t.label = 'kkr_scf_wc_results' + outputnode_t.description = 'Contains results of workflow (e.g. 
workflow version number, info about success of wf, lis tof warnings that occured during execution, ...)' + + self.out('calculation_info', outputnode_t) + self.out('host_imp_pot', self.ctx.last_pot) + + # print results table for overview + # table layout: + message = "INFO: overview of the result:\n\n" + message += "|------|---------|--------|------|--------|---------|-----------------|---------------------------------------------|\n" + message += "| irun | success | isteps | imix | mixfac | qbound | rms | pk and uuid |\n" + message += "| | | | | | | first | last | |\n" + message += "|------|---------|--------|------|--------|---------|--------|--------|---------------------------------------------|\n" + #| %6i | %9s | %8i | %6i | %.2e | %.3e | %.2e | %.2e | + KKR_steps_stats = self.ctx.KKR_steps_stats + for irun in range(len(KKR_steps_stats.get('success'))): + message += "|%6i|%9s|%8i|%6i|%.2e|%.3e|%.2e|%.2e|"%(irun+1, + KKR_steps_stats.get('success')[irun], KKR_steps_stats.get('isteps')[irun], + KKR_steps_stats.get('imix')[irun], KKR_steps_stats.get('mixfac')[irun], + KKR_steps_stats.get('qbound')[irun], + KKR_steps_stats.get('first_rms')[irun], KKR_steps_stats.get('last_rms')[irun]) + message += " {} | {}|\n".format(KKR_steps_stats.get('pk')[irun], KKR_steps_stats.get('uuid')[irun]) + message += "|------|---------|--------|------|--------|---------|-----------------|---------------------------------------------|\n" + """ + message += "#|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|\n".format(irun+1, + KKR_steps_stats.get('success')[irun], KKR_steps_stats.get('isteps')[irun], + KKR_steps_stats.get('imix')[irun], KKR_steps_stats.get('mixfac')[irun], + KKR_steps_stats.get('qbound')[irun], + KKR_steps_stats.get('first_rms')[irun], KKR_steps_stats.get('last_rms')[irun]) + """ + self.report(message) + + self.report("INFO: done with kkr_scf workflow!\n") diff --git a/aiida_kkr/workflows/kkr_scf.py b/aiida_kkr/workflows/kkr_scf.py index 893367b7..f9e92d34 100644 --- 
a/aiida_kkr/workflows/kkr_scf.py +++ b/aiida_kkr/workflows/kkr_scf.py @@ -7,18 +7,16 @@ from aiida.orm import Code, DataFactory, load_node from aiida.work.workchain import WorkChain, while_, if_, ToContext -from aiida.work.run import submit from aiida.work import workfunction as wf -from aiida.work.process_registry import ProcessRegistry from aiida.common.datastructures import calc_states from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.calculations.voro import VoronoiCalculation -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams from aiida_kkr.tools.common_workfunctions import (test_and_get_codenode, get_inputs_kkr, get_parent_paranode, update_params_wf) from aiida_kkr.workflows.voro_start import kkr_startpot_wc from aiida_kkr.workflows.dos import kkr_dos_wc -from aiida_kkr.tools.common_functions import get_Ry2eV, get_ef_from_potfile +from masci_tools.io.common_functions import get_Ry2eV, get_ef_from_potfile from numpy import array, where, ones __copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, " @@ -184,9 +182,8 @@ def start(self): """ init context and some parameters """ - self.report('INFO: started KKR convergence workflow version {}\n' - 'INFO: Workchain node identifiers: {}' - ''.format(self._workflowversion, ProcessRegistry().current_calc_node)) + self.report('INFO: started KKR convergence workflow version {}' + ''.format(self._workflowversion)) ####### init ####### @@ -226,13 +223,13 @@ def start(self): self.ctx.walltime_sec = wf_dict.get('walltime_sec', self._wf_default['walltime_sec']) self.ctx.queue = wf_dict.get('queue_name', self._wf_default['queue_name']) self.ctx.custom_scheduler_commands = wf_dict.get('custom_scheduler_commands', self._wf_default['custom_scheduler_commands']) - self.ctx.description_wf = self.inputs.get('_description', 'Workflow for ' + self.ctx.description_wf = self.inputs.get('description', 'Workflow for ' 'a KKR scf calculation starting ' 
'either from a structure with ' 'automatic voronoi calculation ' 'or a valid RemoteData node of ' 'a previous calculation') - self.ctx.label_wf = self.inputs.get('_label', 'kkr_scf_wc') + self.ctx.label_wf = self.inputs.get('label', 'kkr_scf_wc') self.ctx.strmix = wf_dict.get('strmix', self._wf_default['strmix']) self.ctx.brymix = wf_dict.get('brymix', self._wf_default['brymix']) self.ctx.check_dos = wf_dict.get('check_dos', self._wf_default['check_dos']) @@ -476,9 +473,9 @@ def run_voronoi(self): wf_label= 'kkr_startpot (voronoi)' wf_desc = 'subworkflow to set up the input of a KKR calculation' - future = submit(kkr_startpot_wc, kkr=kkrcode, voronoi=voronoicode, + future = self.submit(kkr_startpot_wc, kkr=kkrcode, voronoi=voronoicode, calc_parameters=params, wf_parameters=sub_wf_params, - structure=structure, _label=wf_label, _description=wf_desc) + structure=structure, label=wf_label, description=wf_desc) return ToContext(voronoi=future, last_calc=future) @@ -812,7 +809,7 @@ def run_kkr(self): # run the KKR calculation self.report('INFO: doing calculation') - kkr_run = submit(KkrProcess, **inputs) + kkr_run = self.submit(KkrProcess, **inputs) return ToContext(kkr=kkr_run, last_calc=kkr_run) @@ -1088,7 +1085,7 @@ def return_results(self): # report the status if self.ctx.successful: self.report('STATUS: Done, the convergence criteria are reached.\n' - 'INFO: The charge density of the KKR calculation pk= {}' + 'INFO: The charge density of the KKR calculation pk= {} ' 'converged after {} KKR runs and {} iterations to {} \n' ''.format(last_calc_pk, self.ctx.loop_count, self.ctx.loop_count, last_rms)) else: # Termination ok, but not converged yet... 
@@ -1194,7 +1191,7 @@ def control_end_wc(self, errormsg): self.report(errormsg) # because return_results still fails somewhen self.return_results() #self.abort_nowait(errormsg) - self.abort(errormsg) + #self.abort(errormsg) def check_input_params(self, params, is_voronoi=False): @@ -1270,7 +1267,7 @@ def get_dos(self): remote = self.ctx.last_calc.out.remote_folder wf_label= ' final DOS calculation' wf_desc = ' subworkflow of a DOS calculation' - future = submit(kkr_dos_wc, kkr=code, remote_data=remote, wf_parameters=wfdospara_node, _label=wf_label, _description=wf_desc) + future = self.submit(kkr_dos_wc, kkr=code, remote_data=remote, wf_parameters=wfdospara_node, label=wf_label, description=wf_desc) return ToContext(doscal=future) @@ -1403,49 +1400,49 @@ def create_scf_result_node(**kwargs): outdict = {} if has_last_outpara: - outputnode = outpara.copy() + outputnode = outpara outputnode.label = 'workflow_Results' outputnode.description = ('Contains self-consistency results and ' 'information of an kkr_scf_wc run.') outdict['output_kkr_scf_wc_ParameterResults'] = outputnode if has_last_calc_out_dict: - outputnode = last_calc_out_dict.copy() + outputnode = last_calc_out_dict outputnode.label = 'last_calc_out' outputnode.description = ('Contains the Results Parameter node from the output ' 'of the last calculation done in the workflow.') outdict['last_calc_out'] = outputnode if has_last_RemoteData: - outputnode = last_RemoteData_dict.copy() + outputnode = last_RemoteData_dict outputnode.label = 'last_RemoteData' outputnode.description = ('Contains a link to the latest remote data node ' 'where the output of the calculation can be accessed.') outdict['last_RemoteData'] = outputnode if has_last_InputParameters: - outputnode = last_InputParameters_dict.copy() + outputnode = last_InputParameters_dict outputnode.label = 'last_InputParameters' outputnode.description = ('Contains the latest parameter data node ' 'where the input of the last calculation can be found.') 
outdict['last_InputParameters'] = outputnode if has_vorostart_output: - outputnode = vorostart_output_dict.copy() + outputnode = vorostart_output_dict outputnode.label = 'results_vorostart' outputnode.description = ('Contains the results parameter data node ' 'of the vorostart sub-workflow (sets up starting portentials).') outdict['results_vorostart'] = outputnode if has_starting_dos: - outputnode = start_dosdata_interpol_dict.copy() + outputnode = start_dosdata_interpol_dict outputnode.label = 'starting_dosdata_interpol' outputnode.description = ('Contains the interpolated DOS data note, computed ' 'from the starting portential.') outdict['starting_dosdata_interpol'] = outputnode if has_final_dos: - outputnode = final_dosdata_interpol_dict.copy() + outputnode = final_dosdata_interpol_dict outputnode.label = 'final_dosdata_interpol' outputnode.description = ('Contains the interpolated DOS data note, computed ' 'from the converged potential.') diff --git a/aiida_kkr/workflows/voro_start.py b/aiida_kkr/workflows/voro_start.py index db25be6e..2b1ec796 100644 --- a/aiida_kkr/workflows/voro_start.py +++ b/aiida_kkr/workflows/voro_start.py @@ -7,16 +7,15 @@ from aiida.orm import Code, DataFactory from aiida.work.workchain import WorkChain, while_, if_, ToContext -from aiida.work.run import submit +from aiida.work.launch import submit from aiida.work import workfunction as wf -from aiida.work.process_registry import ProcessRegistry from aiida_kkr.calculations.kkr import KkrCalculation from aiida_kkr.calculations.voro import VoronoiCalculation -from aiida_kkr.tools.kkr_params import kkrparams +from masci_tools.io.kkr_params import kkrparams from aiida_kkr.workflows.dos import kkr_dos_wc from aiida_kkr.tools.common_workfunctions import (test_and_get_codenode, update_params, update_params_wf, get_inputs_voronoi) -from aiida_kkr.tools.common_functions import get_ef_from_potfile, get_Ry2eV +from masci_tools.io.common_functions import get_ef_from_potfile, get_Ry2eV from 
aiida.common.datastructures import calc_states from numpy import where @@ -97,7 +96,7 @@ def define(cls, spec): spec.input("wf_parameters", valid_type=ParameterData, required=False, default=ParameterData(dict=cls._wf_default)) spec.input("structure", valid_type=StructureData, required=True) - spec.input("kkr", valid_type=Code, required=True) + spec.input("kkr", valid_type=Code, required=False) spec.input("voronoi", valid_type=Code, required=True) spec.input("calc_parameters", valid_type=ParameterData, required=False) @@ -107,16 +106,15 @@ def define(cls, spec): cls.start, # check if another iteration is done (in case of either voro_ok, doscheck_ok is False) while_(cls.do_iteration_check)( - # run voronoi calculation - cls.run_voronoi, - # check voronoi output (also sets ctx.voro_ok) - if_(cls.check_voronoi)( - # create starting DOS using dos sub-workflow - cls.get_dos, - # perform some checks and set ctx.doscheck_ok accordingly - cls.check_dos - ) - ), + # run voronoi calculation + cls.run_voronoi, + # check voronoi output (also sets ctx.voro_ok) + if_(cls.check_voronoi)( + # create starting DOS using dos sub-workflow + cls.get_dos, + # perform some checks and set ctx.doscheck_ok accordingly + cls.check_dos) + ), # collect results and return cls.return_results ) @@ -126,9 +124,8 @@ def start(self): """ init context and some parameters """ - self.report('INFO: started VoroStart workflow version {}\n' - 'INFO: Workchain node identifiers: {}' - ''.format(self._workflowversion, ProcessRegistry().current_calc_node)) + self.report('INFO: started VoroStart workflow version {}' + ''.format(self._workflowversion)) ####### init ####### @@ -212,13 +209,14 @@ def start(self): self.ctx.formula = '' # get kkr and voronoi codes from input - try: - test_and_get_codenode(self.inputs.kkr, 'kkr.kkr', use_exceptions=True) - except ValueError: - error = ("The code you provided for kkr does not " - "use the plugin kkr.kkr") - self.ctx.errors.append(error) - self.control_end_wc(error) + 
if self.ctx.check_dos: + try: + test_and_get_codenode(self.inputs.kkr, 'kkr.kkr', use_exceptions=True) + except ValueError: + error = ("The code you provided for kkr does not " + "use the plugin kkr.kkr") + self.ctx.errors.append(error) + self.control_end_wc(error) try: test_and_get_codenode(self.inputs.voronoi, 'kkr.voro', use_exceptions=True) except ValueError: @@ -361,7 +359,7 @@ def run_voronoi(self): VoronoiProcess, inputs = get_inputs_voronoi(voronoicode, structure, options, label, description, params=params) self.report('INFO: run voronoi step {}'.format(self.ctx.iter)) - future = submit(VoronoiProcess, **inputs) + future = self.submit(VoronoiProcess, **inputs) # return remote_voro (passed to dos calculation as input) @@ -490,7 +488,7 @@ def get_dos(self): wf_desc = 'subworkflow of a DOS calculation that perform a singe-shot KKR calc.' future = submit(kkr_dos_wc, kkr=code, remote_data=remote, wf_parameters=wfdospara_node, - _label=wf_label, _description=wf_desc) + label=wf_label, description=wf_desc) return ToContext(doscal=future) @@ -617,7 +615,7 @@ def control_end_wc(self, errormsg): self.ctx.abort = True self.report(errormsg) self.return_results() - self.abort(errormsg) + #self.abort(errormsg) def return_results(self): @@ -784,10 +782,10 @@ def update_voro_input(params_old, updatenode, voro_output): Pseudo wf used to keep track of updated parameters in voronoi calculation. voro_output only enters as dummy argument for correct connection but logic using this value is done somewhere else. 
""" - dummy = voro_output.copy() + dummy = voro_output # voro_output is only dummy input to draw connection in graph updatenode_dict = updatenode.get_dict() new_parameternode = update_params(params_old, nodename=None, nodedesc=None, **updatenode_dict) - return new_parameternode \ No newline at end of file + return new_parameternode diff --git a/docs/source/.DS_Store b/docs/source/.DS_Store deleted file mode 100644 index 4d5d3d5a..00000000 Binary files a/docs/source/.DS_Store and /dev/null differ diff --git a/docs/source/examples/kkr_bandstruc_example.py b/docs/source/examples/kkr_bandstruc_example.py index 794fbcc0..cb9d4a07 100644 --- a/docs/source/examples/kkr_bandstruc_example.py +++ b/docs/source/examples/kkr_bandstruc_example.py @@ -78,7 +78,7 @@ def wait_for_it(calc, maxwait=300): #plotting of bandstructure and previously calculated DOS data # load DOS data -from aiida_kkr.tools.common_functions import interpolate_dos +from masci_tools.io.common_functions import interpolate_dos dospath_host = host_dos_calc.out.retrieved.get_abs_path('') ef, dos, dos_interpol = interpolate_dos(dospath_host, return_original=True) dos, dos_interpol = dos[0], dos_interpol[0] @@ -119,4 +119,4 @@ def wait_for_it(calc, maxwait=300): title('DOS') suptitle(struc.get_formula(), fontsize=16) -show() \ No newline at end of file +show() diff --git a/docs/source/examples/kkrimp_dos_example.py b/docs/source/examples/kkrimp_dos_example.py index ae129322..13b30d99 100644 --- a/docs/source/examples/kkrimp_dos_example.py +++ b/docs/source/examples/kkrimp_dos_example.py @@ -81,7 +81,7 @@ def wait_for_it(calc, maxwait=300): # Finally plot the DOS: # get interpolated DOS from GF_host_doscalc calculation: -from aiida_kkr.tools.common_functions import interpolate_dos +from masci_tools.io.common_functions import interpolate_dos dospath_host = GF_host_doscalc.out.retrieved.get_abs_path('') ef, dos, dos_interpol = interpolate_dos(dospath_host, return_original=True) dos, dos_interpol = dos[0], 
dos_interpol[0] @@ -109,4 +109,4 @@ def wait_for_it(calc, maxwait=300): ylim(-0.5,8.5) xlabel('E-E_F (eV)') ylabel('DOS (states/eV)') -show() \ No newline at end of file +show() diff --git a/docs/source/index.rst b/docs/source/index.rst index 9408cc65..956b6891 100755 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -11,22 +11,38 @@ .. _Jülich KKRcode: http://www.judft.de/pm/index.php + Welcome to documentation of the AiiDA plugin for the Jülich KKRcode! =========================================================================== -The plugin is available at https://github.com/broeder-j/aiida-kkr +The plugin is available at https://github.com/JuDFTteam/aiida-kkr If you use this plugin for your research, please cite the following work: .. highlights:: Author Name1, Author Name2, *Paper title*, Jornal Name XXX, YYYY (Year). -If you use AiiDA for your research, please cite the following work: +Also please cite the original AiiDA paper: .. highlights:: Giovanni Pizzi, Andrea Cepellotti, Riccardo Sabatini, Nicola Marzari, and Boris Kozinsky, *AiiDA: automated interactive infrastructure and database for computational science*, Comp. Mat. Sci 111, 218-230 (2016); http://dx.doi.org/10.1016/j.commatsci.2015.09.013; http://www.aiida.net. +Requirements +------------ + +- Installation of `aiida-core`_ +- Installation of KKR codes (*kkrhost*, *kkrimp*, *voronoi*) of the `JuKKR package`_ +- Installation of `aiida-kkr`_ + +Once all requirements are installed you need to `set up the computers and codes`_ before you can submit KKR calculations using the *aiida-kkr* plugin. + + +.. _`aiida-core`: https://aiida-core.readthedocs.io/en/stable/installation/index.html +.. _`aiida-kkr`: https://github.com/JuDFTteam/aiida-kkr/blob/master/README.md +.. _`JuKKR package`: https://iffgit.fz-juelich.de/kkr/jukkr +..
_`set up the computers and codes`: https://aiida-core.readthedocs.io/en/stable/get_started/index.html#setup-of-computers-and-codes + User's guide ++++++++++++ diff --git a/docs/source/module_guide/.DS_Store b/docs/source/module_guide/.DS_Store deleted file mode 100644 index 5008ddfc..00000000 Binary files a/docs/source/module_guide/.DS_Store and /dev/null differ diff --git a/docs/source/module_guide/tools.rst b/docs/source/module_guide/tools.rst index 9233eead..a4d5cf2f 100644 --- a/docs/source/module_guide/tools.rst +++ b/docs/source/module_guide/tools.rst @@ -1,13 +1,6 @@ Tools +++++ -Common functions that do not need aiida ---------------------------------------- -.. automodule:: aiida_kkr.tools.common_functions - :members: - :private-members: - :special-members: - Common (work)functions that need aiida -------------------------------------- .. automodule:: aiida_kkr.tools.common_workfunctions @@ -15,27 +8,6 @@ Common (work)functions that need aiida :private-members: :special-members: -KKR parameters class --------------------- -.. automodule:: aiida_kkr.tools.kkr_params - :members: - :private-members: - :special-members: - -KKR parser functions --------------------- -.. automodule:: aiida_kkr.tools.kkrparser_functions - :members: - :private-members: - :special-members: - -Voronoi parser functions ------------------------- -.. automodule:: aiida_kkr.tools.voroparser_functions - :members: - :private-members: - :special-members: - KKRimp tools ------------ .. automodule:: aiida_kkr.tools.tools_kkrimp diff --git a/docs/source/module_guide/workflows.rst b/docs/source/module_guide/workflows.rst index e5eee9e3..194113c4 100644 --- a/docs/source/module_guide/workflows.rst +++ b/docs/source/module_guide/workflows.rst @@ -47,3 +47,25 @@ Find magnetic ground state :members: :private-members: :special-members: + +Find Green Function writeout for KKRimp +--------------------------------------- +.. 
automodule:: aiida_kkr.workflows.gf_writeout + :members: + :private-members: + :special-members: + +KKRimp self-consistency +----------------------- +.. automodule:: aiida_kkr.workflows.kkr_imp_sub + :members: + :private-members: + :special-members: + +KKRimp complete calculation +--------------------------- +.. automodule:: aiida_kkr.workflows.kkr_imp + :members: + :private-members: + :special-members: + diff --git a/docs/source/user_guide/.DS_Store b/docs/source/user_guide/.DS_Store deleted file mode 100644 index 5008ddfc..00000000 Binary files a/docs/source/user_guide/.DS_Store and /dev/null differ diff --git a/docs/source/user_guide/calculations.rst b/docs/source/user_guide/calculations.rst index adb1bdfb..2c737147 100644 --- a/docs/source/user_guide/calculations.rst +++ b/docs/source/user_guide/calculations.rst @@ -605,7 +605,7 @@ node) and the host GF which contains the DOS contour information (via ``host_Gre Finally we plot the DOS:: # get interpolated DOS from GF_host_doscalc calculation: - from aiida_kkr.tools.common_functions import interpolate_dos + from masci_tools.io.common_functions import interpolate_dos dospath_host = GF_host_doscalc.out.retrieved.get_abs_path('') ef, dos, dos_interpol = interpolate_dos(dospath_host, return_original=True) dos, dos_interpol = dos[0], dos_interpol[0] @@ -1167,7 +1167,7 @@ Download: :download:`this example script <../examples/kkrimp_dos_example.py>` # Finally plot the DOS: # get interpolated DOS from GF_host_doscalc calculation: - from aiida_kkr.tools.common_functions import interpolate_dos + from masci_tools.io.common_functions import interpolate_dos dospath_host = GF_host_doscalc.out.retrieved.get_abs_path('') ef, dos, dos_interpol = interpolate_dos(dospath_host, return_original=True) dos, dos_interpol = dos[0], dos_interpol[0] @@ -1293,7 +1293,7 @@ Download: :download:`this example script <../examples/kkr_bandstruc_example.py>` #plotting of bandstructure and previously calculated DOS data # load DOS data - from 
aiida_kkr.tools.common_functions import interpolate_dos + from masci_tools.io.common_functions import interpolate_dos dospath_host = host_dos_calc.out.retrieved.get_abs_path('') ef, dos, dos_interpol = interpolate_dos(dospath_host, return_original=True) dos, dos_interpol = dos[0], dos_interpol[0] @@ -1334,4 +1334,4 @@ Download: :download:`this example script <../examples/kkr_bandstruc_example.py>` title('DOS') suptitle(struc.get_formula(), fontsize=16) - show() \ No newline at end of file + show() diff --git a/docs/source/user_guide/workflows.rst b/docs/source/user_guide/workflows.rst index cca45ce6..2e4afd1c 100644 --- a/docs/source/user_guide/workflows.rst +++ b/docs/source/user_guide/workflows.rst @@ -293,7 +293,175 @@ Case 2: Start from structure and run voronoi calculation first run(kkr_scf_wc, structure=Cu, kkr=kkrcode, voronoi=vorocode, calc_parameters=ParameterData(dict=kkr_settings.get_dict())) + +KKR flex (GF calculation) ++++++++++++++++++++++++++ + +The Green's function writeout workflow performs a KKR calculation with runoption +``KKRFLEX`` to write out the ``kkr_flexfiles``. Those are needed for a ``kkrimp`` +calculation. + +Inputs: + * ``kkr`` (*aiida.orm.Code*): KKRcode using the ``kkr.kkr`` plugin + * ``remote_data`` (*RemoteData*): The remote folder of the (converged) kkr calculation + * ``imp_info`` (*ParameterData*): ParameterData node containing the information of the desired impurities (needed to write out the ``kkr_flexfiles`` and the ``scoef`` file) + * ``options_parameters`` (*ParameterData*, optional): Some settings of the workflow behavior (e.g. computer settings) + * ``label`` (*str*, optional): Label of the workflow + * ``description`` (*str*, optional): Longer description of the workflow + +Returns nodes: + * ``calculation_info`` (*ParameterData*): Node containing general information about the workflow (e.g. errors, computer information, ...) 
+ * ``GF_host_remote`` (*RemoteData*): RemoteFolder with all of the ``kkrflexfiles`` and further output of the workflow + + +Example Usage +------------- + +We start by getting an installation of the KKRcode:: + + from aiida.orm import Code + kkrcode = Code.get_from_string('KKRcode@my_mac') + +Next load the remote folder node of the previous calculation +(here the :ref:`converged calculation of the Cu bulk test case `) +from which we want to start the following KKRFLEX calculation:: + + # import old KKR remote folder + from aiida.orm import load_node + kkr_remote_folder = load_node().out.remote_folder + +Afterwards, the information regarding the impurity has to be given +(in this example, we use a Au impurity with a cutoff radius of 2 alat which is placed in the first labelled lattice point of the unit cell). Further keywords for the ``impurity_info`` node can be found in the respective part of the documentation:: + + # set up impurity info node + imps = ParameterData(dict={'ilayer_center':0, 'Rcut':2, 'Zimp':[79.]}) + +Then we set some settings of the options parameters (this step is optional):: + + # create workflow settings + from aiida.orm import DataFactory + ParameterData = DataFactory('parameter') + options = ParameterData(dict={'use_mpi':'false', 'queue_name':'viti_node', 'walltime_sec' : 60*60*2, + 'resources':{'num_machines':1, 'num_mpiprocs_per_machine':1}}) + +Finally we run the workflow:: + + from aiida_kkr.workflows.gf_writeout import kkr_flex_wc + from aiida.work import run + run(kkr_flex_wc, label='test_gf_writeout', description='My test KKRflex calculation.', + kkr=kkrcode, remote_data=kkr_remote_folder, options_parameters=options) + +KKR impurity self consistency ++++++++++++++++++++++++++++++ + +This workflow performs a KKRimp self consistency calculation starting from a +given host-impurity startpotential and converges it. + +.. note:: + This workflow does only work for a non-magnetic calculation without spin-orbit-coupling. 
Those + two features will be added at a later stage. This is also just a sub workflow, meaning that it only + converges an already given host-impurity potential. The whole kkrimp workflow starting from scratch + will also be added at a later stage. + +Inputs: + * ``kkrimp`` (*aiida.orm.Code*): KKRimpcode using the ``kkr.kkrimp`` plugin + * ``host_imp_startpot`` (*SinglefileData*): File containing the host impurity potential (potential file with the whole cluster with all host and impurity potentials) + * ``GF_remote_data`` (*RemoteData*): Output from a KKRflex calculation (can be extracted from the output of the GF writeout workflow) + * ``structure`` (*StructureData*, optional): Structure of the problem (not yet needed, needed later for the magnetic feature implementation) + * ``options_parameters`` (*ParameterData*, optional): Some general settings for the workflow (e.g. computer settings, queue, ...) + * ``wf_parameters`` (*ParameterData*, optional) : Settings for the behavior of the workflow (e.g. convergence settings, physical properties, ...) + * ``label`` (*str*, optional): Label of the workflow + * ``description`` (*str*, optional): Longer description of the workflow + +Returns nodes: + * ``calculation_info`` (*ParameterData*): Node containing general information about the workflow (e.g. errors, computer information, ...) 
+ * ``host_imp_pot`` (*SinglefileData*): Converged host impurity potential that can be used for further calculations (DOS calc, new input for different KKRimp calculation) + + +Example Usage +------------- + +We start by getting an installation of the KKRimpcode:: + + from aiida.orm import Code + kkrimpcode = Code.get_from_string('KKRimpcode@my_mac') + +Next, either load the remote folder node of the previous calculation +(here the KKRflex calculation that writes out the GF and KKRflexfiles) or the output node +of the gf_writeout workflow from which we want to start the following KKRimp calculation:: + + # import old KKRFLEX remote folder + from aiida.orm import load_node + GF_host_output_folder = load_node().out.remote_folder # 1st possibility + # GF_host_output_folder = load_node() # 2nd possibility: take ``GF_host_remote`` output node from gf_writeout workflow + +Now, load a converged calculation of the host system (here Cu bulk) as well as an auxiliary voronoi calculation +(here Au) for the desired impurity:: + + # load converged KKRcalc + kkrcalc_converged = load_node() + # load auxiliary voronoi calculation + voro_calc_aux = load_node() + +Using those, one can obtain the needed host-impurity potential that is needed as input for the workflow. 
Therefore, +we use the ``neworder_potential_wf`` workfunction which is able to generate the startpot:: + + ## load the necessary function + from aiida_kkr.tools.common_workfunctions import neworder_potential_wf + import numpy as np + + # extract the name of the converged host potential + potname_converged = kkrcalc_converged._POTENTIAL + # set the name for the potential of the desired impurity (here Au) + potname_imp = 'potential_imp' + + neworder_pot1 = [int(i) for i in np.loadtxt(GF_host_calc.out.retrieved.get_abs_path('scoef'), skiprows=1)[:,3]-1] + potname_impvorostart = voro_calc_aux._OUT_POTENTIAL_voronoi + replacelist_pot2 = [[0,0]] + + # set up settings node to use as argument for the neworder_potential function + settings_dict = {'pot1': potname_converged, 'out_pot': potname_imp, 'neworder': neworder_pot1, + 'pot2': potname_impvorostart, 'replace_newpos': replacelist_pot2, 'label': 'startpot_KKRimp', + 'description': 'starting potential for Au impurity in bulk Cu'} + settings = ParameterData(dict=settings_dict) + + # finally create the host-impurity potential (here ``startpot_Au_imp_sfd``) using the settings node as well as + the previously loaded converged KKR calculation and auxiliary voronoi calculation: + startpot_Au_imp_sfd = neworder_potential_wf(settings_node=settings, + parent_calc_folder=kkrcalc_converged.out.remote_folder, + parent_calc_folder2=voro_calc_aux.out.remote_folder) + +.. note :: + Further information on how the neworder potential function works can be found in the respective part of + this documentation. + + +Afterwards, the information regarding the impurity has to be given +(in this example, we use a Au impurity with a cutoff radius of 2 alat which is placed in the first labelled lattice point of the unit cell).
Further +keywords for the ``impurity_info`` node can be found in the respective part of the documentation:: + + # set up impurity info node + imps = ParameterData(dict={'ilayer_center':0, 'Rcut':2, 'Zimp':[79.]}) + +Then, we set some settings of the options parameters on the one hand and specific wf_parameters +regarding the convergence etc.:: + + options = ParameterData(dict={'use_mpi':'false', 'queue_name':'viti_node', 'walltime_sec' : 60*60*2, + 'resources':{'num_machines':1, 'num_mpiprocs_per_machine':20}}) + kkrimp_params = ParameterData(dict={'nsteps': 50, 'convergence_criterion': 1*10**-8, 'strmix': 0.1, + 'threshold_aggressive_mixing': 3*10**-2, 'aggressive_mix': 3, + 'aggrmix': 0.1, 'kkr_runmax': 5}) + +Finally we run the workflow:: + + from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc + from aiida.work import run + run(kkr_imp_sub_wc, label='kkr_imp_sub test (CuAu)', description='test of the kkr_imp_sub workflow for Cu, Au system', + kkrimp=kkrimpcode, options_parameters=options, host_imp_startpot=startpot_Au_imp_sfd, + GF_remote_data=GF_host_output_folder, wf_parameters=kkrimp_params) + + Equation of states ++++++++++++++++++ diff --git a/examples/kkr_plugins_scf_wc_test.ipynb b/examples/kkr_plugins_scf_wc_test.ipynb index 9ab120b0..1de2f708 100644 --- a/examples/kkr_plugins_scf_wc_test.ipynb +++ b/examples/kkr_plugins_scf_wc_test.ipynb @@ -2,20 +2,14 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "# AiiDA-KKR demo" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "Here is a Demo to run the Voronoi code with a follow up KKR calculation with AiiDA \n", "with pure python code. 
Also add the end we run the same with the kkr_scf workchain" @@ -23,10 +17,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "Some Comments:\n", "\n", @@ -40,11 +31,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "%load_ext autoreload\n", @@ -58,7 +45,7 @@ " load_dbenv()\n", "from aiida.orm import Code, load_node\n", "from aiida.orm import DataFactory, CalculationFactory\n", - "from aiida_kkr.tools.kkrcontrol import write_kkr_inputcard_template, fill_keywords_to_inputcard, create_keyword_default_values\n", + "from aiida_kkr.tools.kkr_params import kkrparams\n", "from pprint import pprint\n", "from scipy import array\n", "from aiida_kkr.calculations.kkr import KkrCalculation\n", @@ -73,14 +60,10 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "# Prepare and AiiDAStructure Data as input, example Cu\n", + "# Prepare and fill AiiDA StructureData as input, example Cu\n", "\n", "alat = 6.830000 # in a_Bohr\n", "abohr = 0.52917721067\n", @@ -93,78 +76,50 @@ "Cu = StructureData(cell=[[a, a, 0.0], [a, 0.0, a], [0.0, a, a]])\n", "Cu.append_atom(position=[0.0, 0.0, 0.0], symbols='Cu')\n", "#Cu.store()\n", - "Cu = load_node(79546)\n", + "Cu = load_node(1)\n", "print(Cu)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Now gernerate a ParameterData node with keyvalues needed by voronoi and KKR \n", "# we use a helper function for some defaults and set some values\n", "\n", - "keywords = create_keyword_default_values()\n", - "keywords['NATYP'][0] = natyp\n", - "keywords['ALATBASIS'][0] = alat\n", - 
"keywords['NSPIN'][0] = 1\n", - "keywords['LMAX'][0] = 2\n", - "# choose only coarse energy contour and k-mesh for test purposes\n", - "keywords['NPOL'][0] = 4\n", - "keywords['NPT1'][0] = 3\n", - "keywords['NPT2'][0] = 10\n", - "keywords['NPT3'][0] = 3\n", - "keywords['BZKX'][0] = 10\n", - "keywords['BZKY'][0] = 10\n", - "keywords['RCLUSTZ'][0] = 1.50\n", - "keywords['RCLUSTXY'][0] = 1.50\n", - "# for ASA\n", - "keywords['INS'] = [0, '%i']\n", - "keywords['KSHAPE'] = [0, '%i']\n", - "pprint(keywords)" + "keywords = kkrparams(NATYP=natyp, NSPIN=1, LMAX=2, RCLUSTZ=1.5, INS=0)\n", + "pprint(keywords.get_set_values())" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Store the node\n", - "keyw = ParameterData(dict=keywords)\n", + "keyw = ParameterData(dict=keywords.get_dict())\n", "#keyw.store()\n", - "keyw = load_node(79550)\n", + "keyw = load_node(2)\n", "print keyw" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Running a single Vornoi calculation\n", - "voronoi = Code.get_from_string('voro@local_mac')\n", + "voronoi = Code.get_from_string('voronoi@iff003')\n", "calc = VoronoiCalculation()\n", "calc.label = 'Test voronoi'\n", "calc.set_withmpi(False)\n", "calc.set_resources({\"num_machines\" : 1})\n", "calc.set_max_wallclock_seconds(300)\n", - "calc.set_computer('local_mac')\n", + "calc.set_computer(voronoi.get_computer())\n", "calc.use_code(voronoi)\n", "calc.use_structure(Cu)\n", "calc.use_parameters(keyw)" @@ -173,14 +128,10 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "submit_test = True\n", + "submit_test = False\n", "\n", "if submit_test:\n", " subfolder, 
script_filename = calc.submit_test()\n", @@ -202,26 +153,18 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "!cat submit_test/20171110-00007/_aiidasubmit.sh\n", - "!ls submit_test/20171110-00001/\n", - "! cat submit_test/20171110-00007/inputcard" + "!cat submit_test/20181120-00001/_aiidasubmit.sh\n", + "!ls submit_test/20181120-00001/\n", + "!cat submit_test/20181120-00001/inputcard" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Ontop the voronoi calculation we want to run a KKR calculation\n", @@ -231,11 +174,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# use the calculation run before or load a voronoi calculation\n", @@ -245,11 +184,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# We create a new parameter node in which we store the emin extracted form the voronoi calculation\n", @@ -263,11 +198,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "keyw2 = ParameterData(dict=keywords2)\n", @@ -279,11 +210,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Now we create and run the kkr Calculation\n", @@ -303,11 +230,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, 
"outputs": [], "source": [ "submit_test = False\n", @@ -331,11 +254,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "! cat submit_test/20171110-00020///_aiidasubmit.sh\n", @@ -346,11 +265,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Check with the verdi shell if everything with you calculations went right" @@ -359,22 +274,14 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Voronoi parser test" @@ -383,11 +290,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "n = load_node(79559)\n", @@ -402,22 +305,14 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Test KKR parser functions" @@ -426,11 +321,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "from aiida_kkr.parsers.kkr import KkrParser, parse_kkr_outputfile" @@ -439,11 +330,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - 
"editable": true - }, + "metadata": {}, "outputs": [], "source": [ "outfile = '/Users/broeder/aiida/github/aiida-kkr/aiida_kkr/tests/files/kkr/outputfiles/out_kkr_Cu'" @@ -452,11 +339,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "res = parse_kkr_outputfile(outfile)" @@ -465,11 +348,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "print res" @@ -478,22 +357,14 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# Test kkr_scf workchain" @@ -502,11 +373,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "from aiida_kkr.workflows.kkr_scf import kkr_scf_wc\n", @@ -516,11 +383,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# use same nodes as above\n", @@ -536,11 +399,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "# The workflow is still very premitive, therefore we use for testing parameters that should work for both codes\n", @@ -550,11 +409,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], 
"source": [ "print(res)" @@ -563,11 +418,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "res = submit(kkr_scf_wc, structure=Cu, calc_parameters=key2w, voronoi=voronoi, kkr=kkr)# wf_parameters=wf_parameters," @@ -576,11 +427,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [] } @@ -601,7 +448,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", - "version": "2.7.10" + "version": "2.7.15" }, "toc": { "colors": { @@ -659,5 +506,5 @@ } }, "nbformat": 4, - "nbformat_minor": 0 + "nbformat_minor": 1 } diff --git a/setup.json b/setup.json index 439eb7f3..59faef62 100644 --- a/setup.json +++ b/setup.json @@ -14,12 +14,16 @@ "Topic :: Scientific/Engineering :: Physics", "Natural Language :: English" ], - "version": "0.1.2", + "version": "1.0.0", "setup_requires": ["reentry"], "reentry_register": true, "install_requires": [ - "aiida-core < 0.12", - "pytest-cov" + "aiida-core >= 1.0.0a4", + "pgtest", + "pytest-cov >= 2.5.0", + "masci-tools", + "sphinx", + "sphinx_rtd_theme" ], "entry_points": { "aiida.calculations": [ @@ -43,8 +47,11 @@ "kkr.eos = aiida_kkr.workflows.eos:kkr_eos_wc", "kkr.startpot = aiida_kkr.workflows.voro_start:kkr_startpot_wc", "kkr.check_mag = aiida_kkr.workflows.check_magnetic_state:kkr_check_mag_wc", - "kkr.convergence_check = aiida_kkr.workflows.check_para_convergence:kkr_check_para_wc" - ], + "kkr.convergence_check = aiida_kkr.workflows.check_para_convergence:kkr_check_para_wc", + "kkr.gf_writeout = aiida_kkr.workflows.gf_writeout:kkr_flex_wc", + "kkr.imp_sub = aiida_kkr.workflows.kkr_imp_sub:kkr_imp_sub_wc", + "kkr.imp = aiida_kkr.workflows.kkr_imp:kkr_imp_wc" + ], "console_scripts": [ "kkrstructure = aiida_kkr.cmdline.data_cli:cli" ] diff --git 
a/setup_requirements.txt b/setup_requirements.txt deleted file mode 100644 index 37ea2b4e..00000000 --- a/setup_requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ - - - -# for support of the xml files and parsers -aiida<0.12 - -# for utilities -#ase -#pymatgen - -# for auto tests -pgtest -pytest-cov >= 2.5.0