diff --git a/aiida_kkr/tools/tools_STM_scan.py b/aiida_kkr/tools/tools_STM_scan.py
index a38a878a..ccd0f551 100644
--- a/aiida_kkr/tools/tools_STM_scan.py
+++ b/aiida_kkr/tools/tools_STM_scan.py
@@ -13,7 +13,7 @@
 __copyright__ = (u'Copyright (c), 2023, Forschungszentrum Jülich GmbH, '
                  'IAS-1/PGI-1, Germany. All rights reserved.')
 __license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.1.3'
+__version__ = '0.1.4'
 __contributors__ = (u'Philipp Rüßmann', u'Raffaele Aliberti')

 ##############################################################################
@@ -279,12 +279,15 @@ def STM_pathfinder(host_remote):
     py_struc = supp_struc.get_pymatgen()

     struc_dict = py_struc.as_dict()
-    # Find the Bravais vectors that are in plane vectors
+    # Find the Bravais vectors that are in-plane vectors (assumes 2D structure)
     plane_vectors = {'plane_vectors': [], 'space_group': ''}
     for vec in struc_dict['lattice']['matrix']:
         # Is this sufficient to find all the in-plane vectors?
-        if vec[2] == 0:
+        if vec[2] == 0 or (struc.pbc[2] and (vec[0] + vec[1]) > 0):
             plane_vectors['plane_vectors'].append(vec[:2])
+    # finally check if setting of plane_vectors worked
+    if len(plane_vectors['plane_vectors']) == 0:
+        raise ValueError('Could not set "plane_vectors" in STM_pathfinder')

     # Here we get the symmetry operations that are possible
     symmetry_matrices = SpacegroupAnalyzer(py_struc).get_point_group_operations(cartesian=True)
@@ -493,28 +496,18 @@ def find_linear_combination_coefficients(plane_vectors, vectors):

     # Formulate the system of equations Ax = b
     A = np.vstack((plane_vectors[0], plane_vectors[1])).T
-
-    # Solve the system of equations using least squares method
-    data = []
-    for element in vectors:
-        b = element
-        # We use the least square mean error procedure to evaulate the units of da and db
-        # lstsq returns: coeff, residue, rank, and singular value
-        # We only need the coefficient.
-        data.append(np.linalg.lstsq(A, b, rcond=None))
-
+    # inverse of A matrix
+    Ainv = np.matrix(A)**(-1)
     indices = []
-    for element in data:
-        supp = []
-        for elem in element[0]:
-            # Here we round to an integer, this is because of the numerical error
-            # which is present inside the calculation.
-            supp.append(round(elem))
-        indices.append(supp)
-
-    # Before returning the indices, we reorder them first from the lowest to the highest valued
-    # on the x axis and then from the lowest to the highest on the y axis.
-
-    indices = sorted(indices, key=itemgetter(0, 1))
+    # loop over flattened list
+    for vec in np.array(vectors).reshape(-1, 2):
+        # get indices from x = A^{-1}.b
+        index = np.array((Ainv * np.matrix(vec).transpose()).transpose()).reshape(2)
+        indices.append(index)
+    # make sure to have integer values
+    indices = np.round(np.array(indices), 0)
+
+    isort = indices[:, 0] + indices[:, 1] / (10 * max(indices[:, 0]))
+    indices = indices[isort.argsort()]

     return indices
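Side note on the index-finding change in find_linear_combination_coefficients above: the least-squares solve is replaced by a direct x = A^(-1) b, where the columns of A are the two in-plane Bravais vectors and b is a scan position, followed by rounding to integers. A minimal sketch of that idea (the Bravais vectors and positions below are made-up illustration values, and plain np.linalg.inv is used here instead of the np.matrix inverse from the patch):

    import numpy as np

    # assumed in-plane Bravais vectors (columns of A) and a few scan positions (b vectors)
    plane_vectors = [[2.57, 0.0], [1.285, 2.225]]
    positions = [[0.0, 0.0], [2.57, 0.0], [3.855, 2.225]]

    A = np.vstack((plane_vectors[0], plane_vectors[1])).T
    Ainv = np.linalg.inv(A)

    # integer (da, db) units along the two in-plane vectors; rounding absorbs numerical noise
    indices = np.round([Ainv @ np.asarray(pos) for pos in positions], 0)
    print(indices)  # -> (0, 0), (1, 0), (1, 1)
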
diff --git a/aiida_kkr/workflows/kkr_STM.py b/aiida_kkr/workflows/kkr_STM.py
index 5eab5266..996f0fd9 100644
--- a/aiida_kkr/workflows/kkr_STM.py
+++ b/aiida_kkr/workflows/kkr_STM.py
@@ -13,7 +13,7 @@
 __copyright__ = (u'Copyright (c), 2024, Forschungszentrum Jülich GmbH, '
                  'IAS-1/PGI-1, Germany. All rights reserved.')
 __license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.1.4'
+__version__ = '0.1.5'
 __contributors__ = (u'Raffaele Aliberti', u'David Antognini Silva', u'Philipp Rüßmann')

 _VERBOSE_ = False
@@ -72,6 +72,18 @@ class kkr_STM_wc(WorkChain):
     # add defaults of dos_params since they are passed onto that workflow
     _wf_default['dos_params'] = kkr_imp_dos_wc.get_wf_defaults()['dos_params']

+    # return default values (helpful for users)
+    @classmethod
+    def get_wf_defaults(cls, silent=False):
+        """Print and return _wf_default dictionary.
+
+        Can be used to easily create set of wf_parameters.
+        returns _wf_default
+        """
+        if not silent:
+            print(f'Version of workflow: {cls._wf_version}')
+        return cls._wf_default
+
     @classmethod
     def define(cls, spec):
         """
@@ -118,13 +130,6 @@ def define(cls, spec):
             required=True,
             help='Information of the impurity like position in the unit cell, screening cluster, atom type.'
         )
-        spec.input(
-            'host_calc',
-            valid_type=RemoteData,
-            required=False,
-            help='The information about the clean host structure is required in order to continue the cluster'
-            'Inside a bigger host structure with empty sites.'
-        )
         spec.input(
             'host_remote',
             valid_type=RemoteData,
@@ -137,12 +142,6 @@ def define(cls, spec):
             required=True,
             help='Impurity potential node',
         )
-        spec.input(
-            'remote_data',
-            valid_type=RemoteData,
-            required=False,
-            help='Remote data from a converged kkr calculation, required for the gf writeout step',
-        )
         spec.input(
             'kkrflex_files',
             valid_type=RemoteData,
@@ -373,16 +372,16 @@ def impurity_cluster_evaluation(self):
         impurity_info_aux = impurity_info
         imp_potential_node_aux = imp_potential_node

+        # Case in which we don't pass any element to embed in the impurity cluster, it
+        # uses the impurity files given in the inputs.
+        if len(coeff) == 0:
+            return impurity_info, imp_potential_node
+
         for element in coeff:

             if _VERBOSE_:
                 t0 = time()

-            # Case in which we don't pass any element to embed in the impurity cluster, it
-            # uses the impurity files given in the inputs.
-            if element == []:
-                return impurity_info, imp_potential_node
-
             # Check if the position is already in the cluster
             # for this we need to first get the position
             tmp_pos = self.get_tip_position_dict(element[0], element[1])
diff --git a/aiida_kkr/workflows/kkr_scf.py b/aiida_kkr/workflows/kkr_scf.py
index 3c780487..1ff5a150 100644
--- a/aiida_kkr/workflows/kkr_scf.py
+++ b/aiida_kkr/workflows/kkr_scf.py
@@ -162,7 +162,6 @@ class kkr_scf_wc(WorkChain):
     _options_default.custom_scheduler_commands = ''  # some additional scheduler commands

     # intended to guide user interactively in setting up a valid wf_params node
-    @classmethod
     def get_wf_defaults(cls, silent=False):
         """Print and return _wf_default dictionary.

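The get_wf_defaults classmethod added to kkr_STM_wc above mirrors the helper of the other workchains: it returns the _wf_default dictionary (including dos_params) so that users can start from the defaults and only override what they need. A rough usage sketch, assuming a loaded AiiDA profile; the overridden value is only an example:

    from copy import deepcopy

    from aiida import load_profile
    from aiida.orm import Dict
    from aiida_kkr.workflows import kkr_STM_wc

    load_profile()

    # copy the defaults so the class attribute is not modified in place
    wf_params = deepcopy(kkr_STM_wc.get_wf_defaults(silent=True))
    wf_params['dos_params']['nepts'] = 7  # e.g. a coarser energy grid for a quick test

    builder = kkr_STM_wc.get_builder()
    builder.wf_parameters = Dict(wf_params)
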
diff --git a/tests/data_dir/stm.aiida b/tests/data_dir/stm.aiida
new file mode 100644
index 00000000..6d4a4939
Binary files /dev/null and b/tests/data_dir/stm.aiida differ
diff --git a/tests/workflows/test_stm.py b/tests/workflows/test_stm.py
new file mode 100644
index 00000000..29bf842a
--- /dev/null
+++ b/tests/workflows/test_stm.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+import pytest
+from ..dbsetup import *
+from aiida.engine import run_get_node
+from ..conftest import voronoi_local_code, kkrhost_local_code, test_dir, data_dir, import_with_migration
+
+
+@pytest.mark.timeout(900, method='thread')
+def test_stm_wc(
+    clear_database_before_test, voronoi_local_code, kkrhost_local_code, kkrimp_local_code, enable_archive_cache,
+    ndarrays_regression
+):
+    """
+    STM workflow test for a simple Cu host in host impurity adding a minimal scanning region
+    """
+    from aiida.orm import Code, load_node, Dict, StructureData, load_group
+    from masci_tools.io.kkr_params import kkrparams
+    from aiida_kkr.workflows import kkr_STM_wc
+    from numpy import array
+
+    options = {
+        'queue_name': queuename,
+        'resources': {
+            'num_machines': 1
+        },
+        'max_wallclock_seconds': 5 * 60,
+        'withmpi': False,
+        'custom_scheduler_commands': ''
+    }
+    options = Dict(options)
+
+    # import parent calculation (converged host system)
+    group_pk = import_with_migration('data_dir/kkrimp_full_wc.aiida')
+    kkr_imp_scf = [n for n in load_group(group_pk).nodes if n.label == 'kkrimp_scf full Cu host_in_host'][0]
+
+    # create process builder to set parameters
+    builder = kkr_STM_wc.get_builder()
+    builder.metadata.label = 'stm test'
+    builder.kkrimp = kkrimp_local_code
+    builder.voronoi = voronoi_local_code
+    builder.kkr = kkrhost_local_code
+    builder.options = options
+
+    builder.host_remote = kkr_imp_scf.inputs.remote_data_host
+    builder.imp_info = kkr_imp_scf.inputs.impurity_info
+    builder.imp_potential_node = kkr_imp_scf.outputs.converged_potential
+
+    builder.tip_position = Dict({'ilayer': 0, 'nx': 2, 'ny': 2})
+
+    builder.wf_parameters = Dict({
+        'jij_run': False,
+        'lmdos': True,
+        'retrieve_kkrflex': True,
+        'dos_params': {
+            'nepts': 7,
+            'tempr': 200.0,
+            'emin': -1.0,
+            'emax': 1.0,
+            'kmesh': [5, 5, 5]
+        }
+    })
+
+    # now run calculation
+    with enable_archive_cache(data_dir / 'stm.aiida'):
+        out, node = run_get_node(builder)
+    print(out)
+    print(list(node.called))
+
+    # check outcome
+    assert 'STM_dos_data_lmdos' in out
+
+    # check dos data
+    check_dict = {
+        'x': out['STM_dos_data_lmdos'].get_x()[1],
+        'y': out['STM_dos_data_lmdos'].get_y()[5][1],
+    }
+    print(check_dict)
+    ndarrays_regression.check(check_dict)
+
+    # print('x', out['STM_dos_data_lmdos'].get_x()[1])
+    # print('y', out['STM_dos_data_lmdos'].get_y()[5][1])
diff --git a/tests/workflows/test_stm/test_stm_wc.npz b/tests/workflows/test_stm/test_stm_wc.npz
new file mode 100644
index 00000000..24fc1bc9
Binary files /dev/null and b/tests/workflows/test_stm/test_stm_wc.npz differ
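For reference, the regression check in the new test reads the energy grid and one lm-resolved DOS component from the STM_dos_data_lmdos output (an XyData node). A small sketch of how a finished run could be inspected interactively; the pk is a placeholder for a workchain in your own profile:

    from aiida import load_profile
    from aiida.orm import load_node

    load_profile()

    wc = load_node(1234)  # placeholder pk of a finished kkr_STM_wc run
    xydata = wc.outputs.STM_dos_data_lmdos
    energies = xydata.get_x()[1]  # (name, array, units) tuple -> array
    dos_component = xydata.get_y()[5][1]  # same component checked in the test above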