From 857abb7058b5ae9bb79f1d37fd1c3045e8703082 Mon Sep 17 00:00:00 2001
From: dantogni
Date: Mon, 20 Nov 2023 13:26:58 +0000
Subject: [PATCH] Small fixes

---
 aiida_kkr/tools/combine_imps.py      | 48 +++++++++++++---------------
 aiida_kkr/workflows/_combine_imps.py |  4 ++-
 aiida_kkr/workflows/gf_writeout.py   |  2 +-
 aiida_kkr/workflows/imp_BdG.py       | 11 ++++++-
 aiida_kkr/workflows/kkr_imp_dos.py   |  4 +--
 aiida_kkr/workflows/kkr_imp_sub.py   |  2 +-
 6 files changed, 39 insertions(+), 32 deletions(-)

diff --git a/aiida_kkr/tools/combine_imps.py b/aiida_kkr/tools/combine_imps.py
index 45bba767..1e88b367 100644
--- a/aiida_kkr/tools/combine_imps.py
+++ b/aiida_kkr/tools/combine_imps.py
@@ -82,31 +82,27 @@ def make_potfile_sfd(**kwargs):
     for key in kwargs.keys():
         retrieved = kwargs[key]
 
-    with SandboxFolder() as tempfolder:
-        # find path of tempfolder
-        with tempfolder.open('.dummy', 'w') as dummyfile:
-            tempfolder_path = dummyfile.name
-        tempfolder_path = tempfolder_path.replace('.dummy', '')
-
-        # extract output potential here
-        tar_filenames = []
-        if KkrimpCalculation._FILENAME_TAR in retrieved.list_object_names():
-            # get path of tarfile
-            with retrieved.open(KkrimpCalculation._FILENAME_TAR) as tf:
-                tfpath = tf.name
-            # extract file from tarfile of retrieved to tempfolder
-            with tarfile.open(tfpath) as tf:
-                tar_filenames = [ifile.name for ifile in tf.getmembers()]
-                filename = KkrimpCalculation._OUT_POTENTIAL
-                if filename in tar_filenames:
-                    tf.extract(filename, tempfolder_path)  # extract to tempfolder
-
-        # store as SingleFileData
-        with tempfolder.open(KkrimpCalculation._OUT_POTENTIAL, 'rb') as potfile:
-            potfile_sfd = SinglefileData(file=potfile)
-
-    return potfile_sfd
-
+    from aiida.plugins import DataFactory
+    import tempfile
+    import os
+
+    # Create a SinglefileData node
+    SinglefileData = DataFactory('singlefile')
+    out_potential_content = retrieved.get_object_content("out_potential")
+
+    # Create a temporary file
+    temp_dir = tempfile.gettempdir()
+    temp_file = os.path.join(temp_dir, 'out_potential')
+    with open(temp_file, 'w') as f:
+        f.write(out_potential_content)
+
+    # Create a SinglefileData node with the temporary file
+    potfile_sfd = SinglefileData(temp_file)
+
+    # Remove the temporary file
+    os.remove(temp_file)
+
+    return potfile_sfd
 
 def extract_potfile_from_retrieved(retrieved):
     """
@@ -115,7 +111,7 @@ def extract_potfile_from_retrieved(retrieved):
 
     # check if retrieved has already a single file data child with given link label
     children = [res.node for res in retrieved.get_outgoing(link_label_filter='create_potfile_sfd').all()]
-    if len(children) > 0:
+    if len(children) > 0 and 'result' in children[0].outputs:
         potfile_sfd = children[0].outputs.result
         print('take existing node')
     else:
diff --git a/aiida_kkr/workflows/_combine_imps.py b/aiida_kkr/workflows/_combine_imps.py
index e5b384fd..7531a3a1 100644
--- a/aiida_kkr/workflows/_combine_imps.py
+++ b/aiida_kkr/workflows/_combine_imps.py
@@ -607,7 +607,7 @@ def run_gf_writeout(self):
             #take gf_writeout directly from input to KkrimpCalculation
             gf_writeout_calc = self.ctx.imp1.inputs.host_Greenfunction_folder.get_incoming(node_class=KkrCalculation
                                                                                            ).first().node
-        if (self.ctx.imp1.process_class == kkr_imp_sub_wc or self.ctx.imp1.process_class == KkrimpCalculation):
+        elif self.ctx.imp1.process_class == kkr_imp_sub_wc:
             imp1_sub = self.ctx.imp1
         else:
             if _debug:
@@ -746,6 +746,8 @@ def run_kkrimp_scf(self):
             builder.options = self.inputs.scf.options
         if 'wf_parameters' in self.inputs.scf:
             builder.wf_parameters = self.inputs.scf.wf_parameters
+        if 'params_overwrite' in self.inputs.scf :
+            builder.params_overwrite = self.inputs.scf.params_overwrite
 
         # take care of LDA+U settings
         add_ldausettings, settings_LDAU_combined = self.get_ldau_combined()
diff --git a/aiida_kkr/workflows/gf_writeout.py b/aiida_kkr/workflows/gf_writeout.py
index f5fc9036..c1b00554 100644
--- a/aiida_kkr/workflows/gf_writeout.py
+++ b/aiida_kkr/workflows/gf_writeout.py
@@ -89,7 +89,7 @@ def define(cls, spec):
         super(kkr_flex_wc, cls).define(spec)
 
         spec.input('kkr', valid_type=Code, required=False)
-        spec.input('options', valid_type=Dict, required=False, default=lambda: Dict(dict=cls._options_default))
+        spec.input('options', valid_type=Dict, required=False)#, default=lambda: Dict(dict=cls._options_default))
         spec.input('wf_parameters', valid_type=Dict, required=False)
         spec.input('remote_data', valid_type=RemoteData, required=True)
         spec.input('impurity_info', valid_type=Dict, required=True)
diff --git a/aiida_kkr/workflows/imp_BdG.py b/aiida_kkr/workflows/imp_BdG.py
index 6f695b94..02afa335 100644
--- a/aiida_kkr/workflows/imp_BdG.py
+++ b/aiida_kkr/workflows/imp_BdG.py
@@ -155,6 +155,13 @@ def define(cls, spec):
             required=False,
             help='Parent folder of previously converged host normal state KkrCalculation'
         )
+
+        spec.input(
+            'imp_scf.remote_data_gf',
+            valid_type=RemoteData,
+            required=False,
+            help='RemoteData node of precomputed host Green function'
+        )
 
         # inputs for impurity BdG scf
         spec.expose_inputs(
@@ -278,6 +285,8 @@ def imp_pot_calc(self):
         builder.kkr = self.inputs.kkr
         builder.kkrimp = self.inputs.kkrimp
         builder.remote_data_host = self.inputs.imp_scf.remote_data_host
+        if 'remote_data_gf' in self.inputs.imp_scf:
+            builder.remote_data_gf = self.inputs.imp_scf.remote_data_gf
         builder.wf_parameters = self.inputs.imp_scf.wf_parameters
         if 'options' in self.inputs.imp_scf:
             builder.options = self.inputs.imp_scf.options
@@ -429,7 +438,7 @@ def DOS_calc(self):
            if 'kkr' in self.inputs.dos.gf_writeout:
                 builder.kkr = self.inputs.dos.gf_writeout.kkr
             if 'params_kkr_overwrite' in self.inputs.dos.gf_writeout:
-                builder.params_kkr_overwrite = self.inputs.dos.gf_writeout.params_kkr_overwrite
+                builder.gf_writeout.params_kkr_overwrite = self.inputs.dos.gf_writeout.params_kkr_overwrite
             if 'host_remote' in self.inputs.dos.gf_writeout:
                 builder.host_remote = self.inputs.dos.gf_writeout.host_remote
             if 'options' in self.inputs.dos.gf_writeout:
diff --git a/aiida_kkr/workflows/kkr_imp_dos.py b/aiida_kkr/workflows/kkr_imp_dos.py
index 0e72914d..b9bc0a28 100644
--- a/aiida_kkr/workflows/kkr_imp_dos.py
+++ b/aiida_kkr/workflows/kkr_imp_dos.py
@@ -439,9 +439,9 @@ def run_gfstep(self):
             builder.params_kkr_overwrite = self.inputs.params_kkr_overwrite
         if 'gf_writeout' in self.inputs:
             if 'params_kkr_overwrite' in self.inputs.gf_writeout:
-                builder.params_kkr_overwrite = self.inputs.params_kkr_overwrite
+                builder.params_kkr_overwrite = self.inputs.gf_writeout.params_kkr_overwrite
             if 'options' in self.inputs.gf_writeout:
-                builder.options = self.inputs.options
+                builder.options = self.inputs.gf_writeout.options
 
         future = self.submit(builder)
 
diff --git a/aiida_kkr/workflows/kkr_imp_sub.py b/aiida_kkr/workflows/kkr_imp_sub.py
index 4f7f8a3b..ca767bd2 100644
--- a/aiida_kkr/workflows/kkr_imp_sub.py
+++ b/aiida_kkr/workflows/kkr_imp_sub.py
@@ -118,7 +118,7 @@ def define(cls, spec):
         spec.input('remote_data_Efshift', valid_type=RemoteData, required=False)
         spec.input('kkrimp_remote', valid_type=RemoteData, required=False)
         spec.input('impurity_info', valid_type=Dict, required=False)
-        spec.input('options', valid_type=Dict, required=False, default=lambda: Dict(dict=cls._options_default))
+        spec.input('options', valid_type=Dict, required=False) #, default=lambda: Dict(dict=cls._options_default))
         spec.input('wf_parameters', valid_type=Dict, required=False, default=lambda: Dict(dict=cls._wf_default))
         spec.input(
             'settings_LDAU', valid_type=Dict, required=False, help='LDA+U settings. See KKRimpCalculation for details.'
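
A minimal sketch of how the new optional `imp_scf.remote_data_gf` input added in imp_BdG.py might be supplied when setting up the BdG impurity workflow. Only the input names `kkr`, `kkrimp`, `imp_scf.remote_data_host` and `imp_scf.remote_data_gf` come from the patch above; the entry point string 'kkr.imp_BdG', the code labels and the node PKs are assumed placeholders, not something this patch defines.

    # hedged sketch, assuming a configured AiiDA profile and existing nodes
    from aiida import load_profile, orm
    from aiida.plugins import WorkflowFactory

    load_profile()

    kkr_imp_BdG_wc = WorkflowFactory('kkr.imp_BdG')  # entry point name assumed
    builder = kkr_imp_BdG_wc.get_builder()

    builder.kkr = orm.load_code('kkrhost@localhost')    # placeholder code labels
    builder.kkrimp = orm.load_code('kkrimp@localhost')

    # remote_data_host as before; remote_data_gf is the new optional input that
    # lets the impurity scf step reuse a precomputed host Green function
    builder.imp_scf.remote_data_host = orm.load_node(1234)  # converged host calc (placeholder PK)
    builder.imp_scf.remote_data_gf = orm.load_node(5678)    # GF writeout remote (placeholder PK)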