diff --git a/.github/workflows/alchemical-model-tests.yml b/.github/workflows/alchemical-model-tests.yml
new file mode 100644
index 000000000..3b4d088b1
--- /dev/null
+++ b/.github/workflows/alchemical-model-tests.yml
@@ -0,0 +1,36 @@
+name: Alchemical Model tests
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    # Check all PRs
+
+jobs:
+  tests:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        include:
+          - os: ubuntu-22.04
+            python-version: "3.11"
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - run: pip install tox
+
+      - name: run Alchemical Model tests
+        run: tox -e alchemical-model-tests
+        env:
+          # Use the CPU-only version of torch when building/running the code
+          PIP_EXTRA_INDEX_URL: https://download.pytorch.org/whl/cpu
+
+      - name: Upload code coverage
+        uses: codecov/codecov-action@v3
+        with:
+          files: ./tests/coverage.xml
diff --git a/README.rst b/README.rst
index 3f43f7831..c064281dc 100644
--- a/README.rst
+++ b/README.rst
@@ -65,6 +65,10 @@ atomistic model.
   * - SOAP BPNN
     - A Behler-Parrinello neural network with SOAP features
 
+  * - Alchemical Model
+    - A Behler-Parrinello neural network with SOAP features
+      and Alchemical Compression of the composition space
+
 .. marker-documentation
 
 Documentation
diff --git a/docs/src/architectures/alchemical-model.rst b/docs/src/architectures/alchemical-model.rst
new file mode 100644
index 000000000..dbbac0761
--- /dev/null
+++ b/docs/src/architectures/alchemical-model.rst
@@ -0,0 +1,37 @@
+.. _architecture-alchemical-model:
+
+Alchemical Model
+================
+
+This is an implementation of the Alchemical Model: a Behler-Parrinello neural network
+:footcite:p:`behler_generalized_2007` with Smooth Overlap of Atomic Positions (SOAP)
+features :footcite:p:`bartok_representing_2013` and Alchemical Compression of the
+composition space :footcite:p:`willatt_feature_2018, lopanitsyna_modeling_2023,
+mazitov_surface_2024`. This model is particularly useful for simulating systems
+containing a large number of chemical elements.
+
+
+Installation
+------------
+
+To install the package, you can run the following command in the root
+directory of the repository:
+
+.. code-block:: bash
+
+    pip install .[alchemical-model]
+
+This will install the package with the Alchemical Model dependencies.
+
+
+Hyperparameters
+---------------
+
+The hyperparameters (and their default values) for the Alchemical Model are:
+
+.. literalinclude:: ../../../src/metatensor/models/cli/conf/architecture/experimental.alchemical_model.yaml
+   :language: yaml
+
+Any of these hyperparameters can be overridden in the training parameter file.
+
+
diff --git a/docs/src/dev-docs/utils/index.rst b/docs/src/dev-docs/utils/index.rst
index fb3c33a5c..3b8390ed5 100644
--- a/docs/src/dev-docs/utils/index.rst
+++ b/docs/src/dev-docs/utils/index.rst
@@ -12,4 +12,5 @@ This is the API for the ``utils`` module of ``metatensor-models``.
model-io omegaconf combine_dataloaders - neighbor_lists + neighbors_lists + normalize diff --git a/docs/src/dev-docs/utils/neighbor_lists.rst b/docs/src/dev-docs/utils/neighbors_lists.rst similarity index 75% rename from docs/src/dev-docs/utils/neighbor_lists.rst rename to docs/src/dev-docs/utils/neighbors_lists.rst index b1b65a9a6..258295d74 100644 --- a/docs/src/dev-docs/utils/neighbor_lists.rst +++ b/docs/src/dev-docs/utils/neighbors_lists.rst @@ -3,7 +3,7 @@ Neighbor lists Utilities to attach neighbor lists to a ``metatensor.torch.atomistic.System`` object. -.. automodule:: metatensor.models.utils.neighbor_list +.. automodule:: metatensor.models.utils.neighbors_lists :members: :undoc-members: :show-inheritance: diff --git a/docs/src/dev-docs/utils/normalize.rst b/docs/src/dev-docs/utils/normalize.rst new file mode 100644 index 000000000..3328960e2 --- /dev/null +++ b/docs/src/dev-docs/utils/normalize.rst @@ -0,0 +1,9 @@ +Normalization +============= + +Utilities to normalize the target values used for training. + +.. automodule:: metatensor.models.utils.normalize + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/static/refs.bib b/docs/static/refs.bib index 958face49..3b74b9dfa 100644 --- a/docs/static/refs.bib +++ b/docs/static/refs.bib @@ -41,3 +41,28 @@ @article{willatt_feature_2018 urldate = {2024-02-19}, langid = {english} } + +@article{mazitov_surface_2024, + author={Mazitov, Arslan and Springer, Maximilian A. and Lopanitsyna, Nataliya and Fraux, Guillaume and De, Sandip and Ceriotti, Michele}, + title={Surface segregation in high-entropy alloys from alchemical machine learning}, + journal={Journal of Physics: Materials}, + url={http://iopscience.iop.org/article/10.1088/2515-7639/ad2983}, + year={2024}, + abstract={High-entropy alloys (HEAs), containing several metallic elements in near-equimolar proportions, have long been of interest for their unique mechanical properties. More recently, they have emerged as a promising platform for the development of novel heterogeneous catalysts, because of the large design space, and the synergistic effects between their components. In this work we use a machine-learning potential that can model simultaneously up to 25 transition metals to study the tendency of different elements to segregate at the surface of a HEA. We use as a starting point a potential that was previously developed using exclusively crystalline bulk phases, and show that, thanks to the physically-inspired functional form of the model, adding a much smaller number of defective configurations makes it capable of describing surface phenomena. We then present several computational studies of surface segregation, including both a simulation of a 25-element alloy, that provides a rough estimate of the relative surface propensity of the various elements, and targeted studies of CoCrFeMnNi and IrFeCoNiCu, which provide further validation of the model, and insights to guide the modeling and design of alloys for heterogeneous catalysis.} +} + +@article{lopanitsyna_modeling_2023, + title = {Modeling high-entropy transition metal alloys with alchemical compression}, + author = {Lopanitsyna, Nataliya and Fraux, Guillaume and Springer, Maximilian A. and De, Sandip and Ceriotti, Michele}, + journal = {Phys. Rev. 
Mater.}, + volume = {7}, + issue = {4}, + pages = {045802}, + numpages = {15}, + year = {2023}, + month = {Apr}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevMaterials.7.045802}, + url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.7.045802} +} + diff --git a/examples/alchemical_model/README.rst b/examples/alchemical_model/README.rst new file mode 100644 index 000000000..9eed9c577 --- /dev/null +++ b/examples/alchemical_model/README.rst @@ -0,0 +1,2 @@ +Basic usage of the Alchemical Model CLI +======================================== diff --git a/examples/alchemical_model/alchemical_reduced_10.xyz b/examples/alchemical_model/alchemical_reduced_10.xyz new file mode 120000 index 000000000..ce6eef30d --- /dev/null +++ b/examples/alchemical_model/alchemical_reduced_10.xyz @@ -0,0 +1 @@ +../../tests/resources/alchemical_reduced_10.xyz \ No newline at end of file diff --git a/examples/alchemical_model/eval.yaml b/examples/alchemical_model/eval.yaml new file mode 100644 index 000000000..48402197f --- /dev/null +++ b/examples/alchemical_model/eval.yaml @@ -0,0 +1,6 @@ +structures: "alchemical_reduced_10.xyz" # file where the positions are stored +targets: + energy: + key: "energy" # name of the target value + forces: false + stress: false diff --git a/examples/alchemical_model/options.yaml b/examples/alchemical_model/options.yaml new file mode 100644 index 000000000..422292d07 --- /dev/null +++ b/examples/alchemical_model/options.yaml @@ -0,0 +1,18 @@ +# architecture used to train the model +architecture: + name: experimental.alchemical_model + training: + num_epochs: 10 + +# Mandatory section defining the parameters for structure and target data of the +# training set +training_set: + structures: "alchemical_reduced_10.xyz" # file where the positions are stored + targets: + energy: + key: "energy" # name of the target value + forces: false + stress: false + +test_set: 0.1 # 10 % of the training_set are randomly split and taken for test set +validation_set: 0.1 # 10 % of the training_set are randomly split and for validation diff --git a/pyproject.toml b/pyproject.toml index 63454400c..5ddd0aa7c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,9 @@ build-backend = "setuptools.build_meta" [project.optional-dependencies] soap-bpnn = [] +alchemical-model = [ + "torch_alchemical @ git+https://github.com/abmazitov/torch_alchemical.git@fafb0bd", +] [tool.setuptools.packages.find] where = ["src"] diff --git a/src/metatensor/models/cli/conf/architecture/experimental.alchemical_model.yaml b/src/metatensor/models/cli/conf/architecture/experimental.alchemical_model.yaml new file mode 100644 index 000000000..c0fe79da1 --- /dev/null +++ b/src/metatensor/models/cli/conf/architecture/experimental.alchemical_model.yaml @@ -0,0 +1,23 @@ +# default hyperparameters for the Alchemical Model +name: alchemical_model + +model: + soap: + num_pseudo_species: 4 + cutoff_radius: 5.0 + basis_cutoff: 400 # controls how large the radial-angular basis is + radial_basis_type: 'physical' # 'physical' or 'le' + basis_scale: 3.0 # controls the initial scale of the physical basis (in Angstroms, does not affect the le basis) + trainable_basis: true # whether the radial basis is trainable (i.e. 
contains a small NN) + + bpnn: + num_hidden_layers: 2 + num_neurons_per_layer: 32 + activation_function: SiLU # only SiLU is supported for the moment + +training: + batch_size: 8 + num_epochs: 100 + learning_rate: 0.001 + log_interval: 10 + checkpoint_interval: 25 diff --git a/src/metatensor/models/cli/eval_model.py b/src/metatensor/models/cli/eval_model.py index 44362a5c7..69af2fdce 100644 --- a/src/metatensor/models/cli/eval_model.py +++ b/src/metatensor/models/cli/eval_model.py @@ -14,7 +14,7 @@ from ..utils.info import finalize_aggregated_info, update_aggregated_info from ..utils.loss import TensorMapDictLoss from ..utils.model_io import load_exported_model -from ..utils.neighbor_list import attach_neighbor_lists +from ..utils.neighbors_lists import get_system_with_neighbors_lists from ..utils.omegaconf import expand_dataset_config from .formatter import CustomHelpFormatter @@ -74,7 +74,7 @@ def _eval_targets(model, dataset: Union[_BaseDataset, torch.utils.data.Subset]) dataset, batch_size=1, collate_fn=collate_fn ) for (structure,), _ in dataloader: - attach_neighbor_lists(structure, requested_neighbor_lists) + get_system_with_neighbors_lists(structure, requested_neighbor_lists) # Extract all the possible outputs and their gradients from the dataset: outputs_dict = get_outputs_dict([dataset]) @@ -165,17 +165,15 @@ def eval_model( logging.basicConfig(level=logging.INFO, format="%(message)s") logger.info("Setting up evaluation set.") - dtype = next(model.parameters()).dtype options = expand_dataset_config(options) eval_structures = read_structures( filename=options["structures"]["read_from"], fileformat=options["structures"]["file_format"], - dtype=dtype, ) # Predict targets if hasattr(options, "targets"): - eval_targets = read_targets(conf=options["targets"], dtype=dtype) + eval_targets = read_targets(options["targets"]) eval_dataset = Dataset(structure=eval_structures, energy=eval_targets["energy"]) _eval_targets(model, eval_dataset) @@ -185,7 +183,9 @@ def eval_model( if not hasattr(options, "targets"): # otherwise, the NLs will have been computed for the RMSE calculations above eval_structures = [ - attach_neighbor_lists(structure, model.requested_neighbors_lists()) + get_system_with_neighbors_lists( + structure, model.requested_neighbors_lists() + ) for structure in eval_structures ] eval_options = ModelEvaluationOptions( diff --git a/src/metatensor/models/experimental/alchemical_model/__init__.py b/src/metatensor/models/experimental/alchemical_model/__init__.py new file mode 100644 index 000000000..ff9a77daf --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/__init__.py @@ -0,0 +1,2 @@ +from .model import Model, DEFAULT_HYPERS # noqa: F401 +from .train import train # noqa: F401 diff --git a/src/metatensor/models/experimental/alchemical_model/model.py b/src/metatensor/models/experimental/alchemical_model/model.py new file mode 100644 index 000000000..1405e4d83 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/model.py @@ -0,0 +1,295 @@ +from typing import Dict, List, Optional, Union + +import metatensor.torch +import numpy as np +import torch +from metatensor.torch import Labels, TensorMap +from metatensor.torch.atomistic import ( + ModelCapabilities, + ModelOutput, + NeighborsListOptions, + System, +) +from omegaconf import OmegaConf +from torch_alchemical.nn import AlchemicalEmbedding, LayerNorm, MultiChannelLinear, SiLU +from torch_alchemical.nn.power_spectrum import PowerSpectrum +from torch_alchemical.operations import 
sum_over_components +from torch_spex.spherical_expansions import SphericalExpansion + +from ... import ARCHITECTURE_CONFIG_PATH +from ...utils.composition import apply_composition_contribution +from ...utils.normalize import apply_normalization +from .utils import systems_to_torch_spex_dict + + +DEFAULT_HYPERS = OmegaConf.to_container( + OmegaConf.load(ARCHITECTURE_CONFIG_PATH / "experimental.alchemical_model.yaml") +) + +DEFAULT_MODEL_HYPERS = DEFAULT_HYPERS["model"] + +ARCHITECTURE_NAME = "experimental.alchemical_model" + + +class AlchemicalSoapCalculator(torch.nn.Module): + def __init__( + self, + all_species: Union[list, np.ndarray], + cutoff_radius: float, + basis_cutoff: float, + radial_basis_type: str = "le", + basis_scale: float = 3.0, + trainable_basis: bool = True, + basis_normalization_factor: Optional[float] = None, + num_pseudo_species: Optional[int] = None, + ): + super().__init__() + if isinstance(all_species, np.ndarray): + all_species = all_species.tolist() + self.all_species = all_species + self.cutoff_radius = cutoff_radius + self.basis_cutoff = basis_cutoff + self.basis_scale = basis_scale + self.radial_basis_type = radial_basis_type + self.basis_normalization_factor = basis_normalization_factor + self.trainable_basis = trainable_basis + self.num_pseudo_species = num_pseudo_species + hypers = { + "cutoff radius": self.cutoff_radius, + "radial basis": { + "type": self.radial_basis_type, + "E_max": self.basis_cutoff, + "mlp": self.trainable_basis, + "scale": self.basis_scale, + "cost_trade_off": False, + }, + } + if self.num_pseudo_species is not None: + hypers["alchemical"] = self.num_pseudo_species + if self.basis_normalization_factor: + hypers["normalize"] = self.basis_normalization_factor + self.spex_calculator = SphericalExpansion( + hypers=hypers, + all_species=self.all_species, + ) + self.l_max = self.spex_calculator.vector_expansion_calculator.l_max + self.ps_calculator = PowerSpectrum(self.l_max, all_species) + + def forward(self, systems: List[System]): + batch_dict = systems_to_torch_spex_dict(systems) + spex = self.spex_calculator( + positions=batch_dict["positions"], + cells=batch_dict["cells"], + species=batch_dict["species"], + cell_shifts=batch_dict["cell_shifts"], + centers=batch_dict["centers"], + pairs=batch_dict["pairs"], + structure_centers=batch_dict["structure_centers"], + structure_pairs=batch_dict["structure_pairs"], + structure_offsets=batch_dict["structure_offsets"], + ) + power_spectrum = self.ps_calculator(spex) + return power_spectrum + + @property + def num_features(self): + vex_calculator = self.spex_calculator.vector_expansion_calculator + n_max = vex_calculator.radial_basis_calculator.n_max_l + l_max = len(n_max) - 1 + n_feat = sum( + [n_max[l_ch] ** 2 * self.num_pseudo_species**2 for l_ch in range(l_max + 1)] + ) + return n_feat + + +class Model(torch.nn.Module): + def __init__( + self, capabilities: ModelCapabilities, hypers: Dict = DEFAULT_MODEL_HYPERS + ) -> None: + super().__init__() + self.name = ARCHITECTURE_NAME + + # Check capabilities + for output in capabilities.outputs.values(): + if output.quantity != "energy": + raise ValueError( + "Alchemical Model only supports energy-like outputs, " + f"but a {output.quantity} was provided" + ) + if output.per_atom: + raise ValueError( + "Alchemical Model only supports per-structure outputs, " + "but a per-atom output was provided" + ) + + self.capabilities = capabilities + self.all_species = capabilities.species + self.hypers = hypers + self.cutoff_radius = 
self.hypers["soap"]["cutoff_radius"] + + # creates a composition weight tensor that can be directly indexed by species, + # this can be left as a tensor of zero or set from the outside using + # set_composition_weights (recommended for better accuracy) + n_outputs = len(capabilities.outputs) + self.register_buffer( + "composition_weights", torch.zeros((n_outputs, max(self.all_species) + 1)) + ) + # creates a normalization factor for energies + # this can be left as a tensor of 1.0 or set from the outside using + # set_normalization_factor (recommended for better accuracy) + self.register_buffer( + "normalization_factor", torch.tensor(1.0, dtype=torch.float32) + ) + # buffers cannot be indexed by strings (torchscript), so we create a single + # tensor for all output. Due to this, we need to slice the tensor when we use + # it and use the output name to select the correct slice via a dictionary + self.output_to_index = { + output_name: i for i, output_name in enumerate(capabilities.outputs.keys()) + } + + # TODO Inject basis_normalization_factor and device into the hypers + self.soap_features_layer = AlchemicalSoapCalculator( + all_species=self.all_species, **hypers["soap"] + ) + + self.num_pseudo_species = hypers["soap"]["num_pseudo_species"] + ps_input_size = self.soap_features_layer.num_features + + self.layer_norm = LayerNorm(ps_input_size) + vex_calculator = ( + self.soap_features_layer.spex_calculator.vector_expansion_calculator + ) + contraction_layer = vex_calculator.radial_basis_calculator.combination_matrix + self.embedding = AlchemicalEmbedding( + unique_numbers=self.all_species, + num_pseudo_species=self.num_pseudo_species, + contraction_layer=contraction_layer, + ) + + num_hidden_layers = hypers["bpnn"]["num_hidden_layers"] + num_neurons_per_layer = hypers["bpnn"]["num_neurons_per_layer"] + activation_function = hypers["bpnn"]["activation_function"] + if activation_function == "SiLU": + self.activation_function = SiLU() + else: + raise ValueError( + f"Activation function {activation_function} not supported." + ) + + layer_size = [ps_input_size] + [num_neurons_per_layer] * num_hidden_layers + bpnn_layers = [] + for layer_index in range(1, len(layer_size)): + bpnn_layers.append( + MultiChannelLinear( + in_features=layer_size[layer_index - 1], + out_features=layer_size[layer_index], + num_channels=self.num_pseudo_species, + bias=False, + ) + ) + bpnn_layers.append(self.activation_function) + + n_inputs_last_layer = layer_size[-1] + n_outputs_last_layer = 1 + + self.bpnn = torch.nn.ModuleList(bpnn_layers) + + self.last_layers = torch.nn.ModuleDict( + { + output_name: MultiChannelLinear( + in_features=n_inputs_last_layer, + out_features=n_outputs_last_layer, + num_channels=self.num_pseudo_species, + bias=False, + ) + for output_name in capabilities.outputs.keys() + } + ) + + def requested_neighbors_lists( + self, + ) -> List[NeighborsListOptions]: + return [ + NeighborsListOptions( + model_cutoff=self.cutoff_radius, + full_list=True, + ) + ] + + def forward( + self, + systems: List[System], + outputs: Dict[str, ModelOutput], + selected_atoms: Optional[Labels] = None, + ) -> Dict[str, TensorMap]: + if selected_atoms is not None: + raise NotImplementedError( + "Alchemical Model does not support selected atoms." 
+ ) + + soap_features = self.soap_features_layer(systems) + soap_features = self.layer_norm(soap_features) + hidden_features = self.embedding(soap_features) + + for layer in self.bpnn: + hidden_features = layer(hidden_features) + + atomic_energies: Dict[str, TensorMap] = {} + for output_name, output_layer in self.last_layers.items(): + if output_name in outputs: + normalization_factor = self.normalization_factor * torch.sqrt( + torch.tensor(self.num_pseudo_species) + ) + atomic_energies[output_name] = apply_composition_contribution( + apply_normalization( + sum_over_components(output_layer(hidden_features)), + normalization_factor, + ), + self.composition_weights[self.output_to_index[output_name]], + ) + + total_energies: Dict[str, TensorMap] = {} + for output_name, atomic_energy in atomic_energies.items(): + atomic_energy = atomic_energy.keys_to_samples("species_center") + total_energies_item = metatensor.torch.sum_over_samples( + atomic_energy, ["center", "species_center"] + ) + total_energies[output_name] = total_energies_item + # Change the energy label from _ to (0, 1): + total_energies[output_name] = TensorMap( + keys=Labels( + names=["lambda", "sigma"], + values=torch.tensor( + [[0, 1]], + device=total_energies[output_name].block(0).values.device, + ), + ), + blocks=[total_energies[output_name].block()], + ) + + return total_energies + + def set_composition_weights( + self, output_name: str, input_composition_weights: torch.Tensor + ) -> None: + """Set the composition weights for a given output.""" + # all species that are not present retain their weight of zero + self.composition_weights[self.output_to_index[output_name]][ + self.all_species + ] = input_composition_weights.to( + dtype=self.composition_weights.dtype, # type: ignore + device=self.composition_weights.device, # type: ignore + ) + + def set_normalization_factor(self, normalization_factor: torch.Tensor) -> None: + """Set the normalization factor for output of the model.""" + self.normalization_factor = normalization_factor + + def set_basis_normalization_factor(self, basis_normalization_factor: torch.Tensor): + """Set the normalization factor for the basis functions of the model.""" + self.soap_features_layer.spex_calculator.normalization_factor = ( + 1.0 / torch.sqrt(basis_normalization_factor) + ) + self.soap_features_layer.spex_calculator.normalization_factor_0 = ( + 1.0 / basis_normalization_factor ** (3 / 4) + ) diff --git a/src/metatensor/models/experimental/alchemical_model/tests/__init__.py b/src/metatensor/models/experimental/alchemical_model/tests/__init__.py new file mode 100644 index 000000000..b6aa045b3 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/tests/__init__.py @@ -0,0 +1,6 @@ +from pathlib import Path + +DATASET_PATH = str( + Path(__file__).parent.resolve() + / "../../../../../../tests/resources/qm9_reduced_100.xyz" +) diff --git a/src/metatensor/models/experimental/alchemical_model/tests/test_functionality.py b/src/metatensor/models/experimental/alchemical_model/tests/test_functionality.py new file mode 100644 index 000000000..7ab7d8114 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/tests/test_functionality.py @@ -0,0 +1,49 @@ +import ase +import rascaline.torch +import torch +from metatensor.torch.atomistic import ( + MetatensorAtomisticModel, + ModelCapabilities, + ModelEvaluationOptions, + ModelOutput, +) + +from metatensor.models.experimental.alchemical_model import DEFAULT_HYPERS, Model +from metatensor.models.utils.neighbors_lists import 
get_system_with_neighbors_lists + + +def test_prediction_subset(): + """Tests that the model can predict on a subset + of the elements it was trained on.""" + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=[1, 6, 7, 8], + outputs={ + "energy": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + + alchemical_model = Model(capabilities, DEFAULT_HYPERS["model"]).to(torch.float64) + structure = ase.Atoms("O2", positions=[[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]]) + system = rascaline.torch.systems_to_torch(structure) + system = get_system_with_neighbors_lists( + system, alchemical_model.requested_neighbors_lists() + ) + + evaluation_options = ModelEvaluationOptions( + length_unit=capabilities.length_unit, + outputs=capabilities.outputs, + ) + + model = MetatensorAtomisticModel( + alchemical_model.eval(), alchemical_model.capabilities + ) + model( + [system], + evaluation_options, + check_consistency=True, + ) diff --git a/src/metatensor/models/experimental/alchemical_model/tests/test_invariance.py b/src/metatensor/models/experimental/alchemical_model/tests/test_invariance.py new file mode 100644 index 000000000..b3e8856b3 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/tests/test_invariance.py @@ -0,0 +1,67 @@ +import copy + +import ase.io +import rascaline.torch +import torch +from metatensor.torch.atomistic import ( + MetatensorAtomisticModel, + ModelCapabilities, + ModelEvaluationOptions, + ModelOutput, +) + +from metatensor.models.experimental.alchemical_model import DEFAULT_HYPERS, Model +from metatensor.models.utils.neighbors_lists import get_system_with_neighbors_lists + +from . import DATASET_PATH + + +def test_rotational_invariance(): + """Tests that the model is rotationally invariant.""" + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=[1, 6, 7, 8], + outputs={ + "energy": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + alchemical_model = Model(capabilities, DEFAULT_HYPERS["model"]).to(torch.float64) + structure = ase.io.read(DATASET_PATH) + original_structure = copy.deepcopy(structure) + structure.rotate(48, "y") + original_system = rascaline.torch.systems_to_torch(original_structure) + original_system = get_system_with_neighbors_lists( + original_system, alchemical_model.requested_neighbors_lists() + ) + system = rascaline.torch.systems_to_torch(structure) + system = get_system_with_neighbors_lists( + system, alchemical_model.requested_neighbors_lists() + ) + + evaluation_options = ModelEvaluationOptions( + length_unit=capabilities.length_unit, + outputs=capabilities.outputs, + ) + + model = MetatensorAtomisticModel( + alchemical_model.eval(), alchemical_model.capabilities + ) + original_output = model( + [original_system], + evaluation_options, + check_consistency=True, + ) + rotated_output = model( + [system], + evaluation_options, + check_consistency=True, + ) + + assert torch.allclose( + original_output["energy"].block().values, + rotated_output["energy"].block().values, + ) diff --git a/src/metatensor/models/experimental/alchemical_model/tests/test_regression.py b/src/metatensor/models/experimental/alchemical_model/tests/test_regression.py new file mode 100644 index 000000000..42fb7700b --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/tests/test_regression.py @@ -0,0 +1,140 @@ +import random + +import ase.io +import numpy as np +import rascaline.torch +import torch +from metatensor.learn.data import Dataset +from metatensor.torch.atomistic import ( + 
MetatensorAtomisticModel, + ModelCapabilities, + ModelEvaluationOptions, + ModelOutput, +) +from omegaconf import OmegaConf + +from metatensor.models.experimental.alchemical_model import DEFAULT_HYPERS, Model, train +from metatensor.models.utils.data import get_all_species +from metatensor.models.utils.data.readers import read_structures, read_targets +from metatensor.models.utils.neighbors_lists import get_system_with_neighbors_lists + +from . import DATASET_PATH + + +def test_regression_init(): + """Perform a regression test on the model at initialization""" + + # reproducibility + random.seed(0) + np.random.seed(0) + torch.manual_seed(0) + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=[1, 6, 7, 8], + outputs={ + "U0": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + alchemical_model = Model(capabilities, DEFAULT_HYPERS["model"]).to(torch.float64) + + # Predict on the first fivestructures + structures = ase.io.read(DATASET_PATH, ":5") + systems = [rascaline.torch.systems_to_torch(structure) for structure in structures] + systems = [ + get_system_with_neighbors_lists( + system, alchemical_model.requested_neighbors_lists() + ) + for system in systems + ] + + evaluation_options = ModelEvaluationOptions( + length_unit=capabilities.length_unit, + outputs=capabilities.outputs, + ) + + model = MetatensorAtomisticModel( + alchemical_model.eval(), alchemical_model.capabilities + ) + output = model( + systems, + evaluation_options, + check_consistency=True, + ) + + expected_output = torch.tensor( + [[-3.2638e-05], [3.3788e-04], [2.7429e-04], [2.7850e-03], [4.7172e-04]], + dtype=torch.float64, + ) + + assert torch.allclose(output["U0"].block().values, expected_output, rtol=1e-3) + + +def test_regression_train(): + """Perform a regression test on the model when + trained for 2 epoch on a small dataset""" + + # reproducibility + random.seed(0) + np.random.seed(0) + torch.manual_seed(0) + + structures = read_structures(DATASET_PATH) + conf = { + "U0": { + "quantity": "energy", + "read_from": DATASET_PATH, + "file_format": ".xyz", + "key": "U0", + "forces": False, + "stress": False, + "virial": False, + } + } + targets = read_targets(OmegaConf.create(conf)) + dataset = Dataset(structure=structures, U0=targets["U0"]) + + hypers = DEFAULT_HYPERS.copy() + hypers["training"]["num_epochs"] = 2 + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=get_all_species(dataset), + outputs={ + "U0": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + alchemical_model = train( + train_datasets=[dataset], + validation_datasets=[dataset], + requested_capabilities=capabilities, + hypers=hypers, + ) + + # Predict on the first five structures + evaluation_options = ModelEvaluationOptions( + length_unit=alchemical_model.capabilities.length_unit, + outputs=alchemical_model.capabilities.outputs, + ) + + model = MetatensorAtomisticModel( + alchemical_model.eval(), alchemical_model.capabilities + ) + output = model( + structures[:5], + evaluation_options, + check_consistency=True, + ) + + expected_output = torch.tensor( + [[-40.4833], [-56.5604], [-76.4256], [-77.3501], [-93.4282]], + dtype=torch.float64, + ) + + assert torch.allclose(output["U0"].block().values, expected_output, rtol=1e-3) diff --git a/src/metatensor/models/experimental/alchemical_model/tests/test_torchscript.py b/src/metatensor/models/experimental/alchemical_model/tests/test_torchscript.py new file mode 100644 index 000000000..951553964 --- /dev/null +++ 
b/src/metatensor/models/experimental/alchemical_model/tests/test_torchscript.py @@ -0,0 +1,49 @@ +import torch # noqa: E402 +from metatensor.torch.atomistic import ModelCapabilities, ModelOutput # noqa: E402 + +from metatensor.models.experimental.alchemical_model import ( # noqa: E402 + DEFAULT_HYPERS, + Model, +) + + +def test_torchscript(): + """Tests that the model can be jitted.""" + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=[1, 6, 7, 8], + outputs={ + "energy": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + alchemical_model = Model(capabilities, DEFAULT_HYPERS["model"]).to(torch.float64) + torch.jit.script( + alchemical_model, {"energy": alchemical_model.capabilities.outputs["energy"]} + ) + + +def test_torchscript_save(): + """Tests that the model can be jitted and saved.""" + + capabilities = ModelCapabilities( + length_unit="Angstrom", + species=[1, 6, 7, 8], + outputs={ + "energy": ModelOutput( + quantity="energy", + unit="eV", + ) + }, + ) + alchemical_model = Model(capabilities, DEFAULT_HYPERS["model"]).to(torch.float64) + torch.jit.save( + torch.jit.script( + alchemical_model, + {"energy": alchemical_model.capabilities.outputs["energy"]}, + ), + "alchemical_model.pt", + ) diff --git a/src/metatensor/models/experimental/alchemical_model/train.py b/src/metatensor/models/experimental/alchemical_model/train.py new file mode 100644 index 000000000..b9d0afdbc --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/train.py @@ -0,0 +1,267 @@ +import logging +from pathlib import Path +from typing import Dict, List, Optional, Tuple, Union + +import torch +from metatensor.learn.data import DataLoader +from metatensor.learn.data.dataset import _BaseDataset +from metatensor.torch.atomistic import ModelCapabilities + +from ...utils.composition import calculate_composition_weights +from ...utils.compute_loss import compute_model_loss +from ...utils.data import ( + check_datasets, + collate_fn, + combine_dataloaders, + get_all_targets, +) +from ...utils.extract_targets import get_outputs_dict +from ...utils.info import finalize_aggregated_info, update_aggregated_info +from ...utils.logging import MetricLogger +from ...utils.loss import TensorMapDictLoss +from ...utils.merge_capabilities import merge_capabilities +from ...utils.model_io import load_checkpoint, save_model +from ...utils.neighbors_lists import get_system_with_neighbors_lists +from ...utils.normalize import ( + get_average_number_of_atoms, + get_average_number_of_neighbors, +) +from .model import DEFAULT_HYPERS, Model + + +logger = logging.getLogger(__name__) + + +def train( + train_datasets: List[Union[_BaseDataset, torch.utils.data.Subset]], + validation_datasets: List[Union[_BaseDataset, torch.utils.data.Subset]], + requested_capabilities: ModelCapabilities, + hypers: Dict = DEFAULT_HYPERS, + continue_from: Optional[str] = None, + output_dir: str = ".", + device_str: str = "cpu", +): + if continue_from is None: + model = Model( + capabilities=requested_capabilities, + hypers=hypers["model"], + ) + new_capabilities = requested_capabilities + else: + model = load_checkpoint(continue_from) + filtered_new_dict = {k: v for k, v in hypers["model"].items() if k != "restart"} + filtered_old_dict = {k: v for k, v in model.hypers.items() if k != "restart"} + if filtered_new_dict != filtered_old_dict: + logger.warn( + "The hyperparameters of the model have changed since the last " + "training run. The new hyperparameters will be discarded." 
+ ) + # merge the model's capabilities with the requested capabilities + merged_capabilities, new_capabilities = merge_capabilities( + model.capabilities, requested_capabilities + ) + model.capabilities = merged_capabilities + # make the new model capable of handling the new outputs + for output_name in new_capabilities.outputs.keys(): + model.add_output(output_name) + + model_capabilities = model.capabilities + + # Perform canonical checks on the datasets: + logger.info("Checking datasets for consistency") + check_datasets( + train_datasets, + validation_datasets, + model_capabilities, + ) + + # Calculating the neighbors lists for the training and validation datasets: + logger.info("Calculating neighbors lists for the datasets") + requested_neighbor_lists = model.requested_neighbors_lists() + for dataset in train_datasets + validation_datasets: + for i in range(len(dataset)): + structure = dataset[i].structure + # The following line attached the neighbors lists to the structure, + # and doesn't require to reassign the structure to the dataset: + _ = get_system_with_neighbors_lists(structure, requested_neighbor_lists) + + # Calculate the average number of atoms and neighbors in the training datasets: + average_number_of_atoms = get_average_number_of_atoms(train_datasets) + average_number_of_neighbors = get_average_number_of_neighbors(train_datasets) + + # Given that currently multiple datasets are not supported, we can assume that: + average_number_of_atoms = average_number_of_atoms[0] + average_number_of_neighbors = average_number_of_neighbors[0] + + # Set the normalization factor for the basis functions of the model: + + # Set the normalization factors for the model: + model.set_normalization_factor(average_number_of_atoms) + model.set_basis_normalization_factor(average_number_of_neighbors) + + logger.info(f"Training on device {device_str}") + if device_str == "gpu": + device_str = "cuda" + device = torch.device(device_str) + if device.type == "cuda": + if not torch.cuda.is_available(): + raise ValueError("CUDA is not available on this machine.") + model.to(device) + + # Calculate and set the composition weights for all targets: + for target_name in new_capabilities.outputs.keys(): + # TODO: warn in the documentation that capabilities that are already + # present in the model won't recalculate the composition weights + # find the datasets that contain the target: + train_datasets_with_target = [] + for dataset in train_datasets: + if target_name in get_all_targets(dataset): + train_datasets_with_target.append(dataset) + if len(train_datasets_with_target) == 0: + raise ValueError( + f"Target {target_name} in the model's new capabilities is not " + "present in any of the training datasets." 
+            )
+        composition_weights = calculate_composition_weights(
+            train_datasets_with_target, target_name
+        )
+        model.set_composition_weights(target_name, composition_weights)
+
+    hypers_training = hypers["training"]
+
+    logger.info("Setting up data loaders")
+
+    # Create dataloader for the training datasets:
+    train_dataloaders = []
+    for dataset in train_datasets:
+        train_dataloaders.append(
+            DataLoader(
+                dataset=dataset,
+                batch_size=hypers_training["batch_size"],
+                shuffle=True,
+                collate_fn=collate_fn,
+            )
+        )
+    train_dataloader = combine_dataloaders(train_dataloaders, shuffle=True)
+
+    # Create dataloader for the validation datasets:
+    validation_dataloaders = []
+    for dataset in validation_datasets:
+        validation_dataloaders.append(
+            DataLoader(
+                dataset=dataset,
+                batch_size=hypers_training["batch_size"],
+                shuffle=False,
+                collate_fn=collate_fn,
+            )
+        )
+    validation_dataloader = combine_dataloaders(validation_dataloaders, shuffle=False)
+
+    # Extract all the possible outputs and their gradients from the training set:
+    outputs_dict = get_outputs_dict(train_datasets)
+    for output_name in outputs_dict.keys():
+        if output_name not in model_capabilities.outputs:
+            raise ValueError(
+                f"Output {output_name} is not in the model's capabilities."
+            )
+
+    # Create a loss weight dict:
+    loss_weights_dict = {}
+    for output_name, value_or_gradient_list in outputs_dict.items():
+        loss_weights_dict[output_name] = {
+            value_or_gradient: 1.0 for value_or_gradient in value_or_gradient_list
+        }
+
+    # Create a loss function:
+    loss_fn = TensorMapDictLoss(loss_weights_dict)
+
+    # Create an optimizer:
+    optimizer = torch.optim.Adam(
+        model.parameters(), lr=hypers_training["learning_rate"]
+    )
+
+    # counters for early stopping:
+    best_validation_loss = float("inf")
+    epochs_without_improvement = 0
+
+    # Train the model:
+    logger.info("Starting training")
+    for epoch in range(hypers_training["num_epochs"]):
+        # aggregated information holders:
+        aggregated_train_info: Dict[str, Tuple[float, int]] = {}
+        aggregated_validation_info: Dict[str, Tuple[float, int]] = {}
+
+        train_loss = 0.0
+        for batch in train_dataloader:
+            optimizer.zero_grad()
+            structures, targets = batch
+            assert len(structures[0].known_neighbors_lists()) > 0
+            loss, info = compute_model_loss(loss_fn, model, structures, targets)
+            train_loss += loss.item()
+            loss.backward()
+            optimizer.step()
+            aggregated_train_info = update_aggregated_info(aggregated_train_info, info)
+        finalized_train_info = finalize_aggregated_info(aggregated_train_info)
+
+        validation_loss = 0.0
+        for batch in validation_dataloader:
+            structures, targets = batch
+            # TODO: specify that the model is not training here to save some autograd
+            loss, info = compute_model_loss(loss_fn, model, structures, targets)
+            validation_loss += loss.item()
+            aggregated_validation_info = update_aggregated_info(
+                aggregated_validation_info, info
+            )
+        finalized_validation_info = finalize_aggregated_info(aggregated_validation_info)
+
+        # Now we log the information:
+        if epoch == 0:
+            metric_logger = MetricLogger(
+                model_capabilities,
+                train_loss,
+                validation_loss,
+                finalized_train_info,
+                finalized_validation_info,
+            )
+        if
epoch % hypers_training["log_interval"] == 0: + metric_logger.log( + epoch, + train_loss, + validation_loss, + finalized_train_info, + finalized_validation_info, + ) + + if epoch % hypers_training["checkpoint_interval"] == 0: + save_model( + model, + Path(output_dir) / f"model_{epoch}.pt", + ) + + # early stopping criterion: + if validation_loss < best_validation_loss: + best_validation_loss = validation_loss + epochs_without_improvement = 0 + else: + epochs_without_improvement += 1 + if epochs_without_improvement >= 50: + logger.info( + "Early stopping criterion reached after 50 " + "epochs without improvement." + ) + break + + return model diff --git a/src/metatensor/models/experimental/alchemical_model/utils/__init__.py b/src/metatensor/models/experimental/alchemical_model/utils/__init__.py new file mode 100644 index 000000000..a3dc8d7e7 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/utils/__init__.py @@ -0,0 +1,5 @@ +from .systems_to_torch_spex_dict import systems_to_torch_spex_dict + +__all__ = [ + "systems_to_torch_spex_dict", +] diff --git a/src/metatensor/models/experimental/alchemical_model/utils/systems_to_torch_spex_dict.py b/src/metatensor/models/experimental/alchemical_model/utils/systems_to_torch_spex_dict.py new file mode 100644 index 000000000..63e46e3d6 --- /dev/null +++ b/src/metatensor/models/experimental/alchemical_model/utils/systems_to_torch_spex_dict.py @@ -0,0 +1,65 @@ +from typing import List, Optional + +import torch +from metatensor.torch.atomistic import NeighborsListOptions, System + + +def systems_to_torch_spex_dict( + systems: List[System], nl_options: Optional[NeighborsListOptions] = None +): + """ + Convert a list of metatensor.torch.atomistic.Systems to a dictionary of torch + tensors compatible with torch_spex calculators. 
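+
+    :param systems: A list of ``metatensor.torch.atomistic.System`` objects to be
+        converted.
+    :param nl_options: The ``NeighborsListOptions`` used to select the neighbor
+        list of each system; if ``None``, the first neighbors list already attached
+        to the first system is used for all systems.
+    :return: A dictionary with the entries expected by the torch_spex
+        ``SphericalExpansion`` calculator: ``positions``, ``cells``, ``species``,
+        ``centers``, ``pairs``, ``cell_shifts``, ``structure_centers``,
+        ``structure_pairs`` and ``structure_offsets``.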
+    """
+    device = systems[0].positions.device
+    positions = torch.cat([item.positions for item in systems])
+    cells = torch.stack([item.cell for item in systems])
+    species = torch.cat([item.species for item in systems])
+    centers = torch.cat([torch.arange(len(item), device=device) for item in systems])
+    if nl_options is None:
+        nl_options = systems[0].known_neighbors_lists()[0]
+    nls = [item.get_neighbors_list(nl_options) for item in systems]
+    pairs = torch.cat(
+        [
+            torch.stack(
+                (item.samples.column("first_atom"), item.samples.column("second_atom"))
+            )
+            for item in nls
+        ],
+        dim=1,
+    ).T
+    cell_shifts = torch.cat(
+        [
+            torch.stack(
+                (
+                    item.samples.column("cell_shift_a"),
+                    item.samples.column("cell_shift_b"),
+                    item.samples.column("cell_shift_c"),
+                )
+            )
+            for item in nls
+        ],
+        dim=1,
+    ).T
+
+    lengths = torch.tensor([len(item) for item in systems], device=device)
+    nl_lengths = torch.tensor([len(item.values) for item in nls], device=device)
+    index = torch.arange(len(systems), device=device)
+    structure_centers = torch.repeat_interleave(index, lengths)
+    structure_pairs = torch.repeat_interleave(index, nl_lengths)
+    structure_offsets = torch.cat(
+        [torch.tensor([0], device=device), torch.cumsum(lengths[:-1], dim=0)]
+    )
+
+    batch_dict = {
+        "positions": positions,
+        "cells": cells,
+        "species": species,
+        "centers": centers,
+        "pairs": pairs,
+        "cell_shifts": cell_shifts,
+        "structure_centers": structure_centers,
+        "structure_pairs": structure_pairs,
+        "structure_offsets": structure_offsets,
+    }
+    return batch_dict
diff --git a/src/metatensor/models/utils/model_io.py b/src/metatensor/models/utils/model_io.py
index 54f32e726..2aa45f170 100644
--- a/src/metatensor/models/utils/model_io.py
+++ b/src/metatensor/models/utils/model_io.py
@@ -3,9 +3,18 @@
 from pathlib import Path
 from typing import Union
 
+# The following imports are necessary to avoid C++ related errors
+# when loading the model from a checkpoint
+import metatensor.torch  # noqa: F401
 import torch
 
 
+try:
+    import sphericart.torch  # noqa: F401
+except ImportError:
+    pass
+
+
 def save_model(
     model: torch.nn.Module,
     path: Union[str, Path],
diff --git a/src/metatensor/models/utils/neighbor_list.py b/src/metatensor/models/utils/neighbors_lists.py
similarity index 88%
rename from src/metatensor/models/utils/neighbor_list.py
rename to src/metatensor/models/utils/neighbors_lists.py
index 86ee0b13d..65c671618 100644
--- a/src/metatensor/models/utils/neighbor_list.py
+++ b/src/metatensor/models/utils/neighbors_lists.py
@@ -10,7 +10,7 @@
 )
 
 
-def attach_neighbor_lists(
+def get_system_with_neighbors_lists(
     system: System, neighbor_lists: List[NeighborsListOptions]
 ) -> System:
     """Attaches neighbor lists to a `System` object.
@@ -35,9 +35,12 @@ def attach_neighbor_lists( # Compute the neighbor lists for options in neighbor_lists: - neighbor_list = _compute_single_neighbor_list(atoms, options) - register_autograd_neighbors(system, neighbor_list) - system.add_neighbors_list(options, neighbor_list) + if options not in system.known_neighbors_lists(): + neighbor_list = _compute_single_neighbor_list(atoms, options).to( + device=system.device, dtype=system.dtype + ) + register_autograd_neighbors(system, neighbor_list) + system.add_neighbors_list(options, neighbor_list) return system diff --git a/src/metatensor/models/utils/normalize.py b/src/metatensor/models/utils/normalize.py new file mode 100644 index 000000000..4eb11cbf0 --- /dev/null +++ b/src/metatensor/models/utils/normalize.py @@ -0,0 +1,88 @@ +from typing import List, Union + +import torch +from metatensor.learn.data.dataset import Dataset +from metatensor.torch import TensorBlock, TensorMap + + +def get_average_number_of_atoms( + datasets: List[Union[Dataset, torch.utils.data.Subset]] +): + """Calculates the average number of atoms in a dataset. + + :param datasets: A list of datasets. + + :return: A `torch.Tensor` object with the average number of atoms. + """ + average_number_of_atoms = [] + for dataset in datasets: + num_atoms = [] + for i in range(len(dataset)): + structure = dataset[i].structure + num_atoms.append(len(structure)) + average_number_of_atoms.append( + torch.mean(torch.tensor(num_atoms).to(torch.get_default_dtype())) + ) + return torch.tensor(average_number_of_atoms) + + +def get_average_number_of_neighbors( + datasets: List[Union[Dataset, torch.utils.data.Subset]] +) -> torch.Tensor: + """Calculate the average number of neighbors in a dataset. + + :param datasets: A list of datasets. + + :return: A `torch.Tensor` object with the average number of neighbors. + """ + average_number_of_neighbors = [] + for dataset in datasets: + num_neighbors = [] + for i in range(len(dataset)): + structure = dataset[i].structure + known_neighbors_lists = structure.known_neighbors_lists() + if len(known_neighbors_lists) == 0: + raise ValueError( + f"Structure {structure} does not have a neighbors list" + ) + elif len(known_neighbors_lists) > 1: + raise ValueError( + "More than one neighbors list per structure is not yet supported" + ) + nl = structure.get_neighbors_list(known_neighbors_lists[0]) + num_neighbors.append( + torch.mean( + torch.unique(nl.samples["first_atom"], return_counts=True)[1].to( + torch.get_default_dtype() + ) + ) + ) + average_number_of_neighbors.append(torch.mean(torch.tensor(num_neighbors))) + return torch.tensor(average_number_of_neighbors) + + +def apply_normalization( + atomic_property: TensorMap, normalization: torch.Tensor +) -> TensorMap: + """Applies the normalization to an atomic property by dividing the + atomic property by a normalization factor. + + :param atomic_property: A `TensorMap` with atomic property to be normalized. + :param normalization: A `torch.Tensor` object with the normalization factor. + + :return: A `TensorMap` object with the normalized atomic property. 
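+
+    For example, ``apply_normalization(atomic_property, torch.tensor(2.0))``
+    returns a new ``TensorMap`` whose block values are those of ``atomic_property``
+    divided by 2, with samples, components and properties left unchanged.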
+ """ + + new_blocks: List[TensorBlock] = [] + for _, block in atomic_property.items(): + new_values = block.values / normalization + new_blocks.append( + TensorBlock( + values=new_values, + samples=block.samples, + components=block.components, + properties=block.properties, + ) + ) + + return TensorMap(keys=atomic_property.keys, blocks=new_blocks) diff --git a/tests/resources/alchemical_reduced_10.xyz b/tests/resources/alchemical_reduced_10.xyz index b7820449e..e2b26a3da 100644 --- a/tests/resources/alchemical_reduced_10.xyz +++ b/tests/resources/alchemical_reduced_10.xyz @@ -1,205 +1,217 @@ 36 -Lattice="10.684835525 0.0 0.0 0.0 10.684835525 0.0 0.0 0.0 7.123223683" Properties=species:S:1:pos:R:3:forces:R:3 class=3 scale=1.0625018098857693 fps_idx=-1 name=3709 energy=-274.79272153 stress="0.08617869478775508 -0.004215009701382356 0.001135424132635053 -0.004215009701382356 0.08703453676060532 -5.7091083974454166e-05 0.001135424132635053 -5.7091083974454166e-05 0.08197792821019799" free_energy=-274.80386702 pbc="T T T" -Sc 8.90403000 5.34242000 5.34242000 1.84954700 -0.60305200 0.01523300 -Sc 1.78081000 5.34242000 1.78081000 -0.49242500 -0.74499900 -0.27740800 -Sc 3.56161000 7.12322000 3.56161000 1.15562000 -0.00043800 -0.06187800 -Sc 0.00000000 0.00000000 3.56161000 -0.85511700 0.01901000 -0.67265000 -Sc 8.90403000 1.78081000 5.34242000 -0.27434500 0.20867500 -0.03951900 -Sc 0.00000000 0.00000000 0.00000000 -1.18264700 -0.54330300 0.62220800 -Sc 3.56161000 0.00000000 3.56161000 1.25239000 -1.18703600 -0.40478100 -V 3.56161000 7.12322000 0.00000000 1.31412200 -0.52462800 -0.15144200 -V 8.90403000 5.34242000 1.78081000 1.06924400 -0.60758500 0.26851100 -V 5.34242000 8.90403000 5.34242000 0.22253700 0.72659900 0.34632000 -V 0.00000000 7.12322000 0.00000000 -0.55686700 -0.08970400 0.17212400 -V 7.12322000 0.00000000 0.00000000 0.48191100 -1.30941400 0.48553700 -V 5.34242000 5.34242000 1.78081000 -0.39142100 -0.80700800 -0.70213500 -Cr 7.12322000 0.00000000 3.56161000 0.01381200 -1.05781000 -0.46442800 -Cr 0.00000000 7.12322000 3.56161000 -0.45647900 -0.34033600 -0.09011900 -Cr 5.34242000 8.90403000 1.78081000 0.10052000 0.58999000 -0.24462500 -Cr 0.00000000 3.56161000 3.56161000 -0.23056400 0.13935000 -0.52112400 -Cu 3.56161000 3.56161000 3.56161000 -0.00713800 0.27627400 -0.08896200 -Cu 3.56161000 0.00000000 0.00000000 0.01210400 -0.33924500 0.13192400 -Cu 0.00000000 3.56161000 0.00000000 -0.18990800 0.01517500 0.19734500 -Y 7.12322000 7.12322000 0.00000000 -0.72846500 0.33287000 1.25593600 -Y 1.78081000 8.90403000 5.34242000 0.03014400 -1.22317300 0.72989600 -Y 1.78081000 8.90403000 1.78081000 -0.05851900 -0.36160900 -0.71290700 -Y 1.78081000 1.78081000 5.34242000 0.32779200 1.96897500 0.33679600 -Y 5.34242000 1.78081000 5.34242000 -0.21380300 -0.15575200 0.75537500 -Y 7.12322000 7.12322000 3.56161000 -0.58523000 0.43246900 -1.23375900 -Y 5.34242000 1.78081000 1.78081000 -0.31247600 -0.10155400 -0.65566900 -Nb 7.12322000 3.56161000 0.00000000 0.02211900 1.29882200 0.92695800 -Nb 1.78081000 5.34242000 5.34242000 -0.77269800 0.31461400 0.20306300 -Nb 8.90403000 8.90403000 1.78081000 0.33117200 0.32551100 0.14628400 -Hf 5.34242000 5.34242000 5.34242000 -1.57041900 -1.08461800 0.84930100 -Hf 7.12322000 3.56161000 3.56161000 0.75825900 1.23749500 -0.92562000 -Hf 8.90403000 1.78081000 1.78081000 -0.17586500 0.38581400 0.05539300 -Hf 1.78081000 1.78081000 1.78081000 -0.00032300 1.64333700 -0.13970500 -Hf 8.90403000 8.90403000 5.34242000 0.26984900 0.53720200 -0.20491000 -Pt 3.56161000 3.56161000 
0.00000000 -0.15643700 0.62908400 0.09343600 -36 -Lattice="9.635884875 0.0 0.0 0.0 9.635884875 0.0 0.0 0.0 6.42392325" Properties=species:S:1:pos:R:3:forces:R:3 class=3 scale=0.9434298026410756 fps_idx=-1 name=2996 energy=-286.87287766 stress="-0.09254085228504165 0.0024403551701108433 -0.006314228948708925 0.0024403551701108433 -0.09549506465997734 0.006802726901955303 -0.006314228948708925 0.006802726901955303 -0.09694182775082971" free_energy=-286.87390508 pbc="T T T" -Sc 0.02921000 3.07534000 3.30779000 0.35901200 2.64848100 -1.17640100 -Sc 4.85256000 1.64607000 1.60982000 -2.32323000 -0.19597400 -1.69855600 -Sc 0.08519000 6.27742000 6.41241000 0.13125800 1.30246100 -0.06136400 -Sc 8.04074000 8.08267000 1.57201000 -0.17883000 -0.39156300 -0.89605200 -Sc 4.89356000 4.93023000 4.84477000 -2.50677700 -3.40979100 -0.03795700 -Sc 6.49174000 0.08619000 3.26806000 -0.68763900 -1.05644400 0.01069600 -Sc 1.69208000 8.06353000 4.75564000 1.52226400 0.64117800 -0.79877100 -Sc 7.99604000 1.57451000 1.65486000 -1.07168300 0.59373800 -2.22462900 -Sc 4.83462000 1.44380000 4.84776000 -2.73325400 1.13088300 1.00102800 -Sc 9.53357000 0.07041000 3.12542000 1.38976400 -1.61965900 -0.06207900 -Sc 3.12921000 6.48182000 0.02447000 -1.36169400 -0.53665100 -0.03391100 -Sc 1.57723000 4.85359000 1.60382000 2.08439900 -0.59142800 0.60903000 -Co 3.17493000 6.45886000 3.26211000 0.18155600 -0.32115400 -0.21299300 -Co 1.51310000 5.00263000 4.75398000 0.82406900 -0.77435300 -0.27676800 -Co 6.47202000 3.22036000 0.02547000 0.03286900 0.27779900 0.14938100 -Co 3.21609000 3.38336000 6.36089000 -0.19411100 -0.05843600 0.37146400 -Co 3.25525000 0.11780000 3.09212000 -0.47316600 -0.22661100 -0.18194700 -Co 3.23903000 9.50952000 0.20074000 -0.09490600 -0.11672600 -0.35531800 -Co 1.68911000 7.99308000 1.49061000 -0.21061600 0.11067300 0.39802700 -Zr 0.09797000 0.15428000 6.38258000 0.19629500 -2.62187900 0.81038500 -Zr 1.42935000 1.64884000 4.78282000 5.08297200 -0.42159700 -0.67851500 -Zr 4.87702000 7.85368000 4.88279000 -2.33524600 2.82148000 -1.68347500 -Zr 0.05230000 3.19811000 6.40585000 -0.10496600 2.39935600 0.16638200 -Zr 8.19241000 4.81908000 4.73051000 -1.22868000 -1.08007200 0.97149900 -Zr 4.89754000 4.94110000 1.69964000 -2.24906500 -2.61724200 -0.33636600 -Zr 6.44153000 3.06623000 3.13056000 0.30210300 1.80915000 0.44235900 -Zr 0.04720000 6.37272000 3.11469000 1.26009500 0.82548700 -0.07187300 -Zr 1.55134000 1.72151000 1.48906000 4.53941500 -1.14875400 1.00802700 -Zr 6.33364000 6.39266000 6.39151000 2.11250700 -0.33827800 1.11585600 -Zr 4.80528000 7.88029000 1.66955000 -2.10128600 2.79408900 1.04210700 -Zr 8.04114000 1.70927000 4.91119000 -2.67768800 -0.32941700 1.54041300 -Zr 7.96040000 8.02735000 4.86003000 0.43958700 0.76381200 0.29915000 -Zr 6.53648000 6.25756000 3.28220000 1.16587500 0.36679100 -1.06470800 -W 6.25718000 9.54549000 6.42115000 1.32460200 -0.11885800 0.59104000 -W 3.08815000 3.30210000 3.20604000 -0.19788700 0.39967900 1.14255700 -W 7.98093000 4.72271000 1.48848000 -0.21792000 -0.91017100 0.18228000 +Lattice="10.17896035 0.0 0.0 0.0 10.17896035 0.0 0.0 0.0 6.785973567" Properties=species:S:1:pos:R:3:forces:R:3 class=3 scale=1.0936659901056818 fps_idx=-1 name=654 energy=-295.5983349 stress="0.23889555934780998 -0.00207471508249835 0.006845643518704877 -0.00207471508249835 0.2384006576062004 0.0015937943213307935 0.006845643518704877 0.0015937943213307935 0.23851844112491494" free_energy=-295.63442729 pbc="T T T" +Ru 8.48247000 1.69649000 5.08948000 -0.12982700 0.36181500 0.24203200 +Ru 
[extended-XYZ dataset diff: numerical payload omitted. Several 36- and 48-atom alloy frames are swapped: the removed frames name=712 (Ti/Fe/Mo/Lu/Ta), name=457 (Co/Ni/Cu/Rh/Hf/Ir/Pt) and name=132 (Mn/Fe/W) are replaced by added frames of Ru/Ag/Hf/Ir composition (continued from above), name=4599 (Sc/V/Fe/Zn/Rh/Pd/Hf/Pt), name=2510 (Ti/Fe/Y/Zr), name=527 (Ti/Co/Ag) and name=712 (V/Co/Ni/Zn/Ru/Pt). Every frame carries a Lattice, per-atom species, positions and forces, and energy, stress and free_energy metadata.]
[remainder of the added name=712 frame and the unchanged 36-atom context frame name=654 omitted]
diff --git a/tests/utils/test_neighbor_list.py b/tests/utils/test_neighbor_list.py
index df4f03421..f095d02b8 100644
--- a/tests/utils/test_neighbor_list.py
+++ b/tests/utils/test_neighbor_list.py
@@ -3,7 +3,7 @@
 from metatensor.torch.atomistic import NeighborsListOptions
 
 from metatensor.models.utils.data.readers.structures import read_structures_ase
-from metatensor.models.utils.neighbor_list import attach_neighbor_lists
+from metatensor.models.utils.neighbors_lists import get_system_with_neighbors_lists
 
 RESOURCES_PATH = Path(__file__).parent.resolve() / ".." / "resources"
 
@@ -19,7 +19,9 @@ def test_attach_neighbor_lists():
         NeighborsListOptions(model_cutoff=6.0, full_list=True),
     ]
 
-    new_system = attach_neighbor_lists(structures[0], requested_neighbor_lists)
+    new_system = get_system_with_neighbors_lists(
+        structures[0], requested_neighbor_lists
+    )
 
     assert requested_neighbor_lists[0] in new_system.known_neighbors_lists()
     assert requested_neighbor_lists[1] in new_system.known_neighbors_lists()
diff --git a/tox.ini b/tox.ini
index 42d1bdc42..0842d840c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -81,6 +81,15 @@ deps =
 commands =
     pytest --import-mode=append {posargs} src/metatensor/models/experimental/soap_bpnn/tests/
 
+[testenv:alchemical-model-tests]
+description = Run Alchemical Model tests with pytest
+passenv = *
+deps =
+    pytest
+    torch_alchemical @ git+https://github.com/abmazitov/torch_alchemical.git@fafb0bd
+commands =
+    pytest --import-mode=append {posargs} src/metatensor/models/experimental/alchemical_model/tests/
+
 [testenv:docs]
 description = builds the documentation with sphinx
 deps =
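
Note: the renamed helper exercised by the test above can also be called directly. The sketch below is illustrative only and not part of the patch; "structures.xyz" is a hypothetical placeholder path, and it is assumed that read_structures_ase accepts a path to an extended-XYZ file, as in the test.

# Minimal usage sketch (assumptions noted above; mirrors the updated test).
from metatensor.torch.atomistic import NeighborsListOptions

from metatensor.models.utils.data.readers.structures import read_structures_ase
from metatensor.models.utils.neighbors_lists import get_system_with_neighbors_lists

# Read structures from a hypothetical extended-XYZ file.
structures = read_structures_ase("structures.xyz")

# Request a full neighbor list with a 6.0 cutoff, matching the test.
requested = [NeighborsListOptions(model_cutoff=6.0, full_list=True)]

# Attach the requested neighbor list(s) to the first structure.
system = get_system_with_neighbors_lists(structures[0], requested)
assert requested[0] in system.known_neighbors_lists()

Locally, the new environment added to tox.ini above can be run with "tox -e alchemical-model-tests", which installs pytest and the pinned torch_alchemical commit before running the alchemical_model test suite.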