How to use the pyiron.base.master.generic.GenericMaster class in pyiron

To help you get started, we’ve selected a few pyiron examples based on popular ways GenericMaster is used in public projects.


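GenericMaster is the shared base class of pyiron's master jobs (ParallelMaster, ListMaster, FlexibleMaster, InteractiveWrapper), which wrap one or more child jobs. As a minimal, hedged sketch of the pattern most of the snippets below rely on, the following distinguishes master jobs from ordinary jobs when iterating over a project; the project path is hypothetical and the child_ids attribute on master jobs is an assumption:

from pyiron import Project
from pyiron.base.master.generic import GenericMaster

pr = Project("demo")  # hypothetical project path
for job in pr.iter_jobs(convert_to_object=True):
    if isinstance(job, GenericMaster):
        # master jobs wrap several child jobs
        print(job.job_name, "is a master job with", len(job.child_ids), "children")
    else:
        print(job.job_name, "is a regular job")
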
github pyiron / pyiron / pyiron / base / master / parallel.py
def run_if_interactive(self):
        if not (
            self.ref_job.server.run_mode.interactive
            or self.ref_job.server.run_mode.interactive_non_modal
        ):
            raise ValueError(
                "The child job has to be run_mode interactive or interactive_non_modal."
            )
        if isinstance(self.ref_job, GenericMaster):
            self.run_static()
        elif self.server.cores == 1:
            self.interactive_ref_job_initialize()
            for parameter in self._job_generator.parameter_list:
                self._job_generator.modify_job(job=self.ref_job, parameter=parameter)
                self.ref_job.run()
            self.ref_job.interactive_close()
        else:
            if self.server.cores > len(self._job_generator.parameter_list):
                number_of_jobs = len(self._job_generator.parameter_list)
            else:
                number_of_jobs = self.server.cores
            max_tasks_per_job = (
                int(len(self._job_generator.parameter_list) // number_of_jobs) + 1
            )
            parameters_sub_lst = [
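
The interactive branches above are only reached when the reference job runs in interactive (or interactive_non_modal) mode. A hedged sketch of how a ParallelMaster such as Murnaghan is typically set up to hit this code path; the project path, structure, and potential choice are placeholders:

from pyiron import Project

pr = Project("interactive_demo")  # hypothetical project path
ref = pr.create_job(pr.job_type.Lammps, "lmp_ref")
ref.structure = pr.create_ase_bulk("Al", cubic=True)
ref.potential = ref.list_potentials()[0]  # placeholder potential choice
ref.server.run_mode.interactive = True    # required by run_if_interactive()

murn = pr.create_job(pr.job_type.Murnaghan, "murn")
murn.ref_job = ref
murn.run()
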
github pyiron / pyiron / pyiron / atomistics / job / atomistic.py
def restart(self, job_name=None, job_type=None):
        """
        Restart a new job created from an existing calculation.
        Args:
            project (pyiron.project.Project instance): Project instance at which the new job should be created
            job_name (str): Job name
            job_type (str): Job type

        Returns:
            new_ham: New job
        """
        new_ham = super(AtomisticGenericJob, self).restart(
            job_name=job_name, job_type=job_type
        )
        if isinstance(new_ham, GenericMaster) and not isinstance(self, GenericMaster):
            new_child = self.restart(job_name=None, job_type=None)
            new_ham.append(new_child)
        new_ham.structure = self.get_structure(iteration_step=-1)
        if new_ham.structure is None:
            new_ham.structure = self.structure.copy()
        new_ham._generic_input['structure'] = 'atoms'
        return new_ham
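
A hedged usage sketch for this restart() variant on a previously finished job; the project path and job names are hypothetical:

from pyiron import Project

pr = Project("demo")                  # hypothetical project path
finished = pr.load("lmp_ref")         # a previously executed job
restarted = finished.restart(job_name="lmp_ref_restart")
restarted.run()
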
github pyiron / pyiron / pyiron / base / master / generic.py
def _child_job_update_hdf(self, parent_job, child_job):
        """

        Args:
            parent_job:
            child_job:
        """
        child_job.project_hdf5.file_name = parent_job.project_hdf5.file_name
        child_job.project_hdf5.h5_path = (
            parent_job.project_hdf5.h5_path + "/" + child_job.job_name
        )
        if isinstance(child_job, GenericMaster):
            for sub_job_name in child_job._job_name_lst:
                self._child_job_update_hdf(
                    parent_job=child_job,
                    child_job=child_job._load_job_from_cache(sub_job_name),
                )
        parent_job.job_object_dict[child_job.job_name] = child_job
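
The method above copies the parent's HDF5 file name into each child and nests the child's h5_path under the parent's path, recursing whenever a child is itself a GenericMaster. The following plain-Python sketch only illustrates the resulting path layout; the helper and the job names are made up for illustration:

def nested_h5_paths(parent_path, children, sub_children):
    # Recreate the "parent/child" path composition used in _child_job_update_hdf.
    for name in children:
        child_path = parent_path + "/" + name
        yield child_path
        yield from nested_h5_paths(child_path, sub_children.get(name, []), sub_children)

# A master "/murn" with two children; "murn_0" is itself a master with one child.
print(list(nested_h5_paths("/murn", ["murn_0", "murn_1"], {"murn_0": ["relax_0"]})))
# -> ['/murn/murn_0', '/murn/murn_0/relax_0', '/murn/murn_1']
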
github pyiron / pyiron / pyiron / base / master / list.py
The ListMaster behaves like a list, just for job objects.
"""

__author__ = "Jan Janssen"
__copyright__ = (
    "Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - "
    "Computational Materials Design (CM) Department"
)
__version__ = "1.0"
__maintainer__ = "Jan Janssen"
__email__ = "janssen@mpie.de"
__status__ = "production"
__date__ = "Sep 1, 2017"


class ListMaster(GenericMaster):
    """
    The ListMaster is the simplest MetaJob derived from the GenericMaster. It behaves like a Python list object: jobs
    can be appended to the ListMaster just like elements are added to a list, and all appended jobs can then be
    executed together. This also works for already executed jobs, unless they are already linked to a different
    MetaJob, meaning they already have a master ID assigned to them.

    Args:
        project (ProjectHDFio): ProjectHDFio instance which points to the HDF5 file the job is stored in
        job_name (str): name of the job, which has to be unique within the project

    Attributes:

        .. attribute:: job_name

            name of the job, which has to be unique within the project
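
A hedged sketch of the list-like workflow described in this docstring; it assumes ListMaster is registered under pr.job_type.ListMaster and uses placeholder structures and potentials:

from pyiron import Project

pr = Project("list_demo")  # hypothetical project path
job_lst = pr.create_job(pr.job_type.ListMaster, "all_jobs")
for element in ["Al", "Cu"]:
    job = pr.create_job(pr.job_type.Lammps, "lmp_" + element)
    job.structure = pr.create_ase_bulk(element, cubic=True)
    job.potential = job.list_potentials()[0]  # placeholder potential choice
    job_lst.append(job)                       # jobs are appended like list elements
job_lst.run()                                 # execute all appended jobs together
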
github pyiron / pyiron / pyiron / base / master / parallel.py
__version__ = "1.0"
__maintainer__ = "Jan Janssen"
__email__ = "janssen@mpie.de"
__status__ = "production"
__date__ = "Sep 1, 2017"


def job_wrap_function(parameters):
    # Unpack the parameter tuple prepared by the master; only working_directory,
    # job_id and debug are forwarded to the job wrapper here.
    working_directory, job_id, file_path, submit_on_remote, debug = parameters
    job_wrapper_function(
        working_directory=working_directory,
        job_id=job_id,
        debug=debug,
    )


class ParallelMaster(GenericMaster):
    """
    MasterJob that handles the creation and analysis of several parallel jobs (including master and
    continuation jobs). Examples are Murnaghan or phonon calculations.

    Args:
        project (ProjectHDFio): ProjectHDFio instance which points to the HDF5 file the job is stored in
        job_name (str): name of the job, which has to be unique within the project

    Attributes:

        .. attribute:: job_name

            name of the job, which has to be unique within the project

        .. attribute:: status
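
Concrete parallel masters are usually implemented by pairing a ParallelMaster subclass with a JobGenerator, the object whose parameter_list and modify_job the run_if_interactive() snippet at the top of this page iterates over. Below is a hedged sketch of that pattern; the job_name hook, the constructor argument passed to the generator, and the cell scaling are assumptions made for illustration:

import numpy as np

from pyiron.base.master.parallel import ParallelMaster, JobGenerator


class StrainJobGenerator(JobGenerator):
    @property
    def parameter_list(self):
        # one entry per child job; consumed by run_if_interactive()/run_static()
        return [0.98, 1.0, 1.02]

    @staticmethod
    def job_name(parameter):
        return "strain_" + str(parameter).replace(".", "_")

    @staticmethod
    def modify_job(job, parameter):
        # uniformly scale the cell of the child job's structure (illustrative only)
        job.structure.set_cell(np.array(job.structure.cell) * parameter, scale_atoms=True)
        return job


class StrainMaster(ParallelMaster):
    def __init__(self, project, job_name):
        super(StrainMaster, self).__init__(project, job_name)
        self._job_generator = StrainJobGenerator(self)

    def collect_output(self):
        pass  # gather the results of the finished child jobs here
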
github pyiron / pyiron / pyiron / atomistics / job / interactivewrapper.py
from pyiron.base.generic.parameters import GenericParameters
from pyiron.base.job.generic import GenericJob
from pyiron.base.master.generic import GenericMaster

__author__ = "Osamu Waseda, Jan Janssen"
__copyright__ = (
    "Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - "
    "Computational Materials Design (CM) Department"
)
__version__ = "1.0"
__maintainer__ = "Jan Janssen"
__email__ = "janssen@mpie.de"
__status__ = "development"
__date__ = "Sep 1, 2017"


class InteractiveWrapper(GenericMaster):
    def __init__(self, project, job_name):
        super(InteractiveWrapper, self).__init__(project, job_name)
        self._ref_job = None
        self.input = GenericParameters("parameters")

    @property
    def structure(self):
        if self.ref_job:
            return self._ref_job.structure
        else:
            return None

    @structure.setter
    def structure(self, basis):
        if self.ref_job:
            self._ref_job.structure = basis
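
InteractiveWrapper itself is a base class; in practice one of its subclasses wraps an interactive reference job and delegates structure access through the property shown above. A hedged sketch of that usage, assuming SxExtOptInteractive (one such subclass) is available and registered in your installation; project path, structure, and potential are placeholders:

from pyiron import Project

pr = Project("wrapper_demo")  # hypothetical project path
ref = pr.create_job(pr.job_type.Lammps, "lmp_ref")
ref.structure = pr.create_ase_bulk("Fe", cubic=True)
ref.potential = ref.list_potentials()[0]  # placeholder potential choice
ref.server.run_mode.interactive = True    # wrappers drive interactive reference jobs

# assumption: SxExtOptInteractive (an InteractiveWrapper subclass) is installed and registered
minim = pr.create_job(pr.job_type.SxExtOptInteractive, "sxextopt")
minim.ref_job = ref
print(minim.structure)  # delegated to ref.structure by the property above
minim.run()
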
github pyiron / pyiron / pyiron / base / master / flexible.py
The FlexibleMaster uses a list of functions to connect multiple jobs in series.
"""

__author__ = "Jan Janssen, Liam Huber"
__copyright__ = (
    "Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - "
    "Computational Materials Design (CM) Department"
)
__version__ = "1.0"
__maintainer__ = "Jan Janssen"
__email__ = "janssen@mpie.de"
__status__ = "development"
__date__ = "Mar 24, 2019"


class FlexibleMaster(GenericMaster):
    """
    The FlexibleMaster uses a list of functions to connect multiple jobs in series.

    Args:
        project (ProjectHDFio): ProjectHDFio instance which points to the HDF5 file the job is stored in
        job_name (str): name of the job, which has to be unique within the project

    Attributes:

        .. attribute:: job_name

            name of the job, which has to be unique within the project

        .. attribute:: status

            execution status of the job, can be one of the following [initialized, appended, created, submitted,
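
A hedged sketch of chaining two jobs with a FlexibleMaster; the function_lst attribute and the (previous job, next job) signature of the connecting function follow the pattern used in pyiron examples and are assumptions here, as are the project path, structures, and potentials:

from pyiron import Project

pr = Project("flex_demo")  # hypothetical project path

relax = pr.create_job(pr.job_type.Lammps, "lmp_relax")
relax.structure = pr.create_ase_bulk("Al", cubic=True)
relax.potential = relax.list_potentials()[0]  # placeholder potential choice

md = pr.create_job(pr.job_type.Lammps, "lmp_md")
md.structure = relax.structure            # overwritten by pass_structure at run time
md.potential = md.list_potentials()[0]    # placeholder potential choice

def pass_structure(job_prev, job_next):
    # hand the final structure of the previous job to the next one
    job_next.structure = job_prev.get_structure(iteration_step=-1)

flex = pr.create_job(pr.job_type.FlexibleMaster, "flex")
flex.append(relax)
flex.function_lst.append(pass_structure)  # called between the two jobs
flex.append(md)
flex.run()
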
github pyiron / pyiron / pyiron / atomistics / job / atomistic.py
def restart(self, snapshot=-1, job_name=None, job_type=None):
        """
        Restart a new job created from an existing calculation.
        Args:
            project (pyiron.project.Project instance): Project instance at which the new job should be created
            snapshot (int): Snapshot of the calculations which would be the initial structure of the new job
            job_name (str): Job name
            job_type (str): Job type

        Returns:
            new_ham: New job
        """
        new_ham = super(AtomisticGenericJob, self).restart(
            snapshot=snapshot, job_name=job_name, job_type=job_type
        )
        if isinstance(new_ham, GenericMaster) and not isinstance(self, GenericMaster):
            new_child = self.restart(snapshot=snapshot, job_name=None, job_type=None)
            new_ham.append(new_child)
        if self.status.finished:
            new_ham.structure = self.get_structure(iteration_step=snapshot)
            new_ham._generic_input['structure'] = 'atoms'
        else:
            new_ham._generic_input['structure'] = 'continue_final'
        return new_ham
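
A hedged sketch of the snapshot variant: restart from a specific frame of a finished trajectory rather than from the final one; the project path, job names, and frame index are placeholders:

from pyiron import Project

pr = Project("demo")                 # hypothetical project path
md = pr.load("lmp_md")               # a finished MD job
cont = md.restart(snapshot=10, job_name="lmp_md_from_frame_10")
cont.run()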