        self._interactive_cache["energy"].append(energy)

    def interactive_close(self):
        self._interactive_library = False
        self.to_hdf()
        with self.project_hdf5.open("output") as h5:
            h5["generic/energy"] = np.array(self._interactive_cache["energy"])
            h5["generic/volume"] = np.array(self._interactive_cache["alat"])
            h5["generic/alat"] = np.array(self._interactive_cache["alat"])
            h5["generic/count"] = np.array(self._interactive_cache["count"])
            h5["generic/energy_tot"] = np.array(self._interactive_cache["energy"])
        self.project.db.item_update(self._runtime(), self._job_id)
        self.status.finished = True
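    # The cache flushed to HDF5 above is filled step by step while the job runs
    # interactively; a minimal sketch of such a step (the method name and the
    # argument list are illustrative, not pyiron's actual signature):
    def interactive_collect(self, energy, alat, count):
        self._interactive_cache["energy"].append(energy)
        self._interactive_cache["alat"].append(alat)
        self._interactive_cache["count"].append(count)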
class ExampleInput(GenericParameters):
    """
    Input class for the ExampleJob based on the GenericParameters class.

    Args:
        input_file_name (str): Name of the input file - optional
    """

    def __init__(self, input_file_name=None):
        super(ExampleInput, self).__init__(
            input_file_name=input_file_name, table_name="input_inp", comment_char="#"
        )

    def load_default(self):
        """
        Loading the default settings for the input file.
        """

    def from_hdf(self, hdf):
        """
        Reads the attributes and reconstructs the object from a hdf file

        Args:
            hdf: The hdf5 instance
        """
        with hdf.open("dft") as hdf_dft:
            for node in hdf_dft.list_nodes():
                if node == "description":
                    # self.description = hdf_go[node]
                    pass
                else:
                    self.log_dict[node] = hdf_dft[node]
class Incar(GenericParameters):
    """
    Class to control the INCAR file of a vasp simulation
    """

    def __init__(self, input_file_name=None, table_name="incar"):
        super(Incar, self).__init__(
            input_file_name=input_file_name,
            table_name=table_name,
            comment_char="#",
            separator_char="=",
        )
        self._bool_dict = {True: ".TRUE.", False: ".FALSE."}
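    # Sketch (not pyiron's actual writer) of how _bool_dict above would be used
    # to translate Python booleans into the Fortran-style strings VASP expects
    # in an INCAR file, e.g. True -> ".TRUE." and 250 -> "250":
    #
    #   def _to_incar_string(self, value):
    #       if isinstance(value, bool):
    #           return self._bool_dict[value]
    #       return str(value)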
    def load_default(self):
        """
        Loads the default file content
        """
        file_content = '''\
SYSTEM = ToDo # jobname
PREC = Accurate
ALGO = Fast
ENCUT = 250
LREAL = False
LWAVE = False
LORBIT = 0
'''
        self.load_string(file_content)


class Potcar(GenericParameters):
    def __init__(self, input_file_name=None, table_name="potcar"):
        GenericParameters.__init__(
            self,
            input_file_name=input_file_name,
            table_name=table_name,
            val_only=False,
            comment_char="#",
        )
        self._structure = None
        self.electrons_per_atom_lst = list()
        self.max_cutoff_lst = list()
        self.el_path_lst = list()
        self.el_path_dict = dict()
        self.modified_elements = dict()
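# Usage sketch for the GenericParameters-based inputs above (Incar shown);
# dict-style access is assumed to behave as in pyiron's GenericParameters:
incar = Incar()
incar.load_default()
incar["ENCUT"] = 400  # override the default plane-wave cutoff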
class Kpoints(GenericParameters):
    """
    Class to control the KPOINTS file of a vasp simulation
    """

    def __init__(self, input_file_name=None, table_name="kpoints"):
        super(Kpoints, self).__init__(
            input_file_name=input_file_name,
            table_name=table_name,
            val_only=True,
            comment_char="!",
        )

    def set(self, method=None, size_of_mesh=None, shift=None):
        """
        Sets appropriate tags and values in the KPOINTS file

        Args:
            method (str): Type of meshing scheme (Gamma Point, MP, Manual or Line)
            size_of_mesh (list/numpy.ndarray): List of size 1x3 specifying the required mesh size
            shift (list): List of size 1x3 specifying the user defined shift from the Gamma point
        """
            self.potcar.to_hdf(hdf5_input)

    def from_hdf(self, hdf):
        """
        Reads the attributes and reconstructs the object from a hdf file

        Args:
            hdf: The hdf5 instance
        """
        with hdf.open("input") as hdf5_input:
            self.incar.from_hdf(hdf5_input)
            self.kpoints.from_hdf(hdf5_input)
            self.potcar.from_hdf(hdf5_input)
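    # The serialization direction matching from_hdf() above would look like
    # this (a sketch assuming the same incar/kpoints/potcar attributes and the
    # to_hdf() call visible in the fragment further up):
    def to_hdf(self, hdf):
        with hdf.open("input") as hdf5_input:
            self.incar.to_hdf(hdf5_input)
            self.kpoints.to_hdf(hdf5_input)
            self.potcar.to_hdf(hdf5_input)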
    def load_default(self):
        """
        Loads the default file content
        """
        file_content = '''\
0
Monkhorst_Pack
4 4 4
0 0 0
'''
        self.load_string(file_content)

    def set_kmesh_by_density(self, structure):
        if "density_of_mesh" in self._dataset and self._dataset["density_of_mesh"] is not None:
            if self._dataset["density_of_mesh"] != 0.0:
                k_mesh = get_k_mesh_by_cell(
                    structure.get_cell(),
                    kspace_per_in_ang=self._dataset["density_of_mesh"],
                )
                self.set(size_of_mesh=k_mesh)
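# Illustrative stand-in (not pyiron's implementation) for what a helper like
# get_k_mesh_by_cell() could compute: one mesh point per `kspace_per_in_ang`
# of reciprocal-lattice length along each cell vector.
import numpy as np

def get_k_mesh_by_cell_sketch(cell, kspace_per_in_ang=0.1):
    reciprocal_cell = 2 * np.pi * np.linalg.inv(np.array(cell)).T
    lengths = np.linalg.norm(reciprocal_cell, axis=1)
    return np.maximum(np.rint(lengths / kspace_per_in_ang), 1).astype(int)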
class Potcar(GenericParameters):
    pot_path_dict = {"GGA": "paw-gga-pbe", "PBE": "paw-gga-pbe", "LDA": "paw-lda"}

    def __init__(self, input_file_name=None, table_name="potcar"):
        GenericParameters.__init__(
            self,
            input_file_name=input_file_name,
            table_name=table_name,
            val_only=False,
            comment_char="#",
        )
        self._structure = None
        self.electrons_per_atom_lst = list()
        self.max_cutoff_lst = list()
        self.el_path_lst = list()
        self.el_path_dict = dict()

    def potcar_set_structure(self, structure):
        self._structure = structure
        self._set_default_path_dict()
        self._set_potential_paths()
            positions[frame] = coordinates + factor * mode.reshape((-1, 3))

        try:
            import nglview
        except ImportError:
            raise ImportError(
                "The animate_nma_mode() function requires the package nglview to be installed"
            )
        animation = nglview.show_asetraj(Trajectory(positions, self.structure))
        if spacefill:
            animation.add_spacefill(radius_type='vdw', scale=0.5, radius=particle_size)
            animation.remove_ball_and_stick()
        else:
            animation.add_ball_and_stick()
        return animation
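# Typical notebook usage of the method above (illustrative; `job` stands for an
# object exposing animate_nma_mode() and a computed normal-mode analysis, and
# the argument names are assumptions):
#
#   view = job.animate_nma_mode(0, spacefill=True)
#   view  # display the nglview widget in a Jupyter cell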
class GaussianInput(GenericParameters):
    def __init__(self, input_file_name=None):
        super(GaussianInput, self).__init__(
            input_file_name=input_file_name, table_name="input_inp", comment_char="#"
        )

    def load_default(self):
        """
        Loading the default settings for the input file.
        """
        input_str = """\
lot HF
basis_set 6-311G(d,p)
spin_mult 1
charge 0
"""
        self.load_string(input_str)
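# Minimal usage sketch for the defaults above (dict-style access is assumed to
# behave as in pyiron's GenericParameters):
gin = GaussianInput()
gin.load_default()
gin["lot"] = "MP2"          # switch the level of theory
gin["basis_set"] = "6-31G"  # switch the basis set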
class GaussianOutput(GenericOutput):
    def __getitem__(self, item):
        new_structure = self._structure.copy()
        if self._cells is not None:
            new_structure.cell = self._cells[item]
        new_structure.positions = self._positions[item]
        # This step is necessary for using ase.io.write for trajectories
        new_structure.arrays['positions'] = new_structure.positions
        new_structure.arrays['cells'] = new_structure.cell
        return new_structure

    def __len__(self):
        return len(self._positions)
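# Because the class above implements __getitem__ and __len__, its frames can be
# exported with ASE directly; a sketch (the `output` instance and file name are
# illustrative):
#
#   from ase.io import write
#   write("trajectory.xyz", [output[i] for i in range(len(output))])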
class GenericInput(GenericParameters):
    def __init__(self, input_file_name=None, table_name="generic"):
        super(GenericInput, self).__init__(
            input_file_name=input_file_name,
            table_name=table_name,
            comment_char="#",
            separator_char="=",
        )

    def load_default(self):
        """
        Loads the default file content
        """
        file_content = '''\
calc_mode=static # static, minimize, md
structure=atoms # atoms, continue_final
'''
        self.load_string(file_content)
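# Usage sketch for the defaults above (illustrative; dict-style access is
# assumed as for the other GenericParameters subclasses):
generic_input = GenericInput()
generic_input.load_default()
generic_input["calc_mode"] = "md"  # switch from the static default to molecular dynamics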
class GenericOutput(object):


    output_dict['enhanced/time'] = data[:, 0]
    output_dict['enhanced/cv'] = data[:, 1:-1]
    output_dict['enhanced/bias'] = data[:, -1]


def collect_output(output_file):
    # this routine basically reads and returns the output HDF5 file produced by Yaff
    # read output
    h5 = h5py.File(output_file, mode='r')
    # translate to dict
    output_dict = hdf2dict(h5)
    # read colvar file if it is there
    read_colvar(output_file, output_dict)
    return output_dict
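# Illustrative call of collect_output() above (the file name is hypothetical):
#
#   output_dict = collect_output("output.h5")
#   cv = output_dict.get("enhanced/cv")  # only present if a COLVAR file was found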
class YaffInput(GenericParameters):
    def __init__(self, input_file_name=None):
        super(YaffInput, self).__init__(
            input_file_name=input_file_name, table_name="input_inp", comment_char="#"
        )

    def load_default(self):
        '''
        Loading the default settings for the input file.
        '''
        input_str = """\
rcut 28.345892008818783 #(FF) real space cutoff
alpha_scale 3.2 #(FF) scale for ewald alpha parameter
gcut_scale 1.5 #(FF) scale for ewald reciprocal cutoff parameter
smooth_ei True #(FF) smoothen cutoff for real space electrostatics
gpos_rms 1e-8 #(OPT) convergence criterion for RMS of gradients towards atomic coordinates
dpos_rms 1e-6 #(OPT) convergence criterion for RMS of differences of atomic coordinates
grvecs_rms 1e-8 #(OPT) convergence criterion for RMS of gradients towards cell parameters
drvecs_rms 1e-6 #(OPT) convergence criterion for RMS of differences of cell parameters
    },
    "electron": {
        "mass": 1.,
        "distance": ANG_TO_BOHR,
        "time": 1.,
        "energy": EV_TO_HA,
        "velocity": ANG_PER_FS_TO_BOHR_PER_FS,
        "force": EV_PER_ANG_TO_HA_PER_BOHR,
        "temperature": 1.,
        "pressure": GPA_TO_PA,
        "charge": 1.
    },
}
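# Sketch of how a conversion table like the fragment above is typically used:
# raw arrays are multiplied by the factor registered for their quantity in the
# requested unit system (the `unit_dict` layout mirrors the fragment; the
# function and variable names here are illustrative):
import numpy as np

def convert(values, quantity, system, unit_dict):
    """Return `values` scaled by the conversion factor for `quantity` in `system`."""
    return np.asarray(values) * unit_dict[system][quantity]

# convert(forces_ev_per_ang, "force", "electron", unit_dict)  # eV/Ang -> Ha/Bohr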
class LammpsControl(GenericParameters):
    def __init__(self, input_file_name=None, **qwargs):
        super(LammpsControl, self).__init__(
            input_file_name=input_file_name, table_name="control_inp", comment_char="#"
        )
        self._init_block_dict()

    @property
    def dataset(self):
        return self._dataset

    def _init_block_dict(self):
        block_dict = OrderedDict()
        block_dict["read_restart"] = "read_restart"
        block_dict["structure"] = (
            "units",
            "dimension",