How to use the petsc4py.PETSc module in petsc4py

To help you get started, we've selected a few petsc4py examples based on popular ways it is used in public projects.

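Before the project snippets, here is a minimal, self-contained sketch of typical petsc4py usage; the option name -n and the vector size are illustrative, not taken from any of the projects below.

# minimal sketch: initialize PETSc, read an option, build and reduce a vector
import sys
import petsc4py
petsc4py.init(sys.argv)        # pass command-line options through to PETSc
from petsc4py import PETSc

OptDB = PETSc.Options()
n = OptDB.getInt('n', 8)       # read -n from the options database, default 8

v = PETSc.Vec().createMPI(n, comm=PETSc.COMM_WORLD)
v.set(1.0)                     # fill with ones
v.assemble()
PETSc.Sys.Print("2-norm of ones(%d) = %g" % (n, v.norm()))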

github Parallel-in-Time/pySDC: pySDC/playgrounds/PETSc/poisson.py (view on GitHub)
import numpy as np
from petsc4py import PETSc

# tail of Poisson2D.mult: centered second differences form the 5-point Laplacian
                u_yy = (u_n - 2*u + u_s) / hy ** 2
                y[i, j] = u_xx + u_yy

OptDB = PETSc.Options()

n  = OptDB.getInt('n', 4)
nx = OptDB.getInt('nx', n)
ny = OptDB.getInt('ny', n)

da = PETSc.DMDA().create([nx, ny], stencil_width=1)
pde = Poisson2D(da)

x = da.createGlobalVec()
b = da.createGlobalVec()
# A = da.createMat('python')
# matrix-free operator; its action is supplied by the Poisson2D context below
A = PETSc.Mat().createPython(
    [x.getSizes(), b.getSizes()], comm=da.comm)
print(A.getSize())
A.setPythonContext(pde)
A.setUp()

y = da.createGlobalVec()

pde.formRHS(x, val=1.0)                        # fill x via the problem's RHS helper
A.mult(x, b)                                   # apply the matrix-free operator: b = A @ x
pde.formRHS(y, val=-2.0 * (2.0 * np.pi) ** 2)  # values to compare b against (see norm check below)

# u = da.createNaturalVec()
# da.globalToNatural(b, u)

# print((b - y).norm(PETSc.NormType.NORM_INFINITY))
# exit()
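
The Poisson2D class itself is cut off above; all that PETSc requires of a 'python' matrix context is a mult(mat, x, y) method. A minimal stand-in (a simple scaling operator, not the actual pySDC class) shows the protocol:

from petsc4py import PETSc

class ShellOp(object):
    """Illustrative matrix-free operator: y = 2 * x."""
    def mult(self, mat, x, y):
        x.copy(y)       # y <- x
        y.scale(2.0)    # y <- 2 * x

n = 8
x = PETSc.Vec().createSeq(n)
y = PETSc.Vec().createSeq(n)
A = PETSc.Mat().createPython([y.getSizes(), x.getSizes()], comm=PETSc.COMM_SELF)
A.setPythonContext(ShellOp())
A.setUp()
x.set(1.0)
A.mult(x, y)            # dispatches to ShellOp.mult
print(y.getArray())     # [2. 2. ... 2.]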

github OpenMDAO/OpenMDAO1: openmdao/core/petsc_impl.py (view on GitHub)
        src_idx_set = PETSc.IS().createGeneral(src_idxs, comm=comm)
        if trace: debug("src_idx_set DONE")
        tgt_idx_set = PETSc.IS().createGeneral(tgt_idxs, comm=comm)
        if trace: debug("tgt_idx_set DONE")

        try:
            if trace:  # pragma: no cover
                self.src_idxs = src_idxs
                self.tgt_idxs = tgt_idxs
                self.vec_conns = vec_conns
                arrow = '-->' if mode == 'fwd' else '<--'
                debug("'%s': new %s scatter (sizes: %d, %d)\n   %s %s %s %s %s %s" %
                      (name, mode, len(src_idx_set.indices), len(tgt_idx_set.indices),
                       [v for u, v in vec_conns], arrow, [u for u, v in vec_conns],
                       src_idx_set.indices, arrow, tgt_idx_set.indices))
            self.scatter = PETSc.Scatter().create(uvec, src_idx_set,
                                                  pvec, tgt_idx_set)
            if trace: debug("scatter creation DONE")
        except Exception as err:
            raise RuntimeError("ERROR in %s (src_idxs=%s, tgt_idxs=%s, usize=%d, psize=%d): %s" %
                               (name, src_idxs, tgt_idxs,
                                src_vec.vec.size,
                                tgt_vec.vec.size, str(err)))
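
Stripped of the OpenMDAO tracing and error handling, the core pattern above is: build two index sets, create a scatter between two vectors, and move values. A minimal sequential sketch with arbitrary illustrative indices:

import numpy as np
from petsc4py import PETSc

src = PETSc.Vec().createSeq(6)
src.setValues(range(6), np.arange(6.0))
src.assemble()
tgt = PETSc.Vec().createSeq(3)

# gather entries 1, 3, 5 of src into slots 0, 1, 2 of tgt
src_is = PETSc.IS().createGeneral([1, 3, 5], comm=PETSc.COMM_SELF)
tgt_is = PETSc.IS().createGeneral([0, 1, 2], comm=PETSc.COMM_SELF)
scatter = PETSc.Scatter().create(src, src_is, tgt, tgt_is)
scatter.scatter(src, tgt, addv=PETSc.InsertMode.INSERT,
                mode=PETSc.ScatterMode.FORWARD)
print(tgt.getArray())   # [1. 3. 5.]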

github anstmichaels/emopt: emopt/fdtd.py (view on GitHub)
        self._Z = dz * (Nz-1)

        self._wavelength = wavelength
        self._R = wavelength/(2*pi)

        ## Courant number < 1
        self._Sc = 0.95
        self._min_rindex = min_rindex
        dt = self._Sc * np.min([dx, dy, dz])/self._R / np.sqrt(3) * min_rindex
        self._dt = dt


        # stencil_type=0 => star stencil (1 would be box)
        # stencil_width=1 => 1 element ghosted region
        # boundary_type=1 => ghosted simulation boundary (padded everywhere)
        # (PETSc.DA is the older petsc4py spelling of PETSc.DMDA)
        da = PETSc.DA().create(sizes=[Nx, Ny, Nz], dof=1, stencil_type=0,
                               stencil_width=1, boundary_type=1)

        self._da = da
        ## Setup the distributed array. Currently, we need 2 for each field
        ## component (1 for the field, and 1 for the averaged material) and 1
        ## for each current density component
        self._vglobal = da.createGlobalVec() # global for data sharing

        pos, lens = da.getCorners()
        k0, j0, i0 = pos
        K, J, I = lens

        # field arrays
        self._Ex = np.zeros(((K+2)*(J+2)*(I+2),), dtype=np.double)
        self._Ey = np.zeros(((K+2)*(J+2)*(I+2),), dtype=np.double)
        self._Ez = np.zeros(((K+2)*(J+2)*(I+2),), dtype=np.double)
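
getCorners() returns the start indices and extents of the grid slab owned by this rank; the emopt code unpacks them with its own (k, j, i) naming and pads each local array with a one-element ghost layer. A standalone sketch with hypothetical grid sizes:

import numpy as np
from petsc4py import PETSc

Nx, Ny, Nz = 16, 16, 16   # hypothetical grid dimensions
da = PETSc.DMDA().create(sizes=[Nx, Ny, Nz], dof=1, stencil_width=1,
                         stencil_type=PETSc.DMDA.StencilType.STAR,
                         boundary_type=PETSc.DM.BoundaryType.GHOSTED)

(xs, ys, zs), (xm, ym, zm) = da.getCorners()   # local start indices and extents
# padded local storage with a one-element ghost layer on every side
field = np.zeros((xm + 2) * (ym + 2) * (zm + 2), dtype=np.double)
print("rank owns x[%d:%d] y[%d:%d] z[%d:%d]"
      % (xs, xs + xm, ys, ys + ym, zs, zs + zm))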

github mathLab/RBniCS: rbnics/backends/dolfin/wrapping/tensor_load.py (view on GitHub)
def _tensor_load(tensor, directory, filename, matrix_permutation, mpi_comm):
    (matrix_row_permutation, matrix_col_permutation) = matrix_permutation
    writer_mat = _matrix_load(directory, filename, mpi_comm)
    mat = wrapping.to_petsc4py(tensor)
    writer_row_start, writer_row_end = writer_mat.getOwnershipRange()
    for writer_row in range(writer_row_start, writer_row_end):
        row = matrix_row_permutation[writer_row]
        writer_cols, writer_vals = writer_mat.getRow(writer_row)
        cols = list()
        vals = list()
        for (writer_col, writer_val) in zip(writer_cols, writer_vals):
            if writer_val != 0.:
                cols.append(matrix_col_permutation[writer_col])
                vals.append(writer_val)
        if len(cols) > 0:
            mat.setValues(row, cols, vals, addv=PETSc.InsertMode.INSERT)
    mat.assemble()
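
The traversal pattern above (getOwnershipRange, getRow, setValues, assemble) is the standard way to copy or permute a PETSc matrix row by row. A minimal sketch without the RBniCS permutation maps, copying a small diagonal matrix:

from petsc4py import PETSc

n = 4
src = PETSc.Mat().createAIJ([n, n], nnz=1)
rs, re = src.getOwnershipRange()
for i in range(rs, re):
    src.setValue(i, i, float(i + 1))    # diag(1, 2, 3, 4)
src.assemble()

dst = PETSc.Mat().createAIJ([n, n], nnz=1)
for i in range(rs, re):
    cols, vals = src.getRow(i)          # nonzero columns and values of local row i
    if len(cols) > 0:
        dst.setValues(i, cols, vals, addv=PETSc.InsertMode.INSERT)
dst.assemble()
print(dst.getDiagonal().getArray())     # [1. 2. 3. 4.] on a single rank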

github OP2/PyOP2: pyop2/petsc_base.py (view on GitHub)
def lgmap(self):
        """A PETSc LGMap mapping process-local indices to global
        indices for this :class:`DataSet`.
        """
        lgmap = PETSc.LGMap()
        if self.comm.size == 1:
            lgmap.create(indices=np.arange(self.size, dtype=IntType),
                         bsize=self.cdim, comm=self.comm)
        else:
            lgmap.create(indices=self.halo.local_to_global_numbering,
                         bsize=self.cdim, comm=self.comm)
        return lgmap
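
On a single process the map built above is just the identity on 0..size-1; on more ranks it comes from the halo's local-to-global numbering. A standalone serial sketch:

import numpy as np
from petsc4py import PETSc

size = 5
lgmap = PETSc.LGMap().create(indices=np.arange(size, dtype=PETSc.IntType),
                             bsize=1, comm=PETSc.COMM_SELF)
print(lgmap.apply([0, 2, 4]))   # identity in serial: [0 2 4]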

github anstmichaels/emopt: emopt/misc.py (view on GitHub)
from petsc4py import PETSc
#import decorator # so that sphinx will document decorated functions :S
import warnings, inspect

__author__ = "Andrew Michaels"
__license__ = "GPL License, Version 3.0"
__version__ = "2019.5.6"
__maintainer__ = "Andrew Michaels"
__status__ = "development"

# functions and variables useful for MPI stuff
COMM = PETSc.COMM_WORLD.tompi4py()
RANK = PETSc.COMM_WORLD.getRank()
NOT_PARALLEL = (RANK == 0)
N_PROC = PETSc.COMM_WORLD.getSize()

def run_on_master(func):
    """Prevent a decorated function from running on any node but the master
    node
    """

    def wrapper(*args, **kwargs):
        if(NOT_PARALLEL):
            return func(*args, **kwargs)
        else:
            return

    return wrapper
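
Usage is straightforward: only the master rank executes the decorated function; every other rank returns None. For example (function name illustrative):

@run_on_master
def report(msg):
    print("[master] %s" % msg)    # runs on rank 0 only

report("setup complete")          # no-op on all other ranks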

def n_silicon(wavelength):
    """Load silicon refractive index vs wavelength and interpolate at the desired wavelength.

github Parallel-in-Time/pySDC: pySDC/implementations/problem_classes/GeneralizedFisher_1D_PETSc.py (view on GitHub)
        da = PETSc.DMDA().create([problem_params['nvars']], dof=1, stencil_width=1, comm=problem_params['comm'])

        # invoke super init, passing number of dofs, dtype_u and dtype_f
        super(petsc_fisher_multiimplicit, self).__init__(init=da, dtype_u=dtype_u, dtype_f=dtype_f,
                                                         params=problem_params)

        # compute dx and get local ranges
        self.dx = (self.params.interval[1] - self.params.interval[0]) / (self.params.nvars - 1)
        (self.xs, self.xe) = self.init.getRanges()[0]

        # compute discretization matrix A and identity
        self.A = self.__get_A()
        self.localX = self.init.createLocalVec()

        # setup linear solver
        self.ksp = PETSc.KSP()
        self.ksp.create(comm=self.params.comm)
        self.ksp.setType('cg')
        pc = self.ksp.getPC()
        pc.setType('ilu')
        self.ksp.setInitialGuessNonzero(True)
        self.ksp.setFromOptions()
        self.ksp.setTolerances(rtol=self.params.lsol_tol, atol=self.params.lsol_tol,
                               max_it=self.params.lsol_maxiter)
        self.ksp_itercount = 0
        self.ksp_ncalls = 0

        # setup nonlinear solver
        self.snes = PETSc.SNES()
        self.snes.create(comm=self.params.comm)
        if self.params.nlsol_maxiter <= 1:
            self.snes.setType('ksponly')
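
The same KSP setup sequence (create, setType, configure the PC, set tolerances, setFromOptions) works on any assembled system as well. A minimal sketch solving a 1D Laplacian with CG; Jacobi stands in for the ILU preconditioner used above, since PETSc's native ILU only runs sequentially:

from petsc4py import PETSc

n = 32
A = PETSc.Mat().createAIJ([n, n], nnz=3)
rs, re = A.getOwnershipRange()
for i in range(rs, re):         # assemble the tridiagonal (-1, 2, -1) stencil
    if i > 0:
        A.setValue(i, i - 1, -1.0)
    A.setValue(i, i, 2.0)
    if i < n - 1:
        A.setValue(i, i + 1, -1.0)
A.assemble()

b = A.createVecLeft()
b.set(1.0)
x = A.createVecRight()

ksp = PETSc.KSP().create(comm=PETSc.COMM_WORLD)
ksp.setOperators(A)
ksp.setType('cg')
ksp.getPC().setType('jacobi')
ksp.setTolerances(rtol=1e-8, atol=1e-12, max_it=200)
ksp.setFromOptions()            # allow -ksp_* options to override the above
ksp.solve(b, x)
print("converged in %d iterations" % ksp.getIterationNumber())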