How to use the thinc.neural._classes.model.Model.__init__ function in thinc

To help you get started, we've selected a few thinc examples based on popular ways it is used in public projects.
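
All of the examples below share the same pattern: a layer subclasses Model and calls Model.__init__ early in its own constructor to set up the base state. A minimal sketch of that pattern, assuming thinc v7's module layout (CustomLayer is a hypothetical name):

from thinc.neural._classes.model import Model

class CustomLayer(Model):
    def __init__(self, nO=None, nI=None, **kwargs):
        # Model.__init__ registers the layer, assigns its id and
        # sets up the parameter memory and ops backend
        Model.__init__(self, **kwargs)
        self.nO = nO  # output width
        self.nI = nI  # input width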


explosion/thinc · thinc/neural/_classes/maxout.py
def __init__(self, nO=None, nI=None, pieces=2, **kwargs):
        Model.__init__(self, **kwargs)
        self.nO = nO
        self.nI = nI
        self.nP = pieces
        self.drop_factor = kwargs.get("drop_factor", 1.0)
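
Constructing the layer directly is a one-liner. A hedged usage sketch, assuming thinc v7 where Maxout is re-exported from thinc.v2v:

from thinc.v2v import Maxout

# 128 output units, 64 inputs, 3 pieces per unit (stored as nP)
layer = Maxout(128, 64, pieces=3)
assert layer.nP == 3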
explosion/thinc · thinc/neural/_classes/layernorm.py
def __init__(self, child=None, **kwargs):
        self.child = child
        if child is not None:
            self._layers = [child]
        else:
            self._layers = []
        Model.__init__(self, **kwargs)
        if "nO" in kwargs:
            self.nO = kwargs["nO"]
        elif getattr(child, "nO", None):
            self.nO = child.nO
        self.nr_upd = 0
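
Because the constructor copies nO from the child when it isn't passed explicitly, wrapping a sized layer yields a sized wrapper. A sketch, assuming thinc v7's thinc.misc re-export:

from thinc.misc import LayerNorm
from thinc.v2v import Maxout

normed = LayerNorm(Maxout(128, 64))
assert normed.nO == 128  # taken from the child layer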
explosion/thinc · thinc/neural/_classes/static_vectors.py
def __init__(self, lang, nO, drop_factor=0.0, column=0):
        Model.__init__(self)
        self.column = column
        self.nO = nO
        # This doesn't seem the cleverest solution,
        # but it ensures multiple models load the
        # same copy of spaCy if they're deserialised.
        self.lang = lang
        vectors = self.get_vectors()
        self.nM = vectors.shape[1]
        self.drop_factor = drop_factor
        if self.nM == 0:
            raise ValueError(
                "Cannot create vectors table with dimension 0.\n"
                "If you're using pre-trained vectors, are the vectors loaded?"
            )
        self.nV = vectors.shape[0]
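
Here Model.__init__ runs before the vector table is loaded from spaCy, so construction fails fast with the ValueError above if the named vectors are empty. A sketch; "en_core_web_md" is an example spaCy package that must be installed and provide vectors:

from thinc.neural._classes.static_vectors import StaticVectors

# Loads the spaCy vectors for the named model; raises ValueError
# if the loaded vectors table has dimension 0
vectors = StaticVectors("en_core_web_md", nO=96)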
explosion/thinc · thinc/neural/_classes/hash_embed.py
def __init__(self, nO, nV, seed=None, **kwargs):
        Model.__init__(self, **kwargs)
        self.column = kwargs.get("column", 0)
        self.nO = nO
        self.nV = nV
        
        if seed is not None:
            self.seed = seed
        else:
            self.seed = self.id
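
Note that the seed defaults to the model id assigned by Model.__init__, which is why the base constructor runs first. A sketch, assuming thinc v7 where HashEmbed is re-exported from thinc.i2v:

from thinc.i2v import HashEmbed

# 64-dimensional rows in a 2000-row table; a fixed seed keeps the
# hashing reproducible across runs
embed = HashEmbed(64, 2000, seed=1)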
explosion/thinc · thinc/neural/_classes/encoder_decoder.py
def __init__(self, nM=300, nH=6, device="cpu"):
        Model.__init__(self)
        self.attn = MultiHeadedAttention(nM=nM, nH=nH)
        self.ffd = PositionwiseFeedForward(nM, 4 * nM)
        self.norm = PyTorchWrapper(PytorchLayerNorm(nM, device=device))
        self.nM = nM
        self.layers_ = [self.attn, self.ffd, self.norm]
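
This layer builds PyTorch-backed sublayers after calling Model.__init__, so torch must be importable. A sketch using the constructor above; the module is experimental, so treat the import path as version-specific:

from thinc.neural._classes.encoder_decoder import EncoderLayer

# One Transformer encoder block: self-attention, position-wise
# feed-forward and a PyTorch layer norm
layer = EncoderLayer(nM=256, nH=8)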
explosion/thinc · thinc/neural/_classes/feed_forward.py
def __init__(self, layers, **kwargs):
        self._layers = []
        for layer in layers:
            if isinstance(layer, FeedForward):
                self._layers.extend(layer._layers)
            else:
                self._layers.append(layer)
        Model.__init__(self, **kwargs)
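
The loop splices nested FeedForward instances flat before Model.__init__ runs, so composed pipelines stay one level deep. A sketch, with Affine via the hedged thinc.v2v re-export:

from thinc.v2v import Affine
from thinc.neural._classes.feed_forward import FeedForward

inner = FeedForward([Affine(64, 32), Affine(32, 16)])
outer = FeedForward([inner, Affine(16, 8)])
assert len(outer._layers) == 3  # inner's layers were spliced in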
explosion/thinc · thinc/neural/_classes/affine.py
def __init__(self, nO=None, nI=None, **kwargs):
        Model.__init__(self, **kwargs)
        self.nO = nO
        self.nI = nI
        self.drop_factor = kwargs.get("drop_factor", 1.0)
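
A usage sketch, again assuming thinc v7's thinc.v2v re-export; exact weight-initialisation behaviour varies by version:

import numpy
from thinc.v2v import Affine

model = Affine(10, 5)  # 10 outputs, 5 inputs
X = numpy.zeros((4, 5), dtype="f")
Y = model(X)  # predict a (4, 10) batch of outputs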
explosion/thinc · thinc/neural/_classes/selu.py
def __init__(self, nO=None, nI=None, **kwargs):
        Model.__init__(self, **kwargs)
        self.nO = nO
        self.nI = nI
        self.drop_factor = kwargs.get("drop_factor", 1.0)
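
SELU's constructor is identical to Affine's, so it is used the same way (again assuming the thinc.v2v re-export):

from thinc.v2v import SELU

model = SELU(64, 64)  # same nO/nI/drop_factor handling as Affine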
explosion/thinc · thinc/neural/_classes/convolution.py
def __init__(self, nW=2, gap=0):
        assert gap == 0
        Model.__init__(self)
        self.nW = nW
        self.gap = gap
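
In the thinc source this constructor belongs to ExtractWindow (the class name comes from convolution.py, not from the snippet itself); nW is the number of neighbours concatenated on each side. A sketch, assuming the thinc.t2t re-export:

from thinc.t2t import ExtractWindow

# With nW=1, each (n, d) input becomes (n, 3*d): every row is
# concatenated with one neighbour on each side
window = ExtractWindow(nW=1)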
explosion/thinc · thinc/neural/_classes/encoder_decoder.py
def __init__(self, nM=300, nH=6, nS=6, device="cpu"):
        Model.__init__(self)
        self.stack = clone(EncoderLayer(nM=nM, nH=nH, device=device), nS)
        self.norm = PyTorchWrapper(PytorchLayerNorm(nM=nM, device=device))
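
Judging by the constructor, this is the encoder stack class (Encoder in the thinc source, hedged since the name isn't shown above): it clones nS EncoderLayers and applies a final PyTorch layer norm, so torch is required here too. A sketch:

from thinc.neural._classes.encoder_decoder import Encoder

encoder = Encoder(nM=256, nH=8, nS=6)  # six stacked encoder layers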