How to use the autokeras.hypermodel.base module in autokeras

To help you get started, we’ve selected a few autokeras examples, based on popular ways it is used in public projects.

github keras-team / autokeras / autokeras / tuner.py
    def update_space(self, hyperparameters):
        # Get the block names.
        preprocess_graph, keras_graph = self.hyper_graph.build_graphs(
            hyperparameters)

        # Add the new Hyperparameters to different categories.
        ref_names = {hp.name for hp in self.hyperparameters.space}
        for hp in hyperparameters.space:
            if hp.name not in ref_names:
                hp_type = None
                if any([hp.name.startswith(block.name)
                        for block in self.hyper_graph.blocks
                        if isinstance(block, base.HyperBlock)]):
                    hp_type = GreedyOracle.HYPER
                elif any([hp.name.startswith(block.name)
                          for block in preprocess_graph.blocks]):
                    hp_type = GreedyOracle.PREPROCESS
                elif any([hp.name.startswith(block.name)
                          for block in keras_graph.blocks]):
                    hp_type = GreedyOracle.ARCH
                else:
                    hp_type = GreedyOracle.OPT
                self._hp_names[hp_type].add(hp.name)

        super().update_space(hyperparameters)
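The branching above relies on hyperparameter names being prefixed by the name of the block that created them. A small standalone illustration of that prefix matching (the names below are hypothetical):

hp_name = 'image_block_1/conv_block_1/filters'    # hypothetical hyperparameter name
block_names = ['image_block_1', 'dense_block_1']  # hypothetical block names

# A hyperparameter belongs to whichever block's name prefixes it, mirroring
# the any(hp.name.startswith(block.name) ...) checks above.
owner = next((name for name in block_names if hp_name.startswith(name)), None)
assert owner == 'image_block_1'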
github keras-team / autokeras / autokeras / auto_model.py
    def _meta_build(self, dataset):
        # Using functional API.
        if all([isinstance(output, base.Node) for output in self.outputs]):
            self.hyper_graph = graph.HyperGraph(inputs=self.inputs,
                                                outputs=self.outputs)
        # Using input/output API.
        elif all([isinstance(output, base.Head) for output in self.outputs]):
            self.hyper_graph = meta_model.assemble(inputs=self.inputs,
                                                   outputs=self.outputs,
                                                   dataset=dataset,
                                                   seed=self.seed)
            self.outputs = self.hyper_graph.outputs
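For context, a hedged sketch of the two call styles the branches above distinguish, using the top-level autokeras API (the specific blocks chosen here are illustrative):

import autokeras as ak

# Functional API: the outputs passed to AutoModel are Node objects produced by
# chaining blocks, so the first branch builds a HyperGraph directly.
input_node = ak.ImageInput()
output_node = ak.ImageBlock()(input_node)
output_node = ak.ClassificationHead()(output_node)
functional_model = ak.AutoModel(inputs=input_node, outputs=output_node)

# Input/output API: the outputs are Head objects, so the second branch lets
# meta_model.assemble infer the architecture from the dataset.
io_model = ak.AutoModel(inputs=ak.ImageInput(), outputs=ak.ClassificationHead())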
github keras-team / autokeras / autokeras / hypermodel / graph.py
    def _get_metrics(self):
        metrics = {}
        for output_node in self.outputs:
            block = output_node.in_blocks[0]
            if isinstance(block, base.Head):
                metrics[block.name] = block.metrics
        return metrics
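Each Head can carry its own metrics, which is what the loop above collects per output node. A minimal sketch, assuming the standard ClassificationHead constructor:

import autokeras as ak

# A head declared with explicit metrics; _get_metrics returns them keyed by
# the head's block name.
head = ak.ClassificationHead(metrics=['accuracy'])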
github keras-team / autokeras / autokeras / hypermodel / preprocessor.py
import warnings

import numpy as np
import tensorflow as tf
from sklearn.preprocessing import normalize
from tensorflow.python.util import nest

from autokeras import const
from autokeras import encoder
from autokeras import utils
from autokeras.hypermodel import base

with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    import lightgbm as lgb


class Normalization(base.Preprocessor):
    """ Perform basic image transformation and augmentation.

    # Arguments
        mean: Tensor. The mean value. Shape: (data last dimension length,)
        std: Tensor. The standard deviation. Shape is the same as mean.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.sum = 0
        self.square_sum = 0
        self.count = 0
        self.mean = None
        self.std = None
        self.shape = None
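The counters initialized above point to a running-statistics scheme. A conceptual sketch in plain numpy (not the autokeras API) of how such running sums yield the mean and standard deviation:

import numpy as np

# Accumulate per-feature sums over batches (the batches here are made up).
total, square_total, count = 0.0, 0.0, 0
for batch in (np.random.rand(8, 3) for _ in range(4)):
    total += batch.sum(axis=0)
    square_total += np.square(batch).sum(axis=0)
    count += batch.shape[0]

# Finalize: mean = E[x], std = sqrt(E[x^2] - E[x]^2).
mean = total / count
std = np.sqrt(square_total / count - np.square(mean))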
github keras-team / autokeras / autokeras / hypermodel / compiler.py
# Compile the graph before the preprocessing step.
BEFORE = {
    preprocessor_module.FeatureEngineering: feature_engineering_input,
    preprocessor_module.LightGBM: lightgbm_head,
}

# Compile the graph after the preprocessing step.
AFTER = {
    block_module.EmbeddingBlock: embedding_max_features,
}

# Compile the HyperGraph.
HYPER = {**{
    hyperblock_module.StructuredDataBlock: structured_data_block_heads,
}, **BEFORE}

ALL_CLASSES = {
    **vars(base),
    **vars(node_module),
    **vars(head_module),
    **vars(block_module),
    **vars(preprocessor_module),
    **vars(hyperblock_module),
}


def serialize(obj):
    return tf.keras.utils.serialize_keras_object(obj)


def deserialize(config, custom_objects=None):
    return tf.keras.utils.deserialize_keras_object(
        config,
        module_objects={**ALL_CLASSES},
        custom_objects=custom_objects)
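A hedged round-trip sketch of the two helpers above; it assumes the registered classes implement get_config/from_config the way tf.keras.utils.serialize_keras_object expects (ConvBlock is used here only as an example):

from autokeras.hypermodel import block as block_module

# Serialize a block to a config dict and rebuild it through the module registry.
config = serialize(block_module.ConvBlock())
restored = deserialize(config)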
github keras-team / autokeras / autokeras / hypermodel / node.py
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow.python.util import nest

from autokeras import utils
from autokeras.hypermodel import base


class TextNode(base.Node):
    pass


class Input(base.Node):
    """Input node for tensor data.

    The data should be numpy.ndarray or tf.data.Dataset.
    """

    def _check(self, x):
        """Record any information needed by transform."""
        if not isinstance(x, (np.ndarray, tf.data.Dataset)):
            raise TypeError('Expect the data to Input to be numpy.ndarray or '
                            'tf.data.Dataset, but got {type}.'.format(type=type(x)))
        if isinstance(x, np.ndarray) and not np.issubdtype(x.dtype, np.number):
            raise TypeError('Expect the data to Input to be numerical, but got '
                            '{type}.'.format(type=x.dtype))
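A quick illustration of the check above with the two accepted input types (calling the private _check hook directly is only for demonstration):

import numpy as np
import tensorflow as tf

from autokeras.hypermodel import node

input_node = node.Input()
input_node._check(np.zeros((10, 4), dtype='float32'))                     # numerical ndarray: accepted
input_node._check(tf.data.Dataset.from_tensor_slices(np.zeros((10, 4))))  # Dataset: accepted
# input_node._check(np.array(['a', 'b']))  # non-numerical ndarray: raises TypeError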
github keras-team / autokeras / autokeras / hypermodel / graph.py
    @staticmethod
    def _is_keras_model_inputs(node):
        for block in node.in_blocks:
            if not isinstance(block, base.Preprocessor):
                return False
        for block in node.out_blocks:
            if not isinstance(block, base.Preprocessor):
                return True
        return False
github keras-team / autokeras / autokeras / hypermodel / hyperblock.py
        return output_node

    def build(self, hp, inputs=None):
        input_node = nest.flatten(inputs)[0]
        output_node = self.build_feature_engineering(hp, input_node)
        output_node = self.build_body(hp, output_node)
        return output_node


class TimeSeriesBlock(base.HyperBlock):

    def build(self, hp, inputs=None):
        raise NotImplementedError


class GeneralBlock(base.HyperBlock):
    """A general neural network block when the input type is unknown.

    When the input type is unknown. The GeneralBlock would search in a large space
    for a good model.

    # Arguments
        name: String.
    """

    def build(self, hp, inputs=None):
        raise NotImplementedError
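
For readers unfamiliar with the hp argument passed to build throughout these snippets: it is a Keras Tuner HyperParameters container. An illustrative, standalone sketch of sampling from it (not autokeras's actual search space):

import kerastuner

hp = kerastuner.HyperParameters()
num_layers = hp.Int('num_layers', min_value=1, max_value=3, default=2)
activation = hp.Choice('activation', ['relu', 'tanh'], default='relu')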