Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
>>> serveralIteration = SeveralIteration(2)
creating: createSeveralIteration
"""
def __init__(self, interval, bigdl_type="float"):
    """
    Create a SeveralIteration trigger.

    :param interval: interval is the "n" where an action is triggered
                     every "n" iterations
    :param bigdl_type: numeric type tag used on the JVM side ("float" by default)
    """
    # Construct the JVM-side trigger; jvalue is None so a new object is created.
    JavaValue.__init__(self, None, bigdl_type, interval)
class MaxScore(JavaValue):
    """
    A trigger that triggers an action when validation score larger than "max" score

    >>> maxScore = MaxScore(0.4)
    creating: createMaxScore
    """

    def __init__(self, max, bigdl_type="float"):
        """
        Create a MaxScore trigger.

        :param max: max score
        """
        # No existing jvalue to wrap — construct a fresh JVM-side trigger.
        super(MaxScore, self).__init__(None, bigdl_type, max)
def __init__(self, jvalue, bigdl_type, *args):
    """
    Wrap an existing JVM object, or create a new one on the JVM side.

    :param jvalue: an existing py4j JavaObject to wrap, or None/falsy to
                   construct a fresh JVM-side instance
    :param bigdl_type: numeric type tag used on the JVM side
    :param args: constructor arguments forwarded to the JVM-side class
    """
    if jvalue:
        # Use isinstance rather than an exact-type check: the original
        # `type(jvalue) == JavaObject` rejected subclasses of JavaObject,
        # which are still valid py4j proxies.
        assert isinstance(jvalue, JavaObject)
        self.value = jvalue
    else:
        self.value = callBigDlFunc(
            bigdl_type, JavaValue.jvm_class_constructor(self), *args)
    self.bigdl_type = bigdl_type
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from bigdl.nn.layer import Layer, Node, SharedStaticUtils, Container
from bigdl.util.common import callBigDlFunc, JTensor, JavaValue
# Python 2/3 compatibility: `long` and `unicode` were removed in Python 3,
# so alias them to their Python 3 counterparts.  Compare sys.version_info
# (a proper tuple) instead of the lexicographic `sys.version >= '3'`
# string comparison, which is fragile and non-idiomatic.
if sys.version_info[0] >= 3:
    long = int
    unicode = str
class InferShape(JavaValue):
def __init__(self, bigdl_type="float"):
    # Only record the JVM numeric type tag; unlike other JavaValue
    # subclasses, no JVM-side object is constructed here.
    self.bigdl_type = bigdl_type
@classmethod
def __to_keras_shape(cls, shape):
    """Convert a shape list to a Keras-style tuple: the leading (batch)
    dimension is replaced by None, remaining dimensions are kept."""
    trailing_dims = shape[1:]
    return tuple([None] + trailing_dims)
def __process_shape(self, shape):
    """Normalize a shape result: a one-element list (single input/output)
    is unwrapped to a single Keras-style tuple; otherwise a list of
    Keras-style tuples is returned, one per input/output."""
    if len(shape) == 1:
        return self.__to_keras_shape(shape[0])
    return [self.__to_keras_shape(s) for s in shape]
def get_input_shape(self):
"""
Return a list of shape tuples if there are multiple inputs.
"""
[[Exponential]] is a learning rate schedule, which rescale the learning rate by
lr_{n + 1} = lr * decayRate `^` (iter / decayStep)
:param decay_step: the interval for lr decay
:param decay_rate decay rate
:param stair_case if true, iter / decayStep is an integer division
and the decayed learning rate follows a staircase function.
>>> exponential = Exponential(100, 0.1)
creating: createExponential
"""
def __init__(self, decay_step, decay_rate, stair_case=False, bigdl_type="float"):
    """
    Create an Exponential learning-rate schedule.

    :param decay_step: the interval (in iterations) for lr decay
    :param decay_rate: decay rate
    :param stair_case: if True, iter / decay_step uses integer division,
                       so the decayed learning rate follows a staircase
    :param bigdl_type: numeric type tag used on the JVM side
    """
    JavaValue.__init__(self, None, bigdl_type, decay_step, decay_rate, stair_case)
class Step(JavaValue):
    """
    A learning rate decay policy, where the effective learning rate is
    calculated as base_lr * gamma ^ (floor(iter / step_size))

    :param step_size: the step size used in floor(iter / step_size)
    :param gamma: the decay factor applied at every step

    >>> step = Step(2, 0.3)
    creating: createStep
    """

    def __init__(self, step_size, gamma, bigdl_type="float"):
        # Construct the JVM-side schedule (no existing jvalue to wrap).
        super(Step, self).__init__(None, bigdl_type, step_size, gamma)
class Default(JavaValue):
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import six
from bigdl.util.common import JavaValue
from zoo.common.utils import callZooFunc
# Python 2/3 compatibility: `long` and `unicode` were removed in Python 3,
# so alias them to their Python 3 counterparts.  Compare sys.version_info
# (a proper tuple) instead of the lexicographic `sys.version >= '3'`
# string comparison, which is fragile and non-idiomatic.
if sys.version_info[0] >= 3:
    long = int
    unicode = str
class TextFeature(JavaValue):
    """
    Each TextFeature keeps information of a single text record.
    It can include various status (if any) of a text,
    e.g. original text content, uri, category label, tokens, index representation
    of tokens, BigDL Sample representation, prediction result and so on.
    """

    def __init__(self, text=None, label=None, uri=None, jvalue=None, bigdl_type="float"):
        # Validate the optional string fields before touching the JVM.
        if text is not None:
            assert isinstance(text, six.string_types), "text of a TextFeature should be a string"
        if uri is not None:
            assert isinstance(uri, six.string_types), "uri of a TextFeature should be a string"
        # A label, when supplied, is coerced to int before being forwarded.
        if label is None:
            super(TextFeature, self).__init__(jvalue, bigdl_type, text, uri)
        else:
            super(TextFeature, self).__init__(jvalue, bigdl_type, text, int(label), uri)
>>> maxScore = MaxScore(0.4)
creating: createMaxScore
"""
def __init__(self, max, bigdl_type="float"):
    """
    Create a MaxScore trigger.

    :param max: max score
    :param bigdl_type: numeric type tag used on the JVM side ("float" by default)
    """
    # Construct the JVM-side trigger; jvalue is None so a new object is created.
    JavaValue.__init__(self, None, bigdl_type, max)
class MinLoss(JavaValue):
    """
    A trigger that triggers an action when training loss less than "min" loss

    >>> minLoss = MinLoss(0.1)
    creating: createMinLoss
    """

    def __init__(self, min, bigdl_type="float"):
        """
        Create a MinLoss trigger.

        :param min: min loss
        """
        # No existing jvalue to wrap — construct a fresh JVM-side trigger.
        super(MinLoss, self).__init__(None, bigdl_type, min)
from zoo.pipeline.api.net.tf_dataset import MapDataset
from zoo.pipeline.api.net.utils import _find_placeholders, to_bigdl_optim_method
from zoo.pipeline.estimator import Estimator
from zoo.util import nest
# Python 2/3 compatibility: `long` and `unicode` were removed in Python 3,
# so alias them to their Python 3 counterparts.  Compare sys.version_info
# (a proper tuple) instead of the lexicographic `sys.version >= '3'`
# string comparison, which is fragile and non-idiomatic.
if sys.version_info[0] >= 3:
    long = int
    unicode = str
class IdentityCriterion(Criterion):
    """Criterion wrapper that constructs the JVM-side IdentityCriterion
    with float precision and no extra arguments."""

    def __init__(self):
        super(IdentityCriterion, self).__init__(None, "float")
class TFValidationMethod(JavaValue):
    """JavaValue wrapper that forwards a validation method, its display
    name, and the output/label indices to the JVM side."""

    def __init__(self, val_method, name, output_indices, label_indices):
        super(TFValidationMethod, self).__init__(
            None, "float", val_method, name, output_indices, label_indices)
class StatelessMetric(JavaValue):
    """JavaValue wrapper for a metric identified by name and index; the
    name and index are also kept on the Python side for direct access."""

    def __init__(self, metric_name, idx):
        # Store local copies before creating the JVM-side object so callers
        # can read them without touching the JVM.
        self.name = metric_name
        self.idx = idx
        super(StatelessMetric, self).__init__(None, "float", metric_name, idx)
class BigDLMetric(object):
def __init__(self, val_method, outputs, labels):
self.val_method = val_method
self.outputs = outputs
from bigdl.util.common import DOUBLEMAX
from bigdl.util.common import JTensor
from bigdl.util.common import JavaValue
from bigdl.util.common import callBigDlFunc
from bigdl.util.common import callJavaFunc
from bigdl.util.common import get_node_and_core_number
from bigdl.util.common import init_engine
from bigdl.util.common import to_list
from bigdl.dataset.dataset import *
# Python 2/3 compatibility: `long` and `unicode` were removed in Python 3,
# so alias them to their Python 3 counterparts.  Compare sys.version_info
# (a proper tuple) instead of the lexicographic `sys.version >= '3'`
# string comparison, which is fragile and non-idiomatic.
if sys.version_info[0] >= 3:
    long = int
    unicode = str
class Top1Accuracy(JavaValue):
    """
    Calculate the percentage that output's max probability index equals target.

    >>> top1 = Top1Accuracy()
    creating: createTop1Accuracy
    """

    def __init__(self, bigdl_type="float"):
        # Construct the JVM-side metric (no existing jvalue to wrap).
        super(Top1Accuracy, self).__init__(None, bigdl_type)
class TreeNNAccuracy(JavaValue):
    """
    Calculate the percentage that output's max probability index equals target.

    >>> top1 = TreeNNAccuracy()
    creating: createTreeNNAccuracy
    """
class FeatureTransformer(JavaValue):
    """
    Base wrapper for JVM-backed feature transformers.

    Extra positional args are forwarded to the JVM-side constructor.
    """

    def __init__(self, bigdl_type="float", *args):
        # Build the JVM-side transformer directly; there is never a
        # pre-existing jvalue to wrap for a transformer.
        self.value = callBigDlFunc(
            bigdl_type, JavaValue.jvm_class_constructor(self), *args)

    def transform(self, sample, bigdl_type="float"):
        """Apply this transformer to a single sample."""
        transformed = callBigDlFunc(bigdl_type, "transform", self.value, sample)
        return transformed

    def __call__(self, sample_rdd, bigdl_type="float"):
        """Apply this transformer to an RDD of samples."""
        transformed_rdd = callBigDlFunc(bigdl_type, "transformRdd", self.value, sample_rdd)
        return transformed_rdd
class Pipeline(JavaValue):
    """
    Chains a list of transformers into a single JVM-side transformer.
    """

    def __init__(self, transformers, bigdl_type="float"):
        # The chained transformer lives on the JVM; keep its handle.
        self.transformer = callBigDlFunc(bigdl_type, "chainTransformer", transformers)

    def transform(self, sample, bigdl_type="float"):
        """Apply the chained transformers to a single sample and rebuild
        the result array from the returned (data, shape) pair."""
        transformed = callBigDlFunc(bigdl_type, "transform", self.transformer, sample)
        data, shape = transformed[0], transformed[1]
        return data.array.reshape(shape.array)

    def __call__(self, sample_rdd, bigdl_type="float"):
        """Apply the chained transformers to an RDD of samples."""
        result_rdd = callBigDlFunc(bigdl_type, "transformRdd", self.transformer, sample_rdd)
        return result_rdd
class Crop(FeatureTransformer):
    """Transformer built from a start position and a patch size; both are
    forwarded to the JVM-side Crop constructor."""

    def __init__(self, start, patch_size, bigdl_type="float"):
        super(Crop, self).__init__(bigdl_type, start, patch_size)