How to use the bigml.resourcehandler.get_resource_type function in bigml

To help you get started, we've selected a few bigml examples based on popular ways this function is used in public projects.
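Before the examples, here is a minimal sketch of what the function itself does, assuming only what the snippets below demonstrate: it accepts either a resource id string or a resource dict carrying a "resource" key, and returns the id's type prefix ("source", "dataset", "model", and so on). The ids are placeholders.

from bigml.resourcehandler import get_resource_type

print(get_resource_type("model/5af06df94e17277501000010"))
# -> "model"
print(get_resource_type({"resource": "dataset/5af06df94e17277501000005"}))
# -> "dataset"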


github bigmlcom / python / bigml / modelfields.py View on Github
def check_model_fields(model):
    """Checks the model structure to see whether it contains the required
    fields information

    """
    inner_key = FIELDS_PARENT.get(get_resource_type(model), 'model')
    if check_model_structure(model, inner_key):
        model = model.get('object', model)
        fields = model.get("fields", model.get(inner_key, {}).get('fields'))
        input_fields = model.get("input_fields")
        # models only need model_fields to work. The rest of resources will
        # need all fields to work
        model_fields = model.get(inner_key, {}).get( \
            'model_fields', {}).keys()
        # fusions don't have input fields
        if input_fields is None and inner_key != "fusion":
            return False
        if not model_fields:
            fields_meta = model.get('fields_meta', \
                model.get(inner_key, {}).get('fields_meta', {}))
            try:
                return fields_meta['count'] == fields_meta['total']
            except KeyError:
                # the excerpt is truncated here; assumed completion: if the
                # counts are missing, report the fields info as incomplete
                return False
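Here get_resource_type picks the key under which each resource type nests its fields substructure (via FIELDS_PARENT), so the same check works for models, ensembles, deepnets and other resources alike. A hedged usage sketch with a placeholder model id, assuming the usual credential environment variables:

from bigml.api import BigML
from bigml.modelfields import check_model_fields

api = BigML()  # assumes BIGML_USERNAME and BIGML_API_KEY are set
model = api.get_model("model/5af06df94e17277501000010")  # placeholder id
if not check_model_fields(model):
    # refetch asking for the complete fields structure
    model = api.get_model(model["resource"], query_string="limit=-1")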
github bigmlcom / python / bigml / predictionhandler.py View on Github
def create_prediction(self, model, input_data=None,
                          args=None, wait_time=3, retries=10):
        """Creates a new prediction.
           The model parameter can be:
            - a simple tree model
            - a simple logistic regression model
            - an ensemble
            - a deepnet
            - a linear regression
            - a fusion
           Note that the old `by_name` argument has been deprecated.

        """
        model_id = None

        resource_type = get_resource_type(model)
        if resource_type not in SUPERVISED_PATHS:
            raise Exception("A supervised model resource id is needed"
                            " to create a prediction. %s found." %
                            resource_type)

        model_id = get_resource_id(model)
        if model_id is not None:
            check_resource(model_id,
                           query_string=TINY_RESOURCE,
                           wait_time=wait_time, retries=retries,
                           raise_on_error=True, api=self)

        if input_data is None:
            input_data = {}
        create_args = {}
        if args is not None:
            create_args.update(args)  # assumed completion, mirroring the other handlers
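A hedged call sketch for this handler through the API client; the id and input field name are placeholders, and any supervised resource id (model, ensemble, logistic or linear regression, deepnet, fusion) would work:

from bigml.api import BigML

api = BigML()
prediction = api.create_prediction(
    "model/5af06df94e17277501000010",
    input_data={"petal length": 4.2},
    args={"name": "my prediction"})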
github bigmlcom / bigmler / bigmler / reify / restchain.py View on Github
def reify_resource(self, resource_id):
        """Redirects to the reify method according to the resource type

        """
        # first check if this is a valid id
        resource_id = get_resource_id(resource_id)

        if resource_id is not None:
            resource_type = get_resource_type(resource_id)

            reify_handler = getattr(self, 'reify_%s' % resource_type)
            message = "Analyzing %s.\n" % resource_id
            self.logger(message)
            reify_handler(resource_id)
            if self.delete:
                self.delete_stored_resource(resource_id)
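The interesting move here is dynamic dispatch: the type string that get_resource_type returns is spliced into a method name. A reduced, self-contained sketch of the same pattern with one hypothetical handler:

from bigml.resourcehandler import get_resource_type

class Reifier:
    def reify_dataset(self, resource_id):
        # hypothetical per-type handler
        print("reifying", resource_id)

    def reify_resource(self, resource_id):
        resource_type = get_resource_type(resource_id)  # e.g. "dataset"
        getattr(self, "reify_%s" % resource_type)(resource_id)

Reifier().reify_resource("dataset/5af06df94e17277501000005")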
github bigmlcom / python / bigml / correlationhandler.py View on Github
def create_correlation(self, dataset, args=None, wait_time=3, retries=10):
        """Creates a correlation from a `dataset`.

        """
        dataset_id = None
        resource_type = get_resource_type(dataset)
        if resource_type == DATASET_PATH:
            dataset_id = get_dataset_id(dataset)
            check_resource(dataset_id,
                           query_string=TINY_RESOURCE,
                           wait_time=wait_time, retries=retries,
                           raise_on_error=True, api=self)
        else:
            raise Exception("A dataset id is needed to create a"
                            " correlation. %s found." % resource_type)

        create_args = {}
        if args is not None:
            create_args.update(args)
        create_args.update({
            "dataset": dataset_id})
github bigmlcom / bigmler / bigmler / reify / restchain.py View on Github
# if there are no preferred fields, fall back to the full fields structure
        if not preferred_fields.keys():
            preferred_fields = resource_fields.fields
        max_column = sorted([field['column_number']
                             for _, field in preferred_fields.items()
                             if field['optype'] != "text"],
                            reverse=True)[0]
        objective_column = resource_fields.fields[objective_id][ \
            'column_number']
        if objective_column != max_column:
            opts['create'].update({"objective_field": {"id": objective_id}})

        if origin != "origin_batch_resource":
            # resize
            if (child['size'] != grandparent['size'] and
                    get_resource_type(parent) == 'source'):
                opts['create'].update({"size": child['size']})

            # generated fields
            if child.get('new_fields', None):
                new_fields = child['new_fields']
                for new_field in new_fields:
                    new_field['field'] = new_field['generator']
                    del new_field['generator']

                opts['create'].update({"new_fields": new_fields})

            u.range_opts(child, grandparent, opts)

        # for batch_predictions, batch_clusters, batch_anomalies generated
        # datasets, attributes cannot be set at creation time, so we
        # must update the resource instead
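In this excerpt, get_resource_type gates an option that only applies when the dataset's parent is a source. A reduced sketch of that guard, with placeholder values standing in for the reify chain's resources:

from bigml.resourcehandler import get_resource_type

parent = "source/4f665b8103ce8920bb000006"  # placeholder id
child = {"size": 1024}                      # placeholder resource excerpts
grandparent = {"size": 4096}

opts = {"create": {}}
if (child["size"] != grandparent["size"] and
        get_resource_type(parent) == "source"):
    # only datasets built straight from a source carry a resize option
    opts["create"].update({"size": child["size"]})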
github bigmlcom / bigmler / bigmler / reify / restutils.py View on Github
def get_fields_changes(resource, referrer=None,
                       updatable_attrs=DEFAULT_UPDATABLE):
    """Changed field attributes

    """
    if referrer is None:
        referrer = {}
    fields_attributes = {}

    resource_fields = Fields(
        {'resource': resource['resource'], 'object': resource}).fields
    resource_type = get_resource_type(resource)
    # for sources, extract all the updatable attributes
    if resource_type == 'source':
        updatable_attrs = SOURCE_UPDATABLE
        for field_id in resource_fields.keys():
            field_opts = {}
            field = resource_fields[field_id]
            for attribute in updatable_attrs:
                if field.get(attribute):
                    field_opts.update({attribute: field[attribute]})
            if field_opts != {}:
                fields_attributes.update({field_id: field_opts})
        return fields_attributes
    # for the rest of resources, check which attributes changed
    if referrer:
        referrer_fields = Fields(
            {'resource': referrer['resource'], 'object': referrer}).fields
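A reduced sketch of the branch this helper takes on the resource type; the attribute tuples are illustrative placeholders, not the library's literal SOURCE_UPDATABLE and DEFAULT_UPDATABLE values:

from bigml.resourcehandler import get_resource_type

resource = {"resource": "source/4f665b8103ce8920bb000006"}  # placeholder
if get_resource_type(resource) == "source":
    updatable_attrs = ("optype",)                       # placeholder
else:
    updatable_attrs = ("name", "label", "description")  # placeholder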
github bigmlcom / python / bigml / associationsethandler.py View on Github
def create_association_set(self, association, input_data=None,
                               args=None, wait_time=3, retries=10):
        """Creates a new association set.

        """
        association_id = None
        resource_type = get_resource_type(association)
        if resource_type == ASSOCIATION_PATH:
            association_id = get_association_id(association)
            check_resource(association_id,
                           query_string=TINY_RESOURCE,
                           wait_time=wait_time, retries=retries,
                           raise_on_error=True, api=self)
        else:
            raise Exception("An association id is needed to create an"
                            " association set. %s found." % resource_type)

        if input_data is None:
            input_data = {}
        create_args = {}
        if args is not None:
            create_args.update(args)
        create_args.update({
            "association": association_id})  # assumed completion of the truncated excerpt
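A hedged call sketch with a placeholder association id and input field:

from bigml.api import BigML

api = BigML()
association_set = api.create_association_set(
    "association/5af06df94e17277501000030",
    input_data={"product": "cassava"})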
github bigmlcom / python / bigml / executionhandler.py View on Github
create_args.update(args)

        if (isinstance(origin_resource, basestring) or
                isinstance(origin_resource, dict)):
            # single script
            scripts = [origin_resource]
        else:
            scripts = origin_resource
        try:
            script_ids = [get_script_id(script) for script in scripts]
        except TypeError:
            raise Exception("A script id or a list of them is needed to create"
                            " a script execution. %s found." %
                            get_resource_type(origin_resource))

        if all([get_resource_type(script_id) == SCRIPT_PATH for
                script_id in script_ids]):
            for script in scripts:
                check_resource(script,
                               query_string=TINY_RESOURCE,
                               wait_time=wait_time, retries=retries,
                               raise_on_error=True, api=self)
        else:
            raise Exception("A script id or a list of them is needed to create"
                            " a script execution. %s found." %
                            get_resource_type(origin_resource))

        if len(scripts) > 1:
            create_args.update({
                "scripts": script_ids})
        else:
            create_args.update({
                "script": script_ids[0]})  # assumed completion: singular key for one script
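A hedged call sketch; the script ids and the WhizzML input name are placeholders:

from bigml.api import BigML

api = BigML()
# a single script
execution = api.create_execution(
    "script/5af06df94e17277501000020",
    args={"inputs": [["maximum", 5]]})
# ... or a list of scripts executed together
execution = api.create_execution([
    "script/5af06df94e17277501000020",
    "script/5af06df94e17277501000021"])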
github bigmlcom / python / bigml / datasethandler.py View on Github
If `wait_time` is higher than 0 then the dataset creation
        request is not sent until the `source` has been created successfully.

        """
        create_args = {}
        if args is not None:
            create_args.update(args)

        if isinstance(origin_resource, list):
            # multidatasets
            create_args = self._set_create_from_datasets_args(
                origin_resource, args=create_args, wait_time=wait_time,
                retries=retries, key="origin_datasets")
        else:
            # dataset from source
            resource_type = get_resource_type(origin_resource)
            if resource_type == SOURCE_PATH:
                source_id = get_source_id(origin_resource)
                if source_id:
                    check_resource(source_id,
                                   query_string=TINY_RESOURCE,
                                   wait_time=wait_time,
                                   retries=retries,
                                   raise_on_error=True, api=self)
                    create_args.update({
                        "source": source_id})
            # dataset from dataset
            elif resource_type == DATASET_PATH:
                create_args = self._set_create_from_datasets_args(
                    origin_resource, args=create_args, wait_time=wait_time,
                    retries=retries, key="origin_dataset")
            # dataset from cluster and centroid
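A hedged call sketch covering the origins this handler accepts; ids are placeholders:

from bigml.api import BigML

api = BigML()
# dataset from a source
dataset = api.create_dataset("source/4f665b8103ce8920bb000006")
# dataset from another dataset, e.g. a sample of it
sample = api.create_dataset(dataset, args={"sample_rate": 0.8})
# multidataset from a list of datasets
merged = api.create_dataset([dataset, sample])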