How to use the celery.group function in celery

To help you get started, we’ve selected a few celery.group examples based on how it is commonly used in public projects.


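Before the examples, here is a minimal sketch of the primitive itself. The broker URL and the add task are assumptions made for illustration, not part of the projects below; the later sketches reuse this app/add pair to stay short.

from celery import Celery, group

# Hypothetical app and task, used only to keep the sketches on this page short.
app = Celery('demo',
             broker='redis://localhost:6379/0',
             backend='redis://localhost:6379/0')

@app.task
def add(x, y):
    return x + y

# A group runs its signatures in parallel and returns a GroupResult that
# collects the individual return values in order.
job = group(add.s(i, i) for i in range(5))
result = job.apply_async()
print(result.get(timeout=10))  # [0, 2, 4, 6, 8]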

Example from celery/celery (t/integration/test_canvas.py)
def test_nested_group_chain(self, manager):
    try:
        manager.app.backend.ensure_chords_allowed()
    except NotImplementedError as e:
        raise pytest.skip(e.args[0])

    c = chain(
        add.si(1, 0),
        group(
            add.si(1, 100),
            chain(
                add.si(1, 200),
                group(
                    add.si(1, 1000),
                    add.si(1, 2000),
                ),
            ),
        ),
        add.si(1, 10),
    )
    res = c()
    assert res.get(timeout=TIMEOUT) == 11
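
A group followed by another signature inside a chain, as in the test above, is upgraded to a chord, which is why the test first calls ensure_chords_allowed(). A small sketch of that upgrade, reusing the hypothetical app and add task from the intro sketch plus an assumed tsum task:

from celery import chain, group

@app.task
def tsum(numbers):
    return sum(numbers)

# The group becomes the chord header; tsum becomes the body and receives the
# list of group results once they have all finished.
res = chain(group(add.s(1, 1), add.s(2, 2)), tsum.s())()
print(res.get(timeout=10))  # 6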

Example from celery/celery (t/integration/test_canvas.py)
def test_chain_of_chords_with_two_tasks(self, manager):
    try:
        manager.app.backend.ensure_chords_allowed()
    except NotImplementedError as e:
        raise pytest.skip(e.args[0])

    c = add.si(1, 0)
    c = c | group(add.s(1), add.s(1))
    c = c | tsum.s()
    c = c | add.s(1)
    c = c | chord(group(add.s(1), add.s(1)), tsum.s())

    res = c()
    assert res.get(timeout=TIMEOUT) == 12

Example from celery/celery (t/unit/tasks/test_trace.py)
def test_callbacks__only_groups(self, group_, maybe_signature):
    sig1 = group([Mock(name='g1'), Mock(name='g2')], app=self.app)
    sig2 = group([Mock(name='g3'), Mock(name='g4')], app=self.app)
    sig1.apply_async = Mock(name='gapply')
    sig2.apply_async = Mock(name='gapply')
    request = {'callbacks': [sig1, sig2], 'root_id': 'root'}

    def passt(s, *args, **kwargs):
        return s
    maybe_signature.side_effect = passt
    retval, _ = self.trace(self.add, (2, 2), {}, request=request)
    sig1.apply_async.assert_called_with(
        (4,), parent_id='id-1', root_id='root', priority=None
    )
    sig2.apply_async.assert_called_with(
        (4,), parent_id='id-1', root_id='root', priority=None
    )
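
The unit test above verifies that groups passed as callbacks are applied with the parent task's result; in application code that situation usually comes from link(). A sketch with the hypothetical add task from the intro sketch:

# Each signature in the linked group receives the parent result (4) as its
# first argument once add(2, 2) succeeds.
sig = add.s(2, 2)
sig.link(group(add.s(10), add.s(100)))
sig.apply_async()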

Example from zenodo/zenodo (zenodo/modules/utils/tasks.py)
def make_oai_task_group(oais):
    """Make a celery group for an OAISet.

    Since for each OAISet any given record has to be modified by either
    removing or adding the OAISet.spec, it's safe to create a single
    group per OAISet for all records (no risk of race conditions in
    parallel execution).

    :param oais: OAISet for which the task group is to be made.
    :type oais: invenio_oaiserver.modules.OAISet
    """
    spec_q = Q('match', **{'_oai.sets': oais.spec})
    pattern_q = Q('query_string', query=oais.search_pattern)
    spec_remove_q = Q('bool', must=spec_q, must_not=pattern_q)
    spec_add_q = Q('bool', must=pattern_q, must_not=spec_q)
    return group(ichain(iter_record_oai_tasks(spec_remove_q, oais.spec,
                                              remove_oaiset_spec),
                        iter_record_oai_tasks(spec_add_q, oais.spec,
                                              add_oaiset_spec)))
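
The function above merges two lazily built sets of per-record signatures into one group with itertools.chain, so the "remove spec" and "add spec" tasks all run in the same parallel batch. A generic sketch of that pattern, again with the hypothetical add task:

from itertools import chain as ichain
from celery import group

# group() accepts any iterable of signatures, so several generators can be
# stitched together and still dispatched as a single parallel batch.
job = group(ichain(
    (add.s(i, 1) for i in range(3)),
    (add.s(i, 2) for i in range(3)),
))
job.apply_async()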

Example from kernelci/kernelci-backend (app/taskqueue/tasks.py)
def run_batch_group(batch_op_list, db_options):
    """Execute a list of batch operations.

    :param batch_op_list: List of JSON object used to build the batch
    operation.
    :type batch_op_list: list
    :param db_options: The database connection parameters.
    :type db_options: dictionary
    """
    job = celery.group(
        [
            execute_batch.s(batch_op, db_options)
            for batch_op in batch_op_list
        ]
    )
    result = job.apply_async()
    while not result.ready():
        pass
    return result.get()
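
The busy-wait above spins until every batch operation finishes, but GroupResult.get() already blocks and accepts a timeout and polling interval. A sketch of the simpler form, under the same assumptions as the function above (the one-hour timeout is illustrative):

job = celery.group(
    [execute_batch.s(batch_op, db_options) for batch_op in batch_op_list]
)
result = job.apply_async()
# get() blocks until every child task has finished (or the timeout expires)
# and returns the list of individual return values.
return result.get(timeout=60 * 60, interval=0.5)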

Example from mozilla/kitsune (kitsune/search/management/commands/esindex.py)
post_index_task = switch_es_index.si(index_alias=index_alias,
                                     new_index_name=new_index_name, **task_kwargs)

# Task to run in order to add the objects that have been inserted into the
# database while indexing_tasks was running. We pass the indexing timestamp,
# so it is possible to index the later items.
# TODO: Find a better way so it can be generalized, not depend on timestamp
missed_index_task = index_missing_objects.si(index_generation_time=index_time,
                                             locale=locale, **task_kwargs)

# http://celery.readthedocs.io/en/latest/userguide/canvas.html#chords
# chord_tasks = chord(header=list(indexing_tasks), body=post_index_task)
# http://celery.readthedocs.io/en/latest/userguide/canvas.html#chain
chain(pre_index_task).apply()
group(indexing_tasks).apply()
post_index_task.apply()
missed_index_task.apply()

message = ("Successfully issued tasks for {} {}, total {} items"
           .format(str(es_document), locale, queryset.count()))
log.info(message)
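
Note that the management command above calls .apply() on each piece, which runs the tasks eagerly in the current process instead of sending them to workers. A sketch of the difference, using the hypothetical add task from the intro sketch:

# apply() executes inline, which suits management commands and tests;
# apply_async() publishes the tasks to the broker for workers to pick up.
eager = group(add.s(1, 1), add.s(2, 2)).apply()
print(eager.get())  # [2, 4] without any worker running

queued = group(add.s(1, 1), add.s(2, 2)).apply_async()  # needs a running worker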

Example from willemarcel/osmcha-django (osmchadjango/changeset/tasks.py)
def get_filter_changeset_file(url, geojson_filter=settings.CHANGESETS_FILTER):
    """Filter the changesets of the replication file by the area defined in the
    GeoJSON file.
    """
    cl = ChangesetList(url, geojson_filter)
    group(create_changeset.s(c['id']) for c in cl.changesets)()
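
Calling the group object directly, as above, is shorthand for applying it asynchronously. A sketch with the hypothetical add task:

job = group(add.s(n, n) for n in (1, 2, 3))
job()  # equivalent to job.apply_async()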

Example from lavalamp-/ws-backend-community (tasknode/tasks/scanning/ip.py)
    if scan_config.ip_address_historic_dns:
        collection_sigs.append(get_historic_dns_data_for_ip_address.si(**task_kwargs))
    if scan_config.ip_address_as_data:
        collection_sigs.append(get_as_data_for_ip_address.si(**task_kwargs))
    if scan_config.ip_address_whois_data:
        collection_sigs.append(get_whois_data_for_ip_address.si(**task_kwargs))
    if scan_network_services:
        network_service_sigs = []
        network_service_sigs.append(scan_ip_address_for_network_services.si(**task_kwargs))
        if scan_config.scan_network_services:
            network_service_sigs.append(inspect_network_services_from_ip_address.si(**task_kwargs))
        if len(network_service_sigs) > 1:
            collection_sigs.append(chain(network_service_sigs))
        else:
            collection_sigs.append(network_service_sigs[0])
    task_sigs.append(group(collection_sigs))
    task_sigs.append(create_report_for_ip_address_scan.si(**task_kwargs))
    task_sigs.append(apply_flags_to_ip_address_scan.si(**task_kwargs))
    task_sigs.append(update_ip_address_scan_elasticsearch.si(**task_kwargs))
    task_sigs.append(update_ip_address_scan_completed.si(**task_kwargs))
    scanning_status_signature = update_ip_address_scanning_status.si(
        ip_address_uuid=ip_address_uuid,
        scanning_status=False,
    )
    task_sigs.append(scanning_status_signature)
    if config.pubsub_enabled:
        task_sigs.append(publish_report_for_ip_address_scan.si(**task_kwargs))
    logger.info(
        "Now kicking off all necessary tasks to scan IP address %s."
        % (ip_address_uuid,)
    )
    canvas_sig = chain(task_sigs, link_error=scanning_status_signature)
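
The link_error option on the final chain registers a signature to run whenever a task in the canvas fails, which is how the scanning status gets reset on error. A sketch of an error callback in the style of the Celery docs, using the hypothetical app and add task from the intro sketch:

@app.task
def on_failure(request, exc, traceback):
    # Receives the failing task's request, the exception and the traceback.
    print('task {0} failed: {1!r}'.format(request.id, exc))

add.apply_async((2, 'not a number'), link_error=on_failure.s())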

Example from polyaxon/polyaxon (polyaxon/hpsearch/tasks/random.py)
    def send_chunk():
        group_tasks.append(
            hp_random_search_create_experiments.s(experiment_group_id=experiment_group.id,
                                                  suggestions=chunk_suggestions))

    chunk_suggestions = []
    for suggestion in suggestions:
        chunk_suggestions.append(suggestion)
        if len(chunk_suggestions) == settings.GROUP_CHUNKS:
            send_chunk()
            chunk_suggestions = []

    if chunk_suggestions:
        send_chunk()

    # Start the group
    group(group_tasks)()

    celery_app.send_task(
        HPCeleryTasks.HP_RANDOM_SEARCH_START,
        kwargs={'experiment_group_id': experiment_group.id, 'auto_retry': True},
        countdown=1)
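
Manually batching suggestions, as above, keeps each task's payload small. Celery's built-in chunks primitive does something similar for a single task; a sketch with the hypothetical add task:

# Split 100 (x, y) pairs into 10 tasks of 10 calls each, dispatched as a group.
res = add.chunks(zip(range(100), range(100)), 10).group().apply_async()
print(res.get(timeout=10))  # ten lists of ten sums each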

Example from dimagi/commcare-hq (corehq/apps/ota/tasks.py)
    :param cache_timeout_hours: Hours to cache the payload
    :param overwrite_cache:     If True overwrite any existing cache
    :param check_cache_only:    Don't generate the payload, just check if it is already cached
    """
    tasks = [
        prime_restore.s(
            username_or_id,
            domain,
            version,
            cache_timeout_hours,
            overwrite_cache,
            check_cache_only,
        ) for username_or_id in usernames_or_ids
    ]

    return group(tasks)()
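
The helper applies the group immediately and returns its GroupResult, so the caller can wait for all restore payloads at once. A sketch of the caller's side, assuming a result backend is configured:

result = group(tasks)()                  # what the function above returns
payloads = result.get(timeout=60 * 60)   # one entry per username_or_id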