How to use the multiprocessing.Value function in multiprocessing

To help you get started, we’ve selected a few multiprocessing.Value examples, based on popular ways it is used in public projects.

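Before the project snippets, here is a minimal, self-contained sketch of the core pattern (the function and variable names here are illustrative, not from any project below): create a typed shared value, pass it to child processes, and update it under its lock.

    import multiprocessing

    def worker(counter):
        # get_lock() guards the read-modify-write, so concurrent
        # increments from several processes are not lost.
        for _ in range(1000):
            with counter.get_lock():
                counter.value += 1

    if __name__ == '__main__':
        counter = multiprocessing.Value('i', 0)  # 'i' = C signed int, initial value 0
        procs = [multiprocessing.Process(target=worker, args=(counter,))
                 for _ in range(4)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
        print(counter.value)  # always 4000, thanks to the lock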

github BBN-Q / Auspex / test / test_alazar.py
def __init__(self):
    self.queue = Queue()
    self.points_taken = Value('i', 0)  # shared integer counter, starts at 0

def push(self, data):
github intracom-telecom-sdn / nstat / stress_test / sb_idle_scalability_multinet.py
    :type multinet_base_dir: str
    :type conf: str
    :type output_dir: str
    """

    test_type = '[sb_idle_scalability_multinet]'
    logging.info('{0} initializing test parameters'.format(test_type))

    # Global variables shared (read-write) between the monitor and main threads.
    cpid = 0
    global_sample_id = 0

    t_start = multiprocessing.Value('d', 0.0)  # 'd' = double-precision float

    # Multinet parameters
    multinet_hosts_per_switch = multiprocessing.Value('i', 0)
    multinet_topo_size = multiprocessing.Value('i', 0)

    multinet_worker_ip_list = conf['multinet_worker_ip_list']
    multinet_worker_port_list = conf['multinet_worker_port_list']

    # Controller parameters
    controller_logs_dir = ctrl_base_dir + conf['controller_logs_dir']
    controller_rebuild = conf['controller_rebuild']
    controller_cleanup = conf['controller_cleanup']
    if 'controller_cpu_shares' in conf:
        controller_cpu_shares = conf['controller_cpu_shares']
    else:
        controller_cpu_shares = 100
    multinet_switch_type = conf['multinet_switch_type']

    controller_handlers_set = conf_collections_util.controller_handlers(
github fsxfreak / esys-pbi / src / pupil / pupil_src / capture / main.py
        fh.setFormatter(logging.Formatter('%(asctime)s - %(processName)s - [%(levelname)s] %(name)s: %(message)s'))
        logger.addHandler(fh)
        # Stream to console.
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter('%(processName)s - [%(levelname)s] %(name)s: %(message)s'))
        logger.addHandler(ch)
        # IPC setup to receive log messages. Use zmq_tools.ZMQ_handler to send messages to here.
        sub = zmq_tools.Msg_Receiver(zmq_ctx,ipc_sub_url,topics=("logging",))
        while True:
            topic,msg = sub.recv()
            record = logging.makeLogRecord(msg)
            logger.handle(record)


    ## IPC
    timebase = Value(c_double,0)                      # shared clock origin (ctypes double)
    eyes_are_alive = Value(c_bool,0),Value(c_bool,0)  # one liveness flag per eye process

    zmq_ctx = zmq.Context()

    #Let the OS choose the IP and PORT
    ipc_pub_url = 'tcp://*:*'
    ipc_sub_url = 'tcp://*:*'
    ipc_push_url = 'tcp://*:*'

    # Binding IPC Backbone Sockets to URLs.
    # They are used in the threads started below.
    # Using them in the main thread is not allowed.
    xsub_socket = zmq_ctx.socket(zmq.XSUB)
    xsub_socket.bind(ipc_pub_url)
    ipc_pub_url = xsub_socket.last_endpoint.decode('utf8').replace("0.0.0.0","127.0.0.1")
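Note that Pupil passes ctypes types (c_double, c_bool) rather than typecode strings; Value accepts either spelling and produces the same shared object:

    from ctypes import c_double
    from multiprocessing import Value

    a = Value('d', 0.0)       # typecode string...
    b = Value(c_double, 0.0)  # ...and ctypes type both create a shared double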
github Ecogenomics / GTDBTk / gtdbtk / external / tigrfam_search.py
    def run(self, gene_files):
        """Annotate genes with TIGRFAM HMMs.

        Parameters
        ----------
        gene_files : iterable
            Gene files in FASTA format to process.
        """
        if len(gene_files) == 0:
            raise GTDBTkExit('There are no genomes to process.')
        self.cpus_per_genome = max(1, self.threads // len(gene_files))  # integer division: whole CPUs per genome

        # populate worker queue with data to process
        workerQueue = mp.Queue()
        writerQueue = mp.Queue()
        n_skipped = mp.Value('i', 0)  # shared count of skipped genomes

        for f in gene_files:
            workerQueue.put(f)

        for _ in range(self.threads):
            workerQueue.put(None)  # one sentinel per worker marks the end of the queue

        try:
            workerProc = [mp.Process(target=self._workerThread, args=(
                workerQueue, writerQueue, n_skipped)) for _ in range(self.threads)]
            writeProc = mp.Process(target=self._writerThread, args=(
                len(gene_files), writerQueue))

            writeProc.start()

            for p in workerProc:
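The excerpt cuts off as the worker processes are started. As a hedged sketch (illustrative only, not GTDB-Tk's actual implementation), a worker in this pattern consumes files until it sees the None sentinel and counts failures under the Value's lock, with _annotate standing in for the real per-file work:

    def _workerThread(self, workerQueue, writerQueue, n_skipped):
        while True:
            gene_file = workerQueue.get(block=True)
            if gene_file is None:  # sentinel: no more work
                break
            try:
                writerQueue.put(self._annotate(gene_file))  # hypothetical helper
            except Exception:
                with n_skipped.get_lock():  # safe concurrent increment
                    n_skipped.value += 1
                writerQueue.put(None)  # still signal the writer for this file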
github keiohta / tf2rl / tf2rl / algos / apex_multienv.py
    global_rb = manager.PrioritizedReplayBuffer(
        obs_shape=env.observation_space.shape,
        act_dim=get_act_dim(env),
        size=args.replay_buffer_size)

    # queues to share network parameters between a learner and explorers
    queues = [manager.Queue(), manager.Queue()]

    # Event object to share training status. Once the event is set, all explorers stop sampling transitions.
    is_training_done = Event()

    # Lock
    lock = manager.Lock()

    # Shared memory objects to count number of samples and applied gradients
    trained_steps = Value('i', 0)

    return global_rb, queues, is_training_done, lock, trained_steps
github cylc / cylc-flow / lib / cylc / mp_pool.py
                if not mesg.endswith("\n"):
                    mesg += "\n"
                ret += fmt % {
                    "cmd_key": self.cmd_key,
                    "attr": attr,
                    "mesg": mesg}
        return ret.rstrip()


class SuiteProcPool(object):
    """Use a process pool to execute shell commands."""

    JOBS_SUBMIT = "jobs-submit"
    JOB_SKIPPED_FLAG = 999
    # Shared-memory flag, created once at class-definition time.
    STOP_JOB_SUBMISSION = multiprocessing.Value('i', 0)

    _INSTANCE = None

    @classmethod
    def get_inst(cls, pool_size=None):
        """Return a singleton instance.

        On the first call, instantiate the singleton. The argument "pool_size"
        is only relevant on the first call.

        """
        if cls._INSTANCE is None:
            cls._INSTANCE = cls(pool_size)
        return cls._INSTANCE

    def __init__(self, pool_size=None):
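Because STOP_JOB_SUBMISSION is created at class-definition time, worker processes forked from the pool inherit the same shared memory, so a flag set in the main process is visible inside workers (this relies on fork; under the spawn start method each process would re-import the class and get its own Value). A hedged sketch of the pattern, not cylc's exact code:

    @classmethod
    def stop_job_submission(cls):
        cls.STOP_JOB_SUBMISSION.value = 1  # set once, in the main process

    def _run_job(self, job):  # hypothetical worker-side method
        if self.STOP_JOB_SUBMISSION.value:  # observed inside pool workers
            return self.JOB_SKIPPED_FLAG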
github shirokunet / RCVehiclePy / demo / demo_keyboard_controller.py
    def __init__(self):
        self.throttle = Value(ctypes.c_float, 0.0)  # shared floats read by the child process
        self.steering = Value(ctypes.c_float, 0.0)
        self.is_run = Value(ctypes.c_bool, True)    # shared run flag

        self._p = Process(target=self._process, args=(10,))
        self._p.start()
        return
github Crypt0s / smb-scan / smbscanner.py
    # Strip whitespace and expand any CIDR-notation entries in the target list
    # (Python 2 code).
    expanded = []
    for target in targets:
        target = target.strip()
        if '/' in target:
            expanded = expanded + ip_expand(target)
        else:
            expanded.append(target)
    targets = expanded
    print "Checking for communication"

    pool = Pool(smbargs.packet_rate)
    valid_targets = pool.map(PortScan, targets)

    valid_targets[:] = (x for x in valid_targets if x is not None)
    print valid_targets
    print "Starting to crawl the targets... this will take some time."
    lock = Value('i',0,lock=True)

    # dynamic scanning pool, so we'll take care of this here

    npool = Pool(smbargs.packet_rate)
    results = npool.map_async(smbscan, valid_targets)
    results.get()
    print "Done"
github zhezhaoa / ngram2vec / word2vecf / word2vecf.py
    size = int(args['--size'])
    processes_num = int(args['--processes_num'])
    negative = int(args['--negative'])
    iters = int(args['--iters'])

    w2i, i2w = load_vocabulary(words_path)
    c2i, i2c = load_vocabulary(contexts_path)
    words = load_count_vocabulary(words_path)
    contexts = load_count_vocabulary(contexts_path)

    pairs_num = 0
    with open(pairs_path, 'r') as f:
        for l in f:
            pairs_num += 1

    global_word_count = Value('l', 0)  # 'l' = C signed long, shared with Pool workers via initargs
    alpha = 0.025
    syn0, syn1 = init_net(size, len(words), len(contexts))
    table = UnigramTable(i2c, contexts)
    print ()

    for i in range(iters):
        pool = Pool(processes=processes_num, initializer=__init_process, initargs=(w2i, c2i, syn0, syn1, table, negative, size, alpha, processes_num, global_word_count, pairs_num, iters, pairs_path))
        pool.map(train_process, range(processes_num))

    save(i2w, syn0, outputs_path)
    print ("word2vecf finished")
github adangert / JoustMania / piparty.py
        self.pair = pair.Pair()

        self.menu = Value('i', 1)
        self.controller_game_mode = Value('i',1)
        self.restart = Value('i',0)
        self.controller_teams = {}
        self.controller_colors = {}
        self.dead_moves = {}
        self.music_speed = Value('d', 0)
        self.werewolf_reveal = Value('i', 2)
        self.show_team_colors = Value('i', 0)
        self.red_on_kill = Value('i', 0)
        self.zombie_opts = {}
        self.commander_intro = Value('i',1)
        self.commander_move_opts = {}
        self.commander_powers = [Value('d', 0.0), Value('d', 0.0)]
        self.commander_overdrive = [Value('i', 0), Value('i', 0)]
        self.five_controller_opts = {}
        self.swapper_team_colors = Array('i',[0]*6)
        self.fight_club_colors = {}
        self.invincible_moves = {}
        self.num_teams = Value('i',1)
        self.bomb_color = Array('i', [0] * 3)
        self.game_start = Value('i', 0)
        self.false_colors = {}
        self.was_faked = {}
        self.rumble = {}
        self.kill_controller_proc = {}
        self.controller_sensitivity = Array('d', [0] * 10)
        self.dead_invince = Value('b', False)

        self.i = 0
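Alongside the many Value objects, this snippet also uses Value's sibling Array, which shares a fixed-length typed buffer the same way and is indexed like a list:

    from multiprocessing import Array

    bomb_color = Array('i', [0] * 3)  # three shared C ints
    bomb_color[0] = 255               # element access is lock-protected by default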