Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def options_mocks(mocker):
    """Build subscription-lifecycle callbacks that record each invocation.

    Each callback pushes ``self`` onto a manager-backed queue so that a
    test observer (possibly in another process) can see which handlers
    fired and in what order.

    NOTE(review): the visible body defines the callbacks but does not
    return or register them — confirm the remainder of this function
    against the full file.
    """
    manager = multiprocess.Manager()
    events = manager.Queue()

    def on_subscribe(self, msg, params, websocket):
        # Work on a deep copy so the caller's params dict is never mutated.
        merged = copy.deepcopy(params)
        merged.update({'context': msg.get('context', {})})
        events.put(self)
        return merged

    def on_connect(self, message, websocket):
        events.put(self)

    def on_disconnect(self, websocket):
        events.put(self)

    def on_unsubscribe(self, websocket):
        events.put(self)
# NOTE(review): Python 2 fragment of a refcount-leak check; the code that
# populates `info` is outside this view — confirm against the full file.
# Indentation reconstructed on the assumption that both the print and the
# raise are guarded by the emptiness check.
if info:
    print info
    raise ValueError, 'there should be no positive refcounts left'
if __name__ == '__main__':
    # Entry point for the `processing` package benchmark: pick the
    # concurrency namespace from argv, then hand it to test().
    # Converted from Python 2-only syntax (bare `print` statements and
    # `raise SystemExit, 2`) to forms valid under both Python 2 and 3.
    processing.freezeSupport()  # no-op except for frozen Windows executables
    assert len(sys.argv) in (1, 2)
    if len(sys.argv) == 1 or sys.argv[1] == 'processes':
        print(' Using processes '.center(79, '-'))
        namespace = processing
    elif sys.argv[1] == 'manager':
        print(' Using processes and a manager '.center(79, '-'))
        namespace = processing.Manager()
        # The manager proxy lacks these attributes; borrow them from the
        # real module so test() can treat both namespaces uniformly.
        namespace.Process = processing.Process
        namespace.currentProcess = processing.currentProcess
        namespace.activeChildren = processing.activeChildren
    elif sys.argv[1] == 'threads':
        print(' Using threads '.center(79, '-'))
        import processing.dummy as namespace
    else:
        print('Usage:\n\t%s [processes | manager | threads]' % sys.argv[0])
        raise SystemExit(2)  # exit code 2: bad usage
    test(namespace)
def test():
    """Benchmark queue throughput for threads, processes, and a
    manager-hosted queue, then benchmark a raw pipe.

    NOTE(review): a second ``def test()`` later in this file shadows this
    one at import time — confirm which copy is intended to survive.
    """
    manager = processing.Manager()
    # Disable the collector so GC pauses do not skew the timing runs.
    # (Never re-enabled here — presumably acceptable for a benchmark script.)
    gc.disable()
    print('\n\t######## testing Queue.Queue\n')
    test_queuespeed(threading.Thread, Queue.Queue(),
                    threading.Condition())
    print('\n\t######## testing processing.Queue\n')
    test_queuespeed(processing.Process, processing.Queue(),
                    processing.Condition())
    print('\n\t######## testing Queue managed by server process\n')
    test_queuespeed(processing.Process, manager.Queue(),
                    manager.Condition())
    print('\n\t######## testing processing.Pipe\n')
    test_pipespeed()
    # Fixed: the original ended with a bare Python 2 `print` statement,
    # a SyntaxError under Python 3; `print()` emits the same blank line.
    print()
def test():
    """Benchmark queue throughput for threads, processes, and a
    manager-hosted queue, then benchmark a raw pipe.

    Converted from Python 2 print statements to print-function calls,
    which behave identically under Python 2 for a single argument.
    NOTE(review): this redefinition shadows the earlier ``test()``; one of
    the two copies is likely redundant — confirm before removing either.
    """
    manager = processing.Manager()
    gc.disable()  # keep GC pauses out of the timings
    print('\n\t######## testing Queue.Queue\n')
    test_queuespeed(threading.Thread, Queue.Queue(),
                    threading.Condition())
    print('\n\t######## testing processing.Queue\n')
    test_queuespeed(processing.Process, processing.Queue(),
                    processing.Condition())
    print('\n\t######## testing Queue managed by server process\n')
    test_queuespeed(processing.Process, manager.Queue(),
                    manager.Condition())
    print('\n\t######## testing processing.Pipe\n')
    test_pipespeed()
    print()  # trailing blank line (was a Python 2-only bare `print`)
* Underscores are accepted, e.g. 8.8.8.8_8.8.8.10
"""
# NOTE(review): fragment of ODIN's OSINT startup; `organization`, `unsafe`,
# `scope_file`, and `domain` come from an enclosing scope not visible here —
# confirm against the full file.
# Clear the terminal and print the banner, version, and selected module.
click.clear()
click.secho(asciis.print_art(), fg="magenta")
click.secho("\tRelease v{}, {}".format(VERSION, CODENAME), fg="magenta")
click.secho("[+] OSINT Module Selected: ODIN will run all recon modules.", fg="green")
# Perform prep work for reporting
setup_reports(organization)
report_path = "reports/{}/".format(organization)
output_report = report_path + "OSINT_DB.db"
if __name__ == "__main__":
    # Indentation under the guard reconstructed — the pasted source was
    # flattened; verify against the original layout.
    # Create manager server to handle variables shared between jobs
    manager = Manager()
    ip_list = manager.list()
    domain_list = manager.list()
    rev_domain_list = manager.list()
    # Create reporter object and generate lists of everything, just IP addresses, and just domains
    browser = helpers.setup_headless_chrome(unsafe)
    report = reporter.Reporter(organization, report_path, output_report, browser)
    report.create_tables()
    scope, ip_list, domain_list = report.prepare_scope(ip_list, domain_list, scope_file, domain)
    # Create some jobs and put Python to work!
    # Job queue 1 is for the initial phase
    jobs = []
    # Job queue 2 is used for jobs using data from job queue 1
    more_jobs = []
    # Job queue 3 is used for jobs that take a while and use the progress bar, i.e. AWS enum
    even_more_jobs = []
    # Phase 1 jobs
def __init__(self, filename, mode):
    """Open a file-backed log handler drained by a background thread.

    Records are exchanged through a manager-hosted queue so other
    processes can feed this handler; a daemon thread pulls them off the
    queue via ``self.receive`` and forwards to the wrapped FileHandler.
    """
    logging.Handler.__init__(self)
    self.handler = logging.FileHandler(filename, mode)
    # Manager-backed queue (rather than a plain multiprocess.Queue) so the
    # proxy can be shared safely across processes.
    self.queue = multiprocess.Manager().Queue(-1)
    self.is_closed = False
    # Daemon thread: dies with the interpreter instead of blocking exit.
    receiver = threading.Thread(target=self.receive, daemon=True)
    self.t = receiver
    receiver.start()
# Settings: Location, Units, and rapidfire (optional)
latitude = check_env_var("HZN_LAT", printerr=True)
longitude = check_env_var("HZN_LON", printerr=True)
# weewx recommends only using 'us'
pws_units = check_env_var("PWS_UNITS", default='us', printerr=True)
pws_wu_loc = check_env_var("PWS_WU_LOC", default='', printerr=True)
pws_wu_rapidfire = check_env_var("PWS_WU_RPDF", default='False', printerr=True)
# Normalize the flag: only "true" or "True" enable rapidfire; any other
# value (lower-case booleans from Horizon, typos, etc.) maps to "False".
pws_wu_rapidfire = "True" if pws_wu_rapidfire in ("true", "True") else "False"
# NOTE(review): fragment of a weather-station launcher; `pws_wu_id`,
# `pws_station_type`, `pws_model`, `fl`, `weewx_mod`, and
# `weewx_config_file` are defined outside this view — confirm.
## Shared data structure (dict for flask server to read & serve)
manager = Manager()
sdata = manager.dict()
standard_params = ["wu_id", "stationtype", "model", "latitude", "longitude", "units", "location"]
standard_values = [pws_wu_id, pws_station_type, pws_model, latitude, longitude, pws_units, pws_wu_loc]
sdata["r"] = dict(zip(["status"], ["Station initializing..."]))
sdata["t"] = str(int(time.time())) # Timestamp
sdata["i"] = dict(zip(standard_params, standard_values)) # Station Info
## Flask HTTPserver ----------------------------------------------------------
## Start simple flask server at localhost:port and pass in shared data dict
# Bound to all interfaces on port 8357; sdata is a manager proxy so both
# this process and the server see updates.
p_flask = Process(target=fl.run_server, args=('0.0.0.0', 8357, sdata))
p_flask.start()
## Weewx service -------------------------------------------------------------
# Modify the weewx configuration file with our env var settings
weemod = weewx_mod(weewx_config_file, pws_station_type)
weemod.wee_config_script = "/home/weewx/bin/wee_config"
# NOTE(review): fragment of a worker-pool initializer — chooses thread-local
# vs. process-shared primitives based on self.isThread; the enclosing class
# is outside this view. Indentation reconstructed; Chinese comments
# translated to English.
# if self.isThread==1:
#     self.q_request.join()
#     self.q_finish.join()
if self.isThread==1:
    # self.lock = Lock()  # thread lock
    self.work_queue = Queue.Queue()  # task queue (thread mode)
    self.lock = Lock()  # thread lock
    # self.q_request = Queue()  # queue of objects for tasks to process
    # self.q_finish = Queue()  # queue of objects tasks have finished
else :
    # self.lock = multiprocessing.Lock()
    # self.q_request=multiprocessing.Queue()
    # self.q_finish=multiprocessing.Queue()
    # Manager-backed lock/queue proxies are used instead of raw
    # multiprocessing primitives — presumably so they can be pickled into
    # worker processes; confirm against the enclosing class.
    self.lock = multiprocessing.Manager().Lock()
    temp=multiprocess.Manager()
    self.work_queue = temp.Queue()  # task queue (process mode)