# Example: a worker's run() loop that records game playouts to MongoDB,
# with bulky frame data stored in GridFS.
import gridfs
from pymongo import MongoClient

def run(self):
    current_idx = 0
    total_frames = 0
    env = False
    new_ep = True
    solutions = []
    ## Database connection
    client = MongoClient()
    db = client.retro_contest
    ## Put the database handle inside the class instance
    self.collection = db[self.current_time]
    self.fs = gridfs.GridFS(db)
    current_level = self.get_level()
    while True:
        ## Push into the database once enough playouts have accumulated
        if current_idx > PLAYOUTS:
            self.add_db()
            total_frames += current_idx
            current_idx = 0
            if total_frames > PLAYOUTS_PER_LEVEL:
                # Assumption: the original example is cut off here; once the
                # per-level frame budget is spent, move on to a new level.
                total_frames = 0
                current_level = self.get_level()
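
# run() above hands persistence off to self.add_db(), which is not part of
# the excerpt. A minimal sketch of what such a method could look like, given
# the self.collection and self.fs handles set up above; the buffer name
# self.solutions and the stored fields are assumptions, not the original code:
import pickle

def add_db(self):
    for solution in self.solutions:  # hypothetical buffer of finished playouts
        # Bulky frame data goes to GridFS; the document keeps only the id.
        frames_id = self.fs.put(pickle.dumps(solution))
        self.collection.insert_one({"frames_id": frames_id})
    self.solutions = []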
# Example (legacy Python 2 test code): comparing "dbhash" results between a
# replica-set master and slave to find collections that differ.
from itertools import izip  # Python 2 only; use zip() on Python 3

from pymongo import MongoClient, ReadPreference

for mongod in [master, slave]:
    client = MongoClient(port=mongod.port, read_preference=ReadPreference.SECONDARY_PREFERRED)
    mongod.dbhash = client.test.command("dbhash")
    mongod.dict = mongod.dbhash["collections"]

# (excerpted from inside a test function, hence the global statement)
global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
replicated_collections += master.dict.keys()

for coll in replicated_collections:
    if coll not in slave.dict and coll not in lost_in_slave:
        lost_in_slave.append(coll)
        continue  # bug fix: slave.dict[coll] below would raise KeyError
    mhash = master.dict[coll]
    shash = slave.dict[coll]
    if mhash != shash:
        mTestDB = MongoClient(port=master.port).test
        sTestDB = MongoClient(port=slave.port,
                              read_preference=ReadPreference.SECONDARY_PREFERRED).test
        mCount = mTestDB[coll].count()
        sCount = sTestDB[coll].count()
        stats = {'hashes': {'master': mhash, 'slave': shash},
                 'counts': {'master': mCount, 'slave': sCount}}
        try:
            mDocs = list(mTestDB[coll].find().sort("_id", 1))
            sDocs = list(sTestDB[coll].find().sort("_id", 1))
            mDiffDocs = list()
            sDiffDocs = list()
            for left, right in izip(mDocs, sDocs):
                if left != right:
                    mDiffDocs.append(left)
                    sDiffDocs.append(right)
            stats['diffs'] = {'master': mDiffDocs, 'slave': sDiffDocs}
        except Exception:
            # Assumption: the original example is truncated here; without an
            # except clause the try block would be a syntax error.
            pass
        screwy_in_slave[coll] = stats
# Example: collecting de-duplicated contact emails from crawled articles.
from pymongo import MongoClient

def main():
    db = MongoClient().get_database('beiyouren')
    col = db.get_collection('articles')
    cursor = col.find({'status': 'fetched'})
    total = cursor.count()  # deprecated in PyMongo 3.7+; prefer count_documents()
    i = 1
    exist_emails = []
    for doc in cursor:
        contact = doc['contact']
        # email
        emails = contact.get('email')
        if emails:
            emails = [email.split()[0] for email in emails if email]
            remaining_emails = []
            for email in emails:
                # Assumption: the original example is truncated at this
                # if/else; de-duplication is the evident intent.
                if email in exist_emails:
                    continue
                exist_emails.append(email)
                remaining_emails.append(email)
# Example: creating a SecureSocial "userpass" user document with a
# bcrypt-hashed password. user_name, user_admin and mongo_uri are defined
# earlier in the full script (not shown here).
import sys
import uuid

import pymongo
from passlib.hash import bcrypt  # newer passlib needs bcrypt.using(rounds=10).hash(...)

user_password = str(uuid.uuid4()).replace('-', '')
print('PASSWORD : ' + user_password)
if not user_password:
    print("Need to specify PASSWORD")
    sys.exit(-1)
if not user_admin:
    if sys.stdin.isatty():
        user_admin = input('ADMIN : ')
    if not user_admin:
        print("Need to specify ADMIN")
        sys.exit(-1)
    user_admin = user_admin.lower() == 'true'
# connect to mongo
client = pymongo.MongoClient(mongo_uri)
dbase = client.get_default_database()
users = dbase.get_collection('social.users')
# check if the user already exists
if users.find_one({"identityId.userId": user_name, "identityId.providerId": "userpass"}):
    print("USER ALREADY EXISTS, will not create user")
    sys.exit(-1)
# hash the password, forcing the "$2a" prefix that SecureSocial expects
encrypted_password = "$2a" + bcrypt.hash(user_password, rounds=10)[3:]
# create the document that will be inserted
user_document = {
    "identityId": {
        "userId": user_name,
        "providerId": "userpass"
    },
    # Assumption: the original example is truncated here; SecureSocial
    # stores the hash under passwordInfo.
    "passwordInfo": {
        "hasher": "bcrypt",
        "password": encrypted_password
    }
}
users.insert_one(user_document)
# Example: rewriting GridFS file metadata in parallel during an OPQ
# database migration.
import typing

import pymongo

def parallel_update_fs_files_metadata_fn(box_event: typing.Dict):
    # parallel_db is a per-process global set up by the pool initializer
    # (see init_parallel_client, referenced in the migration plan below).
    fs_files = parallel_db["fs.files"]
    event_id = box_event["event_id"]
    box_id = box_event["box_id"]
    data_fs_filename = box_event["data_fs_filename"]
    fs_files.update_one({"filename": data_fs_filename},
                        {"$set": {"metadata": {"event_id": event_id,
                                               "box_id": box_id}}})
if __name__ == "__main__":
    print("Connecting to OPQ mongodb...", end=" ", flush=True)
    database_name = "opq"
    client = pymongo.MongoClient()
    db = client.opq
    print("Connected.")

    # # Migration plan
    #
    # # measurements
    # # 1. Rename device_id -> box_id
    # # 2. Change box_id value to str
    # print("Migrating measurements...", end=" ", flush=True)
    # measurements = db.measurements
    # total_measurements = measurements.count()
    # device_ids = [1, 2, 3, 4, 5]
    # pool = multiprocessing.Pool(initializer=init_parallel_client)
    # j = 0
    # for i in pool.imap_unordered(parallel_migrate_and_decimate_measurements, device_ids):
    #     print("Measurement status", j)
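
# parallel_update_fs_files_metadata_fn above reads parallel_db, a per-process
# global: pymongo clients must not be shared across fork, so each pool worker
# creates its own. The migration plan names an initializer,
# init_parallel_client, whose body is not shown; the following is an
# assumption, not the original implementation:
import multiprocessing

parallel_db = None

def init_parallel_client():
    global parallel_db
    parallel_db = pymongo.MongoClient().opq  # assumption: default localhost URI

# Usage sketch, mirroring the commented plan:
#   pool = multiprocessing.Pool(initializer=init_parallel_client)
#   for _ in pool.imap_unordered(parallel_update_fs_files_metadata_fn, box_events):
#       pass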
# Example: a Celery configuration that loads HS300 stock codes from MongoDB
# at import time and schedules a daily task over them.
from celery.schedules import crontab
from pymongo import MongoClient

BROKER_URL = 'redis://'
CELERY_RESULT_BACKEND = "redis://"
CELERY_ACCEPT_CONTENT = ['pickle', 'json', 'msgpack', 'yaml']

stockCodes = []
client = MongoClient()
db = client['stockcodes']
documents = db.HS300.find()
for document in documents:
    stockCodes.append(document['stockcode'])

CELERYBEAT_SCHEDULE = {
    'every-24hours': {
        'task': 'tasks.main',
        # Run once a day at midnight. The original crontab(hour='*/24')
        # fires every minute of hour 0, because minute defaults to '*'.
        'schedule': crontab(minute=0, hour=0),
        # args must be a tuple; the original (stockCodes) was just the list,
        # which Celery would unpack into one argument per stock code.
        'args': (stockCodes,),
    },
}
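
# The schedule above names a task 'tasks.main' that is not part of this
# example. A hypothetical skeleton consistent with the config (module layout,
# app name and body are all assumptions):
from celery import Celery

app = Celery('tasks')
app.config_from_object('celeryconfig')  # assumption: the config above is celeryconfig.py

@app.task(name='tasks.main')
def main(stock_codes):
    # Receives the full HS300 code list as one argument (hence the
    # one-element args tuple in the beat schedule).
    for code in stock_codes:
        pass  # fetch and store data for each stock code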
# Example: opening the MongoDB collection configured in a Scrapy project's
# settings.
from pymongo import MongoClient
from scrapy.utils.project import get_project_settings

def connect_db():
    settings = get_project_settings()
    connection = MongoClient(host=settings['MONGODB_SERVER'],
                             port=int(settings['MONGODB_PORT']))
    db = connection[settings['MONGODB_DB']]
    if settings['MONGODB_USER'] and settings['MONGODB_PWD']:
        db.authenticate(settings['MONGODB_USER'], settings['MONGODB_PWD'])
    return db[settings['MONGODB_COLLECTION']]
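
# Database.authenticate was removed in PyMongo 4. A sketch of the same lookup
# for newer drivers, passing credentials to MongoClient instead (assumes the
# same Scrapy settings keys as above):
def connect_db_pymongo4():
    settings = get_project_settings()
    connection = MongoClient(host=settings['MONGODB_SERVER'],
                             port=int(settings['MONGODB_PORT']),
                             username=settings['MONGODB_USER'] or None,
                             password=settings['MONGODB_PWD'] or None)
    return connection[settings['MONGODB_DB']][settings['MONGODB_COLLECTION']]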
# Example: downloading a prebuilt libINET.so from GridFS under an exclusive
# file lock, so concurrent runs wait for a single download.
import os
import shutil

import flock
import gridfs
import pymongo

os.makedirs(directory, exist_ok=True)
LOGGER.info("replacing inet lib with the one built from commit " + git_hash)
with open(directory + "/download.lock", "wb") as lf:
    LOGGER.info(
        "starting download, or waiting for the in-progress download to finish")
    with flock.Flock(lf, flock.LOCK_EX):
        LOGGER.info("download lock acquired")
        try:
            # mode "xb" raises FileExistsError if the file is already there
            with open(directory + "/libINET.so", "xb") as f:
                LOGGER.info(
                    "we have just created the file, so we need to download it")
                client = pymongo.MongoClient(MONGO_HOST)
                gfs = gridfs.GridFS(client.opp)
                LOGGER.info("connected, downloading")
                f.write(gfs.get(git_hash).read())
                LOGGER.info("download done")
        except FileExistsError:
            LOGGER.info("the file was already downloaded")
shutil.copy(directory + "/libINET.so", INET_LIB_FILE)
LOGGER.info("file copied to the right place")
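
# The upload side of this workflow is not shown. A hypothetical counterpart
# would store each build under its commit hash as the GridFS _id, which is
# what makes gfs.get(git_hash) above work:
def upload_build(so_path, git_hash, mongo_host):
    client = pymongo.MongoClient(mongo_host)
    gfs = gridfs.GridFS(client.opp)
    with open(so_path, "rb") as f:
        gfs.put(f, _id=git_hash)  # key the file by the commit hash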
# Example: lazily opening a database handle, choosing a replica-set-aware
# client when the URI mentions one.
def db(self):
    if self._database is None:
        if options.CFG.uri and 'replicaSet' in options.CFG.uri:
            # MongoReplicaSetClient is PyMongo 2.x only; in PyMongo 3+
            # MongoClient handles replicaSet URIs itself.
            conn = pymongo.MongoReplicaSetClient(options.CFG.uri)
        else:
            conn = pymongo.MongoClient(options.CFG.uri)
        self._database = conn[options.CFG.database]
    return self._database
# Example: pruning expired MozDef watchlist entries and exporting the rest.
def getWatchlist():
    WatchList = []
    try:
        # connect to mongo
        client = MongoClient(options.mongohost, options.mongoport)
        mozdefdb = client.meteor
        watchlistentries = mozdefdb['watchlist']
        # Log the entries we are removing to maintain an audit log
        expired = watchlistentries.find({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(hours=1)}})
        for entry in expired:
            logger.debug('Deleting entry {0} from watchlist\n'.format(entry))
        # delete any that expired
        watchlistentries.delete_many({'dateExpiring': {"$lte": datetime.utcnow() - timedelta(hours=1)}})
        # Lastly, export the combined watchlist
        watchCursor = mozdefdb['watchlist'].aggregate([
            {"$sort": {"dateAdded": -1}},
            {"$match": {"watchcontent": {"$exists": True}}},
            # Assumption: the original example is truncated inside the next
            # $match stage; keeping only unexpired entries is the evident intent.
            {"$match": {"dateExpiring": {"$gt": datetime.utcnow()}}},
        ])
        for entry in watchCursor:
            WatchList.append(entry)
        return WatchList
    except Exception as e:
        # Assumption: error handling is truncated in the original example.
        logger.error('Exception while retrieving the watchlist: {0}'.format(e))