def perform(args):
    domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
    if args.get:
        print(unutf8(domain_db[utf8(args.get)], 'illegal'))
    elif args.put:
        domain_db[utf8(args.put[0])] = args.put[1]
    elif args.delete:
        del domain_db[utf8(args.delete)]
    elif args.unload:
        for k in list(domain_db.keys()):
            print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
        # Should work according to documentation, but doesn't
        # for k, v in DOMAIN_DB.iteritems():
        #     print k, '\t', v
    elif args.load:
        for line in sys.stdin:
            k, v = line.rstrip('\r\n').split('\t', 1)
            domain_db[utf8(k)] = v
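
# The utf8()/unutf8() helpers used above come from the surrounding project,
# not from bsddb3. A minimal sketch of the kind of conversion they appear to
# perform (the handling of the 'illegal' marker is an assumption, not the
# project's actual implementation):
def utf8(text):
    # bsddb3 hash databases store raw bytes, so keys must be encoded first
    return text.encode('utf-8')

def unutf8(raw, errors='strict'):
    # 'illegal' is assumed to select a lenient handler for broken entries
    return raw.decode('utf-8', 'replace' if errors == 'illegal' else errors)
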
            authdomain TEXT,
            regcontact TEXT,
            regfirst TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            reglatest TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')
    except sqlite3.OperationalError as e:
        logging.warning('Cannot create `domains` table; maybe multiple processes started in parallel? %s' % str(e))
        # Try to get out of the way of a parallel updater
        time.sleep(1)
        # Someone else already created the table; he probably also
        # migrated it
        return
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            k = unutf8(k, 'illegal')
            v = unutf8(v, 'illegal')
            try:
                (authsecret, authurl, authdomain, extra) = v.split("\t", 3)
            except ValueError:
                (authsecret, authurl, authdomain) = v.split("\t", 2)
                extra = None
            self.conn.execute('''INSERT INTO domains (xmppdomain, authsecret, authurl, authdomain) VALUES (?, ?, ?, ?)''', (k, authsecret, authurl, authdomain))
        if isinstance(olddb, str):
            db.close()
    except bsddb3.db.DBError as e:
        logging.error('Trouble converting %s: %s' % (olddb, e))
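
# The old BerkeleyDB values pack the domain fields into one tab-separated
# string, with an optional fourth "extra" field. A standalone sketch of the
# same split-with-fallback pattern (the sample record is made up):
def split_domain_record(value):
    try:
        authsecret, authurl, authdomain, extra = value.split("\t", 3)
    except ValueError:
        authsecret, authurl, authdomain = value.split("\t", 2)
        extra = None
    return authsecret, authurl, authdomain, extra

print(split_domain_record("s3cret\thttps://auth.example.org\texample.org"))
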
def db_upgrade_cache(self, olddb):
    logging.debug('Upgrading cache from %s' % olddb)
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            k = unutf8(k, 'illegal').replace(':', '@')
            v = unutf8(v, 'illegal')
            (pwhash, ts1, tsv, tsa, rest) = v.split("\t", 4)
            ts1 = datetime.utcfromtimestamp(int(ts1))
            tsv = datetime.utcfromtimestamp(int(tsv))
            tsa = datetime.utcfromtimestamp(int(tsa))
            # First import goes into persistent database
            self.conn.execute('''INSERT INTO authcache (jid, pwhash, firstauth, remoteauth, anyauth)
                                 VALUES (?, ?, ?, ?, ?)''', (k, pwhash, ts1, tsv, tsa))
        if isinstance(olddb, str):
            db.close()
    except bsddb3.db.DBError as e:
        logging.error('Trouble converting %s: %s' % (olddb, e))
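
# The cache values store UNIX timestamps as strings; they are turned into
# naive UTC datetimes before insertion (sqlite3 stores them as ISO-8601
# text). A standalone illustration with a made-up value:
from datetime import datetime

ts = "1514764800"                           # 2018-01-01 00:00:00 UTC
print(datetime.utcfromtimestamp(int(ts)))   # -> 2018-01-01 00:00:00
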
    '''
    db = ctx.parent.params['db']
    newdb = db + '.new'

    if dbtype == 'gdbm':
        import gdbm
        new_d = gdbm.open(newdb, 'n')
    elif dbtype == 'dbm':
        import dbm
        new_d = dbm.open(newdb, 'n')
    elif dbtype == 'dbhash':
        import dbhash
        new_d = dbhash.open(newdb, 'n')
    elif dbtype == 'bsddb':
        new_d = bsddb.hashopen(newdb, 'n')
    elif dbtype == 'dumbdbm':
        import dumbdbm
        new_d = dumbdbm.open(newdb, 'n')
    else:
        raise click.ClickException("Invalid type %s" % dbtype)

    new_data = shelve.Shelf(new_d, protocol=exaile_pickle_protocol)
    for k, v in data.iteritems():
        new_data[k] = v
    new_data.sync()
    new_data.close()
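
# The snippet above is Python 2 (gdbm, dbhash, dumbdbm, dict.iteritems()).
# A rough Python 3 counterpart of the same pattern, with illustrative path,
# key, and pickle protocol:
import dbm
import shelve

new_d = dbm.open('settings.new', 'n')          # let dbm pick a backend
new_data = shelve.Shelf(new_d, protocol=2)
new_data['example-key'] = {'migrated': True}   # any picklable value
new_data.sync()
new_data.close()
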
def open_db3hash(*args):
    """Open a bsddb3 hash."""
    import bsddb3
    return bsddb3.hashopen(*args)
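
# Possible use of the helper above (path and flag are illustrative; the
# third-party bsddb3 package must be installed):
db = open_db3hash('example.bdb', 'c')
db[b'greeting'] = b'hello'     # bsddb3 hashes take byte keys and values
print(db[b'greeting'])
db.close()
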
# We do this because BDB is generally considered more performant,
# and because gdbm currently doesn't work at all in MSYS2.

# Some DBM modules don't use the path we give them, but rather they have
# multiple filenames. If the specified path doesn't exist, double check
# to see if whichdb returns a result before trying to open it with bsddb
force_migrate = False
if not os.path.exists(path):
    from whichdb import whichdb
    if whichdb(path) is not None:
        force_migrate = True

if not force_migrate:
    try:
        db = bsddb.hashopen(path, 'c')
        return shelve.BsdDbShelf(db, protocol=PICKLE_PROTOCOL)
    except bsddb.db.DBInvalidArgError:
        logger.warning("%s was created with an old backend, migrating it", path)
    except Exception:
        raise

# special case: zero-length file
if not force_migrate and os.path.getsize(path) == 0:
    os.unlink(path)
else:
    from xl.migrations.database.to_bsddb import migrate
    migrate(path)

db = bsddb.hashopen(path, 'c')
return shelve.BsdDbShelf(db, protocol=PICKLE_PROTOCOL)
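
# The whichdb module above is Python 2 only. Under Python 3 the equivalent
# backend check lives in dbm.whichdb; the path here is illustrative:
import dbm

backend = dbm.whichdb('music.db')
if backend is None:
    print('no database file found')
elif backend == '':
    print('file exists but the format is not recognized')
else:
    print('created by', backend)    # e.g. 'dbm.gnu'
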
        # Try to get out of the way of a parallel updater
        time.sleep(1)
        # Someone else already created the table; he probably also
        # migrated it
        return
    rosterinfo_fn = {}
    rosterinfo_rh = {}
    rosterinfo_lg = {}
    rosterusers = set([])
    rostergroups = {}
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            k = unutf8(k, 'illegal')
            v = unutf8(v, 'illegal')
            if k.startswith('FNC:'):  # Full name (cache only)
                jid = k[4:].replace(':', '@')
                rosterusers.add(jid)
                if '@' in jid:  # Do not copy malformed (old buggy) entries
                    rosterinfo_fn[jid] = v
            if k.startswith('LIG:'):  # Login In Group (state information)
                jid = k[4:].replace(':', '@')
                rosterusers.add(jid)
                rosterinfo_lg[jid] = v
            if k.startswith('RGC:'):  # Reverse Group Cache (state information)
                gid = k[4:].replace(':', '@')
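
# The roster data multiplexes several record types in one BDB hash by key
# prefix. A standalone sketch of that dispatch with made-up records, showing
# only two of the prefixes:
records = {'FNC:alice:example.org': 'Alice', 'LIG:bob:example.org': 'staff'}
fullnames, logingroups = {}, {}
for key, value in records.items():
    jid = key[4:].replace(':', '@')
    if key.startswith('FNC:'):
        fullnames[jid] = value
    elif key.startswith('LIG:'):
        logingroups[jid] = value
print(fullnames)      # {'alice@example.org': 'Alice'}
print(logingroups)    # {'bob@example.org': 'staff'}
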
def __init__(self, filename, flag='c', protocol=2, writeback=False):
    db = bsddb3.hashopen(filename, flag)
    args = [self, db, protocol, writeback]
    if sys.version_info[0] == 3:
        # Python 3's shelve.Shelf takes an extra keyencoding argument
        args.append('utf8')
    shelve.Shelf.__init__(*args)
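
# The class this __init__ belongs to is not shown. A self-contained sketch,
# assuming it is a shelve.Shelf subclass (the class name and path below are
# invented for illustration):
import shelve
import sys
import bsddb3

class HashShelf(shelve.Shelf):
    def __init__(self, filename, flag='c', protocol=2, writeback=False):
        db = bsddb3.hashopen(filename, flag)
        args = [self, db, protocol, writeback]
        if sys.version_info[0] == 3:
            args.append('utf8')   # keyencoding for Python 3's Shelf
        shelve.Shelf.__init__(*args)

shelf = HashShelf('example.shelf')
shelf['greeting'] = {'text': 'hello'}   # values are pickled, keys utf-8 encoded
shelf.close()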