def quit(self, retcode=const.ENoError):
    # saving is the most important:
    # we save, but don't clean. why?
    # think about an unmounted path or moved files;
    # once we discard that information, it is gone.
    # so unless the user specifically requests a clean,
    # we don't act too smart.
    #cached.cleancache()
    cached.savecache()
    self.savesetting()
    # if we flush() on Ctrl-C, we get
    # IOError: [Errno 32] Broken pipe
    sys.stdout.flush()
    sys.exit(retcode)
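# The comments above note that flushing stdout after Ctrl-C can raise
# IOError: [Errno 32] Broken pipe. A minimal, hypothetical hardening (not
# part of bypy): swallow only broken-pipe errors so the intended return
# code still reaches sys.exit().
import errno
import sys

def safe_exit(retcode=0):
    try:
        sys.stdout.flush()
    except OSError as ex:  # IOError/BrokenPipeError are OSError in Python 3
        if ex.errno != errno.EPIPE:
            raise
    sys.exit(retcode)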
def cleancache():
    if cached.loadcache():
        # iterate over a snapshot of the keys, since we may delete entries
        for absdir in list(cached.cache.keys()):
            if not os.path.exists(absdir):
                if cached.verbose:
                    pr("Directory: '{}' no longer exists, removing the cache entries".format(absdir))
                cached.dirty = True
                del cached.cache[absdir]
            else:
                oldfiles = cached.cache[absdir]
                files = {}
                needclean = False
                for f in oldfiles.keys():
                    #p = os.path.join(absdir, f)
                    p = joinpath(absdir, f)
                    if os.path.exists(p):
                        files[f] = oldfiles[f]
                    else:
                        # flag the cleanup unconditionally (originally this was
                        # only set under `cached.verbose`, which was a bug)
                        needclean = True
                        if cached.verbose:
                            pr("File '{}' no longer exists, removing the cache entry".format(p))
                if needclean:
                    cached.dirty = True
                    cached.cache[absdir] = files
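# cleancache() assumes the hash cache maps absolute directory paths to
# per-file info dicts. A hypothetical illustration of that layout (the
# exact field set here is an assumption for demonstration purposes):
example_cache = {
    "/home/user/photos": {
        "a.jpg": {"size": 12345, "mtime": 1500000000, "md5": "9e107d9d..."},
        "b.jpg": {"size": 67890, "mtime": 1500000100, "md5": "e4d909c2..."},
    },
}
# If /home/user/photos/b.jpg is deleted on disk, cleancache() rebuilds the
# inner dict without "b.jpg" and sets cached.dirty so savecache() persists it.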
def loadcache(existingcache=None):
    # load the cache even if we don't use the cached hash values,
    # because we will save the (possibly updated) hash values back
    if existingcache is None: # avoid a shared mutable default argument
        existingcache = {}
    if not cached.cacheloaded: # no double-loading
        if cached.verbose:
            pr("Loading Hash Cache File '{}'...".format(cached.hashcachepath))
        if os.path.exists(cached.hashcachepath):
            try:
                cached.cache = jsonload(cached.hashcachepath)
                # pay the history debt ...
                # TODO: remove some time later, when nobody uses the old binary format cache
                if cached.isbincache(cached.cache):
                    pinfo("ONE TIME conversion for binary format Hash Cache ...")
                    stringifypickle(cached.cache)
                    pinfo("ONE TIME conversion finished")
                if existingcache: # not empty
                    if cached.verbose:
                        pinfo("Merging with existing Hash Cache")
                    cached.mergeinto(existingcache, cached.cache)
                cached.cacheloaded = True
                if cached.verbose:
                    pr("Hash Cache File loaded.")
            #except (EOFError, TypeError, ValueError, UnicodeDecodeError) as ex:
            except Exception as ex:
                perr("Failed to load the Hash Cache, no caching.\n{}".format(formatex(ex)))
                cached.cache = existingcache
        else:
            if cached.verbose:
                pr("Hash Cache File not found, no caching")
    return cached.cacheloaded
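# loadcache() and savecache() (below) lean on bypy's jsonload()/jsondump()
# helpers. A minimal sketch of what such helpers could look like, assuming
# they are thin wrappers over the stdlib json module (bypy's real ones may
# differ in encoding and error handling):
import io
import json

def jsonload(path):
    with io.open(path, 'r', encoding='utf-8') as f:
        return json.load(f)

def jsondump(obj, path):
    with io.open(path, 'w', encoding='utf-8') as f:
        f.write(json.dumps(obj, indent=1, ensure_ascii=False))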
# (SSL certificate setup, excerpted from client initialization)
if os.path.isfile(self.__certspath):
    self.__checkssl = self.__certspath
else:
    # well then, disable cert verification
    pwarn(
        "** SSL Certificate Verification has been disabled **\n\n"
        "If you are confident that your CA Bundle can verify "
        "Baidu PCS's certs, you can run the program with the '" + const.CaCertsOption +
        "' argument to enable SSL cert verification.\n\n"
        "However, most of the time, you can ignore this warning: "
        "you are not going to send sensitive data to the cloud in plain text, right?")
    self.__checkssl = False
if not checkssl:
    requester.disable_warnings(self.debug)
cached.loadcache()
requester.set_logging_level(debug)
if debug > 0:
    # this information is useful for debugging
    pr("----")
    pr("Verbose level = {}".format(verbose))
    pr("Debug level = {}".format(debug))
    pr("Config directory: '{}'".format(self.__configdir))
    pr("Token file: '{}'".format(self.__tokenpath))
    pr("Hash Cache file: '{}'".format(self.__hashcachepath))
    pr("App root path at Baidu Yun: '{}'".format(const.AppPcsPath))
    pr("sys.stdin.encoding = {}".format(sys.stdin.encoding))
    pr("sys.stdout.encoding = {}".format(sys.stdout.encoding))
    pr("sys.stderr.encoding = {}".format(sys.stderr.encoding))
    pr("----\n")
def __store(self, info, path, value):
    cached.dirty = True
    info['size'] = getfilesize(path)
    info['mtime'] = getfilemtime_int(path)
    info[self.f.__name__] = value
    if cached.debug:
        situation = "Storing cache"
        if cached.usecache:
            situation = "Cache miss"
        pdbg((situation + " for file '{}',\n{}: {}\nsize: {}\nmtime: {}").format(
            path, self.f.__name__,
            value,
            info['size'], info['mtime']))
    # periodically save to prevent loss in case of a system crash
    now = time.time()
    if now - gvar.last_cache_save >= const.CacheSavePeriodInSec:
        if cached.debug:
            pdbg("Periodically saving Hash Cache")
        cached.savecache()
        gvar.last_cache_save = now

# (settings migration, excerpted: move each old config file to its new
# location, keeping a '.old' backup when the destination already exists)
for oldfile, newfile in filesToMove:
    if os.path.exists(oldfile):
        dst = newfile
        if os.path.exists(newfile):
            dst = dst + '.old'
        result = movefile(oldfile, dst) and result
# we moved to JSON for hash caching for better portability
# http://www.benfrederickson.com/dont-pickle-your-data/
# https://kovshenin.com/2010/pickle-vs-json-which-is-faster/
# JSON even outperforms Pickle, and it is definitely much more portable.
# DON'T bother with pickle.
if os.path.exists(const.PicklePath):
    oldcache = {}
    try:
        with io.open(const.PicklePath, 'rb') as f:
            oldcache = pickleload(f)
        stringifypickle(oldcache)
        cached.loadcache(oldcache)
        cached.savecache(True)
        pinfo("Contents of Pickle (old format hash cache) '{}' "
              "have been merged into '{}'".format(const.PicklePath, const.HashCachePath))
        mergedfile = const.PicklePath + '.merged'
        ok = movefile(const.PicklePath, mergedfile)
        if ok == const.ENoError:
            pinfo("Pickle (old format hash cache) '{}' "
                  "has been renamed to '{}'".format(const.PicklePath, mergedfile))
        else:
            perr("Failed to move Pickle (old format hash cache) '{}' to '{}'".format(const.PicklePath, mergedfile))
    except (
        pickle.PickleError,
        # the following are for dealing with a corrupted cache file
        EOFError, TypeError, ValueError):
        invalidfile = const.PicklePath + '.invalid'
        ok = movefile(const.PicklePath, invalidfile)
        perr("Stale Pickle (old format hash cache) '{}' found, "
             "renamed to '{}'".format(const.PicklePath, invalidfile))
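# stringifypickle() above converts the old pickled cache into something the
# json module can serialize. A hypothetical sketch of that kind of conversion
# (the real bypy helper may differ): recursively turn bytes keys and values
# into str, since json.dumps() rejects bytes.
def jsonify(obj, encoding='utf-8'):
    if isinstance(obj, bytes):
        return obj.decode(encoding, errors='replace')
    if isinstance(obj, dict):
        return {jsonify(k, encoding): jsonify(v, encoding) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [jsonify(v, encoding) for v in obj]
    return obj  # ints, floats, str, None pass through unchanged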
def savecache(force_saving=False):
    saved = False
    # even if we were unable to load the cache, we still save it
    if cached.dirty or force_saving:
        if cached.verbose:
            pr("Saving Hash Cache...")
        try:
            jsondump(cached.cache, cached.hashcachepath)
            if cached.verbose:
                pr("Hash Cache saved.")
            saved = True
            cached.dirty = False
        except Exception as ex:
            perr("Failed to save Hash Cache.\n{}".format(formatex(ex)))
    else:
        if cached.verbose:
            pr("Skip saving Hash Cache since it has not been updated.")
    return saved
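# Taken together, __store(), loadcache() and savecache() implement a classic
# file-hash memoization pattern: key the cached value on (size, mtime) and
# persist lazily via a dirty flag. A self-contained, hypothetical sketch of
# that pattern (not bypy's actual `cached` decorator):
import os
import time

class HashCache:
    def __init__(self, save_period=60):
        self.cache = {}  # path -> {'size', 'mtime', 'value'}
        self.dirty = False
        self.save_period = save_period
        self.last_save = time.time()

    def get(self, path, compute):
        st = os.stat(path)
        entry = self.cache.get(path)
        # reuse the cached value only if size and mtime still match
        if entry and entry['size'] == st.st_size and entry['mtime'] == int(st.st_mtime):
            return entry['value']
        value = compute(path)
        self.cache[path] = {'size': st.st_size, 'mtime': int(st.st_mtime), 'value': value}
        self.dirty = True
        # periodically persist so a crash loses at most save_period seconds of work
        if time.time() - self.last_save >= self.save_period:
            self.save()
        return value

    def save(self):
        if self.dirty:
            pass  # e.g. jsondump(self.cache, cachepath); omitted in this sketch
        self.dirty = False
        self.last_save = time.time()

# usage sketch:
#   hc = HashCache()
#   digest = hc.get('/some/file', lambda p: hashlib.md5(open(p, 'rb').read()).hexdigest())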