offset = 0
self.pd("Checking if we already have the copy locally")
if os.path.isfile(localfile):
    self.pd("Same-name local file '{}' exists, checking if contents match".format(localfile))
    self.__current_file_size = getfilesize(self.__current_file)
    if const.ENoError == self.__verify_current_file(self.__remote_json, False) \
        and not (self.__downloader[:5] == const.DownloaderAria2 and os.path.exists(localfile + '.aria2')):
        self.pd("Same local file '{}' already exists, skip downloading".format(localfile))
        self.__remove_remote_on_success(remotefile)
        return const.ENoError
    else:
        if not self.shalloverwrite("Same-name local file '{}' exists but is different, "
                "do you want to overwrite it? [y/N]".format(localfile)):
            pinfo("Same-name local file '{}' exists but is different, skip downloading".format(localfile))
            #return const.ENoError
            return const.ESkipped

        if self.__resumedownload and \
            self.__compare_size(self.__current_file_size, self.__remote_json) == 2:
            if self.__resumedl_revertcount < 0:
                if self.__current_file_size:
                    offset = self.__current_file_size
            else:
                # revert at least self.__resumedl_revertcount download chunk(s); default: one
                pieces = self.__current_file_size // self.__dl_chunk_size
                if pieces > self.__resumedl_revertcount:
                    offset = (pieces - self.__resumedl_revertcount) * self.__dl_chunk_size
elif os.path.isdir(localfile):
    if not self.shalloverwrite("Same-name directory '{}' exists, "
            "do you want to remove it? [y/N]".format(localfile)):
        pinfo("Same-name directory '{}' exists, skip downloading".format(localfile))
        #return const.ENoError
        return const.ESkipped

    self.pv("Directory with the same name '{}' exists, removing ...".format(localfile))
    result = removedir(localfile, self.verbose)
    if result == const.ENoError:
        self.pv("Removed")
    else:
        perr("Error removing the directory '{}'".format(localfile))
        return result

ldir, file = os.path.split(localfile)
if ldir and not os.path.exists(ldir):
    result = makedir(ldir, verbose = self.verbose)
    if result != const.ENoError:
        perr("Failed to make directory '{}'".format(ldir))
        return result
# JSON even outperforms Pickle and is definitely much more portable.
# DON'T bother with pickle.
if os.path.exists(const.PicklePath):
    oldcache = {}
    try:
        with io.open(const.PicklePath, 'rb') as f:
            oldcache = pickleload(f)
        stringifypickle(oldcache)
        cached.loadcache(oldcache)
        cached.savecache(True)
        pinfo("Contents of Pickle (old format hash cache) '{}' "
            "have been merged to '{}'".format(const.PicklePath, const.HashCachePath))
        mergedfile = const.PicklePath + '.merged'
        ok = movefile(const.PicklePath, mergedfile)
        if ok == const.ENoError:
            pinfo("Pickle (old format hash cache) '{}' "
                "has been renamed to '{}'".format(const.PicklePath, mergedfile))
        else:
            perr("Failed to move Pickle (old format hash cache) '{}' to '{}'".format(const.PicklePath, mergedfile))
    except (
        pickle.PickleError,
        # the following is for dealing with a corrupted cache file
        EOFError, TypeError, ValueError):
        invalidfile = const.PicklePath + '.invalid'
        ok = movefile(const.PicklePath, invalidfile)
        perr("{} invalid Pickle (old format hash cache) file '{}' to '{}'".format(
            "Moved" if ok == const.ENoError else "Failed to move",
            const.PicklePath, invalidfile))

return result
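
The migration above boils down to a pattern: load the legacy pickle once, persist the data as JSON, and rename the old file so the conversion never runs twice (or gets moved aside if corrupted). A standalone sketch of that pattern, with hypothetical paths and without bypy's helpers:

import json
import os
import pickle

# A standalone sketch of the one-time pickle-to-JSON migration pattern
# used above; paths and names are illustrative, not bypy's.
def migrate_pickle_cache(pickle_path, json_path):
    if not os.path.exists(pickle_path):
        return
    try:
        with open(pickle_path, 'rb') as f:
            oldcache = pickle.load(f)
        with open(json_path, 'w') as f:
            json.dump(oldcache, f)
        os.rename(pickle_path, pickle_path + '.merged')  # never migrate twice
    except (pickle.PickleError, EOFError, TypeError, ValueError):
        # corrupted cache: move it aside instead of crashing
        os.rename(pickle_path, pickle_path + '.invalid')
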
def __prompt_clean(self):
    pinfo('-' * 64)
    pinfo("""This is most likely caused by authorization errors.
Possible causes:
- You haven't run this program for a long time (more than a month).
- You changed your Baidu password after authorizing this program.
- You didn't grant this program the 'netdisk' access while authorizing.
- ...
Possible fixes:
1. Remove the authorization token by running with the parameter '{}', and then re-run this program.
2. If (1) still doesn't solve the problem, you may have to go to:
https://passport.baidu.com/accountbind
and remove the authorization of this program, and then re-run this program.""".format(const.CleanOptionShort))
    return const.EInvalidJson
def loadcache(existingcache = {}):
    # load the cache even if we don't use cached hash values,
    # because we will save the (possibly updated) hash values
    if not cached.cacheloaded: # no double-loading
        if cached.verbose:
            pr("Loading Hash Cache File '{}'...".format(cached.hashcachepath))

        if os.path.exists(cached.hashcachepath):
            try:
                cached.cache = jsonload(cached.hashcachepath)
                # pay the history debt ...
                # TODO: remove some time later when nobody uses the old bin format cache
                if cached.isbincache(cached.cache):
                    pinfo("ONE TIME conversion for binary format Hash Cache ...")
                    stringifypickle(cached.cache)
                    pinfo("ONE TIME conversion finished")
                if existingcache: # not empty
                    if cached.verbose:
                        pinfo("Merging with existing Hash Cache")
                    cached.mergeinto(existingcache, cached.cache)
                cached.cacheloaded = True
                if cached.verbose:
                    pr("Hash Cache File loaded.")
            #except (EOFError, TypeError, ValueError, UnicodeDecodeError) as ex:
            except Exception as ex:
                perr("Failed to load the Hash Cache, no caching.\n{}".format(formatex(ex)))
                cached.cache = existingcache
        else:
            if cached.verbose:
                pr("Hash Cache File '{}' not found, no caching".format(cached.hashcachepath))
if const.ENoError == self.__verify_current_file(self.__remote_json, False):
    # the two files are the same
    upload = False
    self.pv("Remote file '{}' already exists, skip uploading".format(rfile))
else: # the two files are different
    if not self.shalloverwrite("Remote file '{}' exists but is different, "
            "do you want to overwrite it? [y/N]".format(rfile)):
        upload = False
        self.__isrev = False

if upload:
    fileresult = self.__upload_file(lfile, rfile, ondup)
    if fileresult != const.ENoError:
        result = fileresult # we still continue
else:
    pinfo("Remote file '{}' exists and is the same, skip uploading".format(rfile))
    # next / continue

return result
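
The surrounding logic is a three-way decision: an identical remote file means skip, a differing file means prompt before overwriting, and no remote file means upload. A compact sketch of that decision, where the predicate and the prompt are placeholders for __verify_current_file and shalloverwrite:

# A compact sketch of the upload decision above. files_match and
# ask_overwrite are placeholder callables, not bypy functions.
def should_upload(remote_exists, files_match, ask_overwrite):
    if not remote_exists:
        return True            # nothing on the server yet
    if files_match():
        return False           # identical copy already uploaded
    return ask_overwrite()     # differing file: let the user decide
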
self.pd("Local file size : {}".format(self.__current_file_size))
self.pd("Remote file size: {}".format(rsize))
if self.__current_file_size == rsize:
self.pd("Local and remote file size matches")
if self.__verify:
if not gotlmd5:
self.__current_file_md5 = md5(self.__current_file)
self.pd("Local file MD5 : {}".format(self.__current_file_md5))
self.pd("Remote file MD5: {}".format(rmd5))
if self.__current_file_md5 == rmd5:
self.pd("Local and remote file hash matches")
return const.ENoError
else:
pinfo("Local and remote file hash DOESN'T match")
return const.EHashMismatch
else:
return const.ENoError
else:
pinfo("Local and remote file size DOESN'T match")
return const.EHashMismatch
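
The comparison checks the cheap property first (size) and only computes an MD5 when the sizes agree and verification is enabled. A self-contained equivalent, with illustrative return codes standing in for const.ENoError and const.EHashMismatch:

import hashlib

# A self-contained sketch of the size-then-MD5 verification above.
# The numeric codes are illustrative, not bypy's constants.
E_NO_ERROR, E_HASH_MISMATCH = 0, 1

def verify_file(localpath, rsize, rmd5, verify=True):
    with open(localpath, 'rb') as f:
        data = f.read()
    if len(data) != rsize:
        return E_HASH_MISMATCH       # sizes differ: no need to hash
    if not verify:
        return E_NO_ERROR            # size match is enough when not verifying
    lmd5 = hashlib.md5(data).hexdigest()
    return E_NO_ERROR if lmd5 == rmd5 else E_HASH_MISMATCH
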