# Parse script-specific options with argparse after pywikibot.handle_args()
# has consumed the global pywikibot options; a page given both via -p/--page
# and as a positional argument is reported through pywikibot.error().
def set_options(self, *args):
    """Parse command line and set options attribute."""
    my_args = pywikibot.handle_args(args)
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-r', '--edit_redirect', '--edit-redirect',
                        action='store_true', help='Ignore/edit redirects')
    parser.add_argument('-p', '--page', help='Page to edit')
    parser.add_argument('-w', '--watch', action='store_true',
                        help='Watch article after edit')
    # convenience positional argument so we can act like a normal editor
    parser.add_argument('wikipage', nargs='?', help='Page to edit')
    self.options = parser.parse_args(my_args)
    if self.options.page and self.options.wikipage:
        pywikibot.error('Multiple pages passed. Please specify a single '
                        'page to edit.')
        sys.exit(1)
    self.options.page = self.options.page or self.options.wikipage

# SandboxBot constructor: derive the cleaning delay from the delay/hours
# options, fail early when no replacement content is known for the site, and
# fall back to the site's sandbox page(s) when no generator was given.
def __init__(self, **kwargs):
    """Constructor."""
    super(SandboxBot, self).__init__(**kwargs)
    if self.getOption('delay') is None:
        d = min(15, max(5, int(self.getOption('hours') * 60)))
        self.availableOptions['delay_td'] = datetime.timedelta(minutes=d)
    else:
        d = max(5, self.getOption('delay'))
        self.availableOptions['delay_td'] = datetime.timedelta(minutes=d)

    self.site = pywikibot.Site()
    if not content.get(self.site.code) and not self.getOption('text'):
        pywikibot.error(u'No content is given for pages, exiting.')
        raise RuntimeError
    if not self.generator:
        if self.site.code not in sandboxTitle:
            pywikibot.bot.suggest_help(missing_generator=True)
            raise RuntimeError
        local_sandbox_title = sandboxTitle[self.site.code]
        if not isinstance(local_sandbox_title, list):
            local_sandbox_title = [local_sandbox_title]
        self.generator = [pywikibot.Page(self.site, page_name) for
                          page_name in local_sandbox_title]
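
# A minimal sketch (mine, not from sandbox.py) of the availableOptions /
# getOption machinery the constructor above relies on; DemoBot and the
# 'delay'/'hours' defaults are illustrative only.
import pywikibot
from pywikibot.bot import SingleSiteBot


class DemoBot(SingleSiteBot):

    def __init__(self, **kwargs):
        # Register script options and their defaults before the base class
        # merges them with the keyword arguments passed in.
        self.availableOptions.update({'delay': None, 'hours': 1})
        super(DemoBot, self).__init__(**kwargs)
        pywikibot.output('delay option: %s' % self.getOption('delay'))


# DemoBot(delay=5) would report 'delay option: 5'.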

# Fetch OCR results for a page from an external tool (phetools or googleOCR),
# retrying on read timeouts and reporting any other failure through
# pywikibot.error().
if ocr_tool not in self._OCR_METHODS:
    raise TypeError(
        "ocr_tool must be in %s, not '%s'." %
        (self._OCR_METHODS, ocr_tool))

# A wrong link fails with an exception.
retry = 0
while retry < 5:
    pywikibot.debug('{0}: get URI {1!r}'.format(ocr_tool, cmd_uri),
                    _logger)
    try:
        response = http.fetch(cmd_uri)
    except requests.exceptions.ReadTimeout as e:
        retry += 1
        pywikibot.warning('ReadTimeout %s: %s' % (cmd_uri, e))
        pywikibot.warning('retrying in %s seconds ...' % (retry * 5))
        time.sleep(retry * 5)
    except Exception as e:
        pywikibot.error('"%s": %s' % (cmd_uri, e))
        return (True, e)
    else:
        pywikibot.debug('{0}: {1}'.format(ocr_tool, response.text),
                        _logger)
        break

data = json.loads(response.text)
if ocr_tool == self._PHETOOLS:  # phetools
    assert 'error' in data, 'Error from phetools: %s' % data
    assert data['error'] in [0, 1, 2, 3], (
        'Error from phetools: %s' % data)
    error, _text = bool(data['error']), data['text']
else:  # googleOCR
    if 'error' in data:
        error, _text = True, data['error']
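
# A minimal standalone sketch (mine, not from the module above) of the same
# retry pattern, assuming that http.fetch() propagates ReadTimeout as it does
# in the code above; 'https://example.org/ocr' is a placeholder URI.
import time

import pywikibot
from pywikibot.comms import http
from requests.exceptions import ReadTimeout


def fetch_with_retries(uri, retries=5):
    """Fetch uri, backing off on read timeouts; raise after the last try."""
    for attempt in range(1, retries + 1):
        try:
            return http.fetch(uri)
        except ReadTimeout as e:
            pywikibot.warning('ReadTimeout %s: %s' % (uri, e))
            pywikibot.warning('retrying in %s seconds ...' % (attempt * 5))
            time.sleep(attempt * 5)
    raise RuntimeError('%s did not answer after %d retries' % (uri, retries))

# Unlike the loop above, this never leaves the response undefined when every
# attempt times out.
# response = fetch_with_retries('https://example.org/ocr')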

# Tail of a template-harvesting main(): collect the param/property pairs
# given on the command line into `fields`, then build the page generator and
# run the HarvestRobot.
            fields[current_args[0]] = (current_args[1], handler)
            del current_args[:]
        current_args.append(arg)
        if len(current_args) == 2:
            current_args.append({})

# handle leftover
if len(current_args) == 3:
    handler = PropertyOptionHandler(**current_args[2])
    fields[current_args[0]] = (current_args[1], handler)
elif len(current_args) == 1:
    pywikibot.error('Incomplete command line param-property pair.')
    return False

if not template_title:
    pywikibot.error(
        'Please specify either -template or -transcludes argument')
    return

generator = gen.getCombinedGenerator(preload=True)
if not generator:
    gen.handleArg('-transcludes:' + template_title)
    generator = gen.getCombinedGenerator(preload=True)

bot = HarvestRobot(generator, template_title, fields, **options)
bot.run()
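
# A minimal sketch (mine, not from the script above) of building the same
# kind of -transcludes generator directly; 'Infobox person' is a placeholder
# template name.
import pywikibot
from pywikibot import pagegenerators

gen_factory = pagegenerators.GeneratorFactory()
gen_factory.handleArg('-transcludes:Infobox person')
generator = gen_factory.getCombinedGenerator(preload=True)
for page in generator:
    pywikibot.output(page.title())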

# Convert HTML tables on the current page to wiki syntax; refuse to save if
# marker tags are left over or if the conversion produced warnings.
def treat_page(self):
    """Convert all HTML tables in text to wiki syntax and save it."""
    text = self.current_page.text
    new_text, converted_tables, warnings = self.convertAllHTMLTables(text)
    # Check if there are any marked tags left
    if re.search('<##table##|</##table##>', new_text, re.IGNORECASE):
        pywikibot.error(
            'not all marked table start or end tags processed!')
        return

    if converted_tables == 0:
        pywikibot.output('No changes were necessary.')
        return

    if warnings:
        if self.getOption('always') and self.getOption('skipwarning'):
            pywikibot.output(
                'There were {0} replacements that might lead to bad '
                'output. Skipping.'.format(warnings))
            return
        if not self.getOption('always'):
            pywikibot.output(
                'There were {0} replacements that might lead to bad '
                'output.'.format(warnings))

# Collect featured articles (or featured lists) for a site, reporting via
# pywikibot.error() when no category source is configured for the language.
def featuredArticles(self, site, task, cache):
    articles = []
    info = globals()[task + '_name']
    if task == 'lists':
        code = site.code
    else:
        code = 'wikidata'
    try:
        method = info[code][0]
    except KeyError:
        pywikibot.error(
            "language %s doesn't have %s category source."
            % (code, task))
        return
    name = info[code][1]
    # hide #-sorted items on en-wiki
    try:
        hide = info[code][2]
    except IndexError:
        hide = None
    for p in method(site, name, hide):
        if p.namespace() == 0:  # Article
            articles.append(p)
        # Article talk (like in English)
        elif p.namespace() == 1 and site.code != 'el':
            articles.append(pywikibot.Page(p.site,
                            p.title(with_ns=False)))

# Read additional problem definitions (one tuple per setting) from the
# on-wiki configuration page and match them against the image description.
def findAdditionalProblems(self):
    """Extract additional settings from configuration page."""
    # In every tuple there's a setting configuration
    for tupla in self.settingsData:
        name = tupla[1]
        find_tipe = tupla[2]
        find = tupla[3]
        find_list = self.load(find)
        imagechanges = tupla[4]
        if imagechanges.lower() == 'false':
            imagestatus = False
        elif imagechanges.lower() == 'true':
            imagestatus = True
        else:
            pywikibot.error('Imagechanges set wrongly!')
            self.settingsData = None
            break
        summary = tupla[5]
        head_2 = tupla[6]
        if head_2.count('==') == 2:
            head_2 = re.findall(r'\s*== *(.+?) *==\s*', head_2)[0]
        text = tupla[7] % self.imageName
        mexCatched = tupla[8]
        for k in find_list:
            if find_tipe.lower() == 'findonly':
                searchResults = re.findall(r'{}'.format(k.lower()),
                                           self.imageCheckText.lower())
                if searchResults:
                    if searchResults[0] == self.imageCheckText.lower():
                        self.some_problem = True
                        self.text_used = text

# Error handling around a page save: report edit conflicts, spam-blacklist
# hits, locked pages and server errors instead of letting the bot crash.
    if isinstance(e, pywikibot.EditConflict):
        pywikibot.output('Skipping %s because of edit conflict'
                         % page.title())
    elif isinstance(e, pywikibot.SpamfilterError):
        pywikibot.output(
            'Cannot change %s because of blacklist entry %s'
            % (page.title(), e.url))
    elif isinstance(e, pywikibot.LockedPage):
        pywikibot.output('Skipping %s (locked page)'
                         % page.title())
    else:
        pywikibot.error(
            'Skipping %s because of a save related error: %s'
            % (page.title(), e))
except pywikibot.ServerError as e:
    if not ignore_server_errors:
        raise
    pywikibot.error('Server Error while processing %s: %s'
                    % (page.title(), e))
else:
    return True
return False
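
# A minimal standalone sketch (mine) of the same defensive pattern around an
# explicit save; 'User:Example/Sandbox' and the edit text are placeholders.
import pywikibot

site = pywikibot.Site()
page = pywikibot.Page(site, 'User:Example/Sandbox')
page.text += '\ntest edit'
try:
    page.save(summary='Bot: test edit')
except pywikibot.LockedPage:
    pywikibot.output('Skipping %s (locked page)' % page.title())
except pywikibot.SpamfilterError as e:
    pywikibot.output('Cannot change %s because of blacklist entry %s'
                     % (page.title(), e.url))
except pywikibot.PageSaveRelatedError as e:
    pywikibot.error('Skipping %s because of a save related error: %s'
                    % (page.title(), e))
except pywikibot.ServerError as e:
    pywikibot.error('Server Error while processing %s: %s'
                    % (page.title(), e))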

# Tail of an upload script's argument handling: read the description from
# the command line or from -descfile, then validate the file name or URL.
        ignorewarn = True
    elif arg == '-chunked':
        match = CHUNK_SIZE_REGEX.match(option)
        chunk_size = get_chunk_size(match)
    elif arg == '-descfile':
        description_file = value
    elif not url:
        url = option
    else:
        description.append(option)

description = ' '.join(description)

if description_file:
    if description:
        pywikibot.error('Both a description and a -descfile were '
                        'provided. Please specify only one of those.')
        return False
    with codecs.open(description_file,
                     encoding=pywikibot.config.textfile_encoding) as f:
        description = f.read().replace('\r\n', '\n')

while not ('://' in url or os.path.exists(url)):
    if not url:
        error = 'No input filename given.'
    else:
        error = 'Invalid input filename given.'
    if not always:
        error += ' Try again.'
    if always:
        url = None
        break

# Walk cache entries: restore file access times, parse and rebuild each
# entry, and report entries that cannot be parsed or loaded.
    pywikibot.exception(e, tb=True)
    continue

if use_accesstime is None:
    stinfo2 = os.stat(filepath)
    use_accesstime = stinfo.st_atime != stinfo2.st_atime

if use_accesstime:
    # Reset access times to values before loading cache entry.
    os.utime(filepath, (stinfo.st_atime, stinfo.st_mtime))
    entry.stinfo = stinfo

try:
    entry.parse_key()
except ParseError:
    pywikibot.error('Problems parsing %s with key %s'
                    % (entry.filename, entry.key))
    pywikibot.exception()
    continue

try:
    entry._rebuild()
except Exception as e:
    pywikibot.error('Problems loading %s with key %s, %r'
                    % (entry.filename, entry.key, entry._parsed_key))
    pywikibot.exception(e, tb=True)
    continue

if func is None or func(entry):
    if output_func or action_func is None:
        if output_func is None:
            output = entry