description=__("""
Look recursively in <module_path> for Python modules and packages and create
one reST file with automodule directives per package in the <destdir>.

The <exclude_pattern>s can be file and/or directory patterns that will be
excluded from generation.

Note: By default this script will not overwrite already created files."""))
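# A hedged usage sketch for the command-line interface assembled below; the
# module path, output directory, and exclude pattern are placeholders, not
# values taken from this code.
#
#     sphinx-apidoc -o docs/api src/mypackage "src/mypackage/tests/*"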
parser.add_argument('--version', action='version', dest='show_version',
version='%%(prog)s %s' % __display_version__)
parser.add_argument('module_path',
help=__('path to module to document'))
parser.add_argument('exclude_pattern', nargs='*',
help=__('fnmatch-style file and/or directory patterns '
'to exclude from generation'))
parser.add_argument('-o', '--output-dir', action='store', dest='destdir',
required=True,
help=__('directory to place all output'))
parser.add_argument('-d', '--maxdepth', action='store', dest='maxdepth',
type=int, default=4,
help=__('maximum depth of submodules to show in the TOC '
'(default: 4)'))
parser.add_argument('-f', '--force', action='store_true', dest='force',
help=__('overwrite existing files'))
parser.add_argument('-l', '--follow-links', action='store_true',
dest='followlinks', default=False,
help=__('follow symbolic links. Powerful when combined '
'with collective.recipe.omelette.'))
parser.add_argument('-n', '--dry-run', action='store_true', dest='dryrun',
help=__('run the script without creating files'))
def find_themes(self, theme_path: str) -> Dict[str, str]:
    """Search for themes in the specified directory."""
    themes = {}  # type: Dict[str, str]
    if not path.isdir(theme_path):
        return themes

    for entry in os.listdir(theme_path):
        pathname = path.join(theme_path, entry)
        if path.isfile(pathname) and entry.lower().endswith('.zip'):
            if is_archived_theme(pathname):
                name = entry[:-4]
                themes[name] = pathname
            else:
                logger.warning(__('file %r on theme path is not a valid '
                                  'zipfile or contains no theme'), entry)
        else:
            if path.isfile(path.join(pathname, THEMECONF)):
                themes[entry] = pathname

    return themes
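# A minimal usage sketch for the method above. The factory object and theme
# directory are illustrative assumptions; any object exposing a find_themes()
# method with the signature above would be used the same way.
def list_available_themes(factory, theme_dir: str) -> None:
    # Print each discovered theme name with the zip file or directory it maps to.
    for name, location in sorted(factory.find_themes(theme_dir).items()):
        print('%s -> %s' % (name, location))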
parser.add_argument('--tocfile', action='store', dest='tocfile', default='modules',
help=__("filename of table of contents (default: modules)"))
parser.add_argument('-T', '--no-toc', action='store_false', dest='tocfile',
help=__("don't create a table of contents file"))
parser.add_argument('-E', '--no-headings', action='store_true',
dest='noheadings',
help=__("don't create headings for the module/package "
"packages (e.g. when the docstrings already "
"contain them)"))
parser.add_argument('-M', '--module-first', action='store_true',
dest='modulefirst',
help=__('put module documentation before submodule '
'documentation'))
parser.add_argument('--implicit-namespaces', action='store_true',
dest='implicit_namespaces',
help=__('interpret module paths according to PEP-0420 '
'implicit namespaces specification'))
parser.add_argument('-s', '--suffix', action='store', dest='suffix',
default='rst',
help=__('file suffix (default: rst)'))
parser.add_argument('-F', '--full', action='store_true', dest='full',
help=__('generate a full project with sphinx-quickstart'))
parser.add_argument('-a', '--append-syspath', action='store_true',
dest='append_syspath',
help=__('append module_path to sys.path, used when --full is given'))
parser.add_argument('-H', '--doc-project', action='store', dest='header',
help=__('project name (default: root module name)'))
parser.add_argument('-A', '--doc-author', action='store', dest='author',
help=__('project author(s), used when --full is given'))
parser.add_argument('-V', '--doc-version', action='store', dest='version',
help=__('project version, used when --full is given'))
parser.add_argument('-R', '--doc-release', action='store', dest='release',
help=__('project release, used when --full is given, '
'defaults to --doc-version'))
group.add_argument('-a', '--author', metavar='AUTHOR', dest='author',
help=__('author names'))
group.add_argument('-v', metavar='VERSION', dest='version', default='',
help=__('version of project'))
group.add_argument('-r', '--release', metavar='RELEASE', dest='release',
help=__('release of project'))
group.add_argument('-l', '--language', metavar='LANGUAGE', dest='language',
help=__('document language'))
group.add_argument('--suffix', metavar='SUFFIX', default='.rst',
help=__('source file suffix'))
group.add_argument('--master', metavar='MASTER', default='index',
help=__('master document name'))
group.add_argument('--epub', action='store_true', default=False,
help=__('use epub'))
group = parser.add_argument_group(__('Extension options'))
for ext in EXTENSIONS:
group.add_argument('--ext-%s' % ext, action='append_const',
const='sphinx.ext.%s' % ext, dest='extensions',
help=__('enable %s extension') % ext)
group.add_argument('--extensions', metavar='EXTENSIONS', dest='extensions',
action='append', help=__('enable arbitrary extensions'))
group = parser.add_argument_group(__('Makefile and Batchfile creation'))
group.add_argument('--makefile', action='store_true', dest='makefile', default=True,
help=__('create makefile'))
group.add_argument('--no-makefile', action='store_false', dest='makefile',
help=__('do not create makefile'))
group.add_argument('--batchfile', action='store_true', dest='batchfile', default=True,
help=__('create batchfile'))
group.add_argument('--no-batchfile', action='store_false',
dest='batchfile',
help=__('do not create batchfile'))
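# A hedged example of driving these options from the command line; the target
# directory and metadata values are placeholders.
#
#     sphinx-quickstart docs -a "Jane Doe" -v 1.0 -r 1.0.1 -l en --no-batchfile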
def apply(self, **kwargs) -> None:
    for node in self.document.footnotes:
        if node['names'] == []:
            # footnote with a duplicated number; the parser has already warned about it
            pass
        elif node['names'][0] not in self.document.footnote_refs:
            logger.warning(__('Footnote [%s] is not referenced.'), node['names'][0],
                           type='ref', subtype='footnote',
                           location=node)
    for node in self.document.autofootnotes:
        if not any(ref['auto'] == node['auto'] for ref in self.document.autofootnote_refs):
            logger.warning(__('Footnote [#] is not referenced.'),
                           type='ref', subtype='footnote',
                           location=node)
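# A hedged illustration of the condition checked above: a footnote that is
# defined but never referenced in the source, which this transform reports.
# The reST text is example content only.
UNREFERENCED_FOOTNOTE_EXAMPLE = '''
Some paragraph that never references the footnote below.

.. [1] Defined but unused, so "Footnote [1] is not referenced." is logged.
'''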
group = parser.add_argument_group(__('extension options'))
d['sep'] = do_prompt(__('Separate source and build directories (y/n)'),
                     'n', boolean)

if 'dot' not in d:
    print(__('''
Inside the root directory, two more directories will be created; "_templates"
for custom HTML templates and "_static" for custom stylesheets and other static
files. You can enter another prefix (such as ".") to replace the underscore.'''))
    d['dot'] = do_prompt(__('Name prefix for templates and static dir'), '_', ok)

if 'project' not in d:
    print(__('''
The project name will occur in several places in the built documentation.'''))
    d['project'] = do_prompt(__('Project name'))

if 'author' not in d:
    d['author'] = do_prompt(__('Author name(s)'))

if 'version' not in d:
    print(__('''
Sphinx has the notion of a "version" and a "release" for the
software. Each version can have multiple releases. For example, for
Python the version is something like 2.5 or 3.0, while the release is
something like 2.5.1 or 3.0a1. If you don't need this dual structure,
just set both to the same value.'''))
    d['version'] = do_prompt(__('Project version'), '', allow_empty)

if 'release' not in d:
    d['release'] = do_prompt(__('Project release'), d['version'], allow_empty)

if 'language' not in d:
    print(__('''
If the documents are to be written in a language other than English,
you can select a language here by its language code. Sphinx will then
translate text that it generates into that language.'''))
    d['language'] = do_prompt(__('Project language'), 'en')
def build(self, docnames: Iterable[str], summary: str = None, method: str = 'update') -> None:  # NOQA
    """Main build method.

    First updates the environment, and then calls :meth:`write`.
    """
    if summary:
        logger.info(bold(__('building [%s]: ') % self.name) + summary)

    # while reading, collect all warnings from docutils
    with logging.pending_warnings():
        updated_docnames = set(self.read())

    doccount = len(updated_docnames)
    logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
    for docname in self.env.check_dependents(self.app, updated_docnames):
        updated_docnames.add(docname)
    outdated = len(updated_docnames) - doccount
    if outdated:
        logger.info(__('%d found'), outdated)
    else:
        logger.info(__('none found'))

    if updated_docnames:
        # save the environment
        from sphinx.application import ENV_PICKLE_FILENAME
        with progress_message(__('pickling environment')):
            with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
                pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)

        # global actions
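# A hedged sketch of reading the environment pickled above back from disk.
# The helper function itself is illustrative; only ENV_PICKLE_FILENAME and the
# doctree directory come from the snippet above.
def load_pickled_env(doctreedir: str):
    import pickle
    from os import path
    from sphinx.application import ENV_PICKLE_FILENAME
    with open(path.join(doctreedir, ENV_PICKLE_FILENAME), 'rb') as f:
        return pickle.load(f)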
if 'extensions' not in d:
    print(__('Indicate which of the following Sphinx extensions should be '
             'enabled:'))
    d['extensions'] = []
    for name, description in EXTENSIONS.items():
        if do_prompt('%s: %s (y/n)' % (name, description), 'n', boolean):
            d['extensions'].append('sphinx.ext.%s' % name)

    # Handle conflicting options
    if {'sphinx.ext.imgmath', 'sphinx.ext.mathjax'}.issubset(d['extensions']):
        print(__('Note: imgmath and mathjax cannot be enabled at the same '
                 'time. imgmath has been deselected.'))
        d['extensions'].remove('sphinx.ext.imgmath')

if 'makefile' not in d:
    print(__('''
A Makefile and a Windows command file can be generated for you so that you
only have to run e.g. `make html' instead of invoking sphinx-build
directly.'''))
    d['makefile'] = do_prompt(__('Create Makefile? (y/n)'), 'y', boolean)

if 'batchfile' not in d:
    d['batchfile'] = do_prompt(__('Create Windows command file? (y/n)'),
                               'y', boolean)

print()
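# A hedged example of what the answers dict built by these prompts might hold
# afterwards; every value below is illustrative, not a default.
EXAMPLE_ANSWERS = {
    'sep': False,
    'dot': '_',
    'project': 'Example Project',
    'author': 'Jane Doe',
    'version': '1.0',
    'release': '1.0.1',
    'language': 'en',
    'extensions': ['sphinx.ext.autodoc'],
    'makefile': True,
    'batchfile': True,
}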
    # (tail of the nested read_process helper that runs in each worker)
    # allow pickling self to send it back
    return pickle.dumps(self.env, pickle.HIGHEST_PROTOCOL)

def merge(docs: List[str], otherenv: bytes) -> None:
    env = pickle.loads(otherenv)
    self.env.merge_info_from(docs, env, self.app)

tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)

for chunk in status_iterator(chunks, __('reading sources... '), "purple",
                             len(chunks), self.app.verbosity):
    tasks.add_task(read_process, chunk, merge)

# make sure all threads have finished
logger.info(bold(__('waiting for workers...')))
tasks.join()
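# A hedged, simplified stand-in for the chunking step used above: split the
# document names into roughly nproc equal batches, one task per batch. This is
# not Sphinx's make_chunks, just an illustration of the idea.
def simple_chunks(items, nchunks):
    size = max(1, (len(items) + nchunks - 1) // nchunks)
    return [items[i:i + size] for i in range(0, len(items), size)]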