def build_all(self):
    """Builds the entire tree. Returns the number of failures."""
    failures = 0
    path_cache = PathCache(self.env)
    with reporter.build('build', self):
        self.env.plugin_controller.emit('before-build-all', builder=self)
        to_build = self.get_initial_build_queue()
        while to_build:
            source = to_build.popleft()
            prog, build_state = self.build(source, path_cache=path_cache)
            self.extend_build_queue(to_build, prog)
            failures += len(build_state.failed_artifacts)
        self.env.plugin_controller.emit('after-build-all', builder=self)
        if failures:
            reporter.report_build_all_failure(failures)
    return failures
def build_all(self):
    """Builds the entire tree. Returns the number of failures."""
    failures = 0
    path_cache = PathCache(self.env)
    # We keep a dummy connection here that does not do anything which
    # helps us with the WAL handling. See #144
    con = self.connect_to_database()
    try:
        with reporter.build('build', self):
            self.env.plugin_controller.emit('before-build-all', builder=self)
            to_build = self.get_initial_build_queue()
            while to_build:
                source = to_build.popleft()
                prog, build_state = self.build(source, path_cache=path_cache)
                self.extend_build_queue(to_build, prog)
                failures += len(build_state.failed_artifacts)
            self.env.plugin_controller.emit('after-build-all', builder=self)
            if failures:
                reporter.report_build_all_failure(failures)
        return failures
    finally:
        con.close()
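For orientation, here is a minimal sketch of driving a full build from a script. The entry points it uses (Project.discover, make_env, new_pad, get_output_path, and the Builder constructor) are assumptions about the surrounding Lektor API rather than something shown in the snippets above, and may differ between versions.

from lektor.project import Project
from lektor.builder import Builder

# Sketch only: exact constructor arguments are assumed, not canonical.
project = Project.discover()                 # locate the .lektorproject file
env = project.make_env()                     # environment with plugins loaded
pad = env.new_pad()                          # database "pad" over the source tree
builder = Builder(pad, project.get_output_path())

failures = builder.build_all()               # number of artifacts that failed
if failures:
    raise SystemExit('build finished with %d failures' % failures)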
def prune(self, all=False):
    """This cleans up data left in the build folder that does not
    correspond to known artifacts.
    """
    path_cache = PathCache(self.env)
    with reporter.build(all and 'clean' or 'prune', self):
        self.env.plugin_controller.emit(
            'before-prune', builder=self, all=all)

        with self.new_build_state(path_cache=path_cache) as build_state:
            for aft in build_state.iter_unreferenced_artifacts(all=all):
                reporter.report_pruned_artifact(aft)
                filename = build_state.get_destination_filename(aft)
                prune_file_and_folder(filename, self.destination_path)
                build_state.remove_artifact(aft)
            build_state.prune_source_infos()

        if all:
            build_state.vacuum()

        self.env.plugin_controller.emit(
            'after-prune', builder=self, all=all)
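A prune pass is normally run after a successful build to drop output files whose artifacts are no longer referenced. A hedged usage sketch, reusing the builder object from the sketch above:

# prune() removes unreferenced output files; prune(all=True) acts as a
# full clean and, as shown in the method above, also vacuums the database.
builder.prune()
# builder.prune(all=True)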
def write_source_info(self, info):
    """Writes the source info into the database. The source info is
    an instance of :class:`lektor.build_programs.SourceInfo`.
    """
    reporter.report_write_source_info(info)
    source = self.to_source_filename(info.filename)
    con = self.connect_to_database()
    try:
        cur = con.cursor()
        for lang, title in iteritems(info.title_i18n):
            cur.execute('''
                insert or replace into source_info
                    (path, alt, lang, type, source, title)
                    values (?, ?, ?, ?, ?, ?)
            ''', [info.path, info.alt, lang, info.type, source, title])
        con.commit()
    finally:
        con.close()
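The insert-or-replace above implies a source_info table keyed (at least) by path, alt, and lang, since the loop writes one row per translated title. The sketch below reconstructs such a table with sqlite3 purely for illustration; the real schema is created by Lektor's build-state setup code and may differ in column types and constraints.

import sqlite3

# Hypothetical reconstruction inferred from the insert's column list;
# the database path is illustrative only.
con = sqlite3.connect('buildstate.db')
con.execute('''
    create table if not exists source_info (
        path text,
        alt text,
        lang text,
        type text,
        source text,
        title text,
        primary key (path, alt, lang)
    )
''')
con.commit()
con.close()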
def build_artifact(self, artifact):
    ctx = get_ctx()
    source_out = self.build_state.make_named_temporary('less')
    map_out = self.build_state.make_named_temporary('less-sourcemap')
    here = os.path.dirname(self.source.source_filename)

    cmdline = ['lessc', '--no-js', '--include-path=%s' % here,
               '--source-map=%s' % map_out,
               self.source.source_filename,
               source_out]

    reporter.report_debug_info('lessc cmd line', cmdline)

    proc = portable_popen(cmdline)
    if proc.wait() != 0:
        raise RuntimeError('lessc failed')

    with open(map_out) as f:
        dep_base = os.path.dirname(map_out)
        for dep in json.load(f).get('sources') or ():
            ctx.record_dependency(os.path.join(dep_base, dep))

    artifact.replace_with_file(source_out)
def build_artifact(self, artifact, build_func):
    """Various parts of the system, once they have an artifact and a
    function to build it, will invoke this function. This is ultimately
    what performs the build.

    The return value is the ctx that was used to build this thing
    if it was built, or `None` otherwise.
    """
    is_current = artifact.is_current
    with reporter.build_artifact(artifact, build_func, is_current):
        if not is_current:
            with artifact.update() as ctx:
                # Upon building anything we record a dependency to the
                # project file. This is not ideal but for the moment
                # it will ensure that if the file changes we will
                # rebuild.
                project_file = self.env.project.project_file
                if project_file:
                    ctx.record_dependency(project_file)
                build_func(artifact)
            return ctx
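To illustrate the contract of build_artifact, a caller passes a function that receives the artifact and writes its content. Everything in the sketch below (the artifact handle, the open('wb') call, and the surrounding builder and artifact objects) is an assumption made for illustration, not taken from the snippet above.

def write_placeholder(artifact):
    # Hypothetical build function: write rendered bytes into the artifact's
    # temporary file, which is promoted to the destination on success.
    with artifact.open('wb') as f:
        f.write(b'<h1>placeholder</h1>')

ctx = builder.build_artifact(artifact, write_placeholder)
if ctx is None:
    # The artifact was already current, so nothing was rebuilt.
    pass
else:
    # ctx holds the dependencies recorded during the build (including
    # the project-file dependency added above).
    pass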
@self._auto_deferred_update_operation
def operation(con):
    sources = [self.build_state.to_source_filename(x)
               for x in self.sources]
    cur = con.cursor()
    cur.execute('''
        delete from dirty_sources where source in (%s)
    ''' % ', '.join(['?'] * len(sources)), list(sources))
    cur.close()

reporter.report_dirty_flag(False)
        seen.add(source)

    for v_source in virtual_dependencies or ():
        checksum = v_source.get_checksum(self.build_state.path_cache)
        mtime = v_source.get_mtime(self.build_state.path_cache)
        rows.append(artifacts_row(
            artifact=self.artifact_name,
            source=v_source.path,
            source_mtime=mtime,
            source_size=None,
            source_checksum=checksum,
            is_dir=False,
            is_primary_source=False))

    reporter.report_dependencies(rows)

    cur = con.cursor()
    if not for_failure:
        cur.execute('delete from artifacts where artifact = ?',
                    [self.artifact_name])

    if rows:
        cur.executemany('''
            insert or replace into artifacts (
                artifact, source, source_mtime, source_size,
                source_checksum, is_dir, is_primary_source)
            values (?, ?, ?, ?, ?, ?, ?)
        ''', rows)

    if self.config_hash is None:
        cur.execute('''
            delete from artifact_config_hashes