def load():
    errors_file = None if args.no_errors else sys.stderr
    with misc_utils.log_time('beancount.loader (total)', logging.info):
        return loader.load_file(args.filename,
                                log_timings=logging.info,
                                log_errors=errors_file)
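
# A minimal sketch of the same call outside the argparse wrapper, assuming a
# ledger path of your own ('ledger.beancount' is hypothetical).
# loader.load_file() returns an (entries, errors, options_map) triple.
import logging
import sys

from beancount import loader

entries, errors, options_map = loader.load_file(
    'ledger.beancount',            # hypothetical input file
    log_timings=logging.info,      # timing lines go to the logging framework
    log_errors=sys.stderr)         # parse/validation errors rendered to stderr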
    See load() or load_string().
    """
    assert isinstance(sources, list) and all(isinstance(el, tuple) for el in sources)

    if hasattr(log_timings, 'write'):
        log_timings = log_timings.write

    # Parse all the files recursively. Ensure that the entries are sorted before
    # running any processes on them.
    with misc_utils.log_time('parse', log_timings, indent=1):
        entries, parse_errors, options_map = _parse_recursive(
            sources, log_timings, encoding)
        entries.sort(key=data.entry_sortkey)

    # Run interpolation on incomplete entries.
    with misc_utils.log_time('booking', log_timings, indent=1):
        entries, balance_errors = booking.book(entries, options_map)
        parse_errors.extend(balance_errors)

    # Transform the entries.
    with misc_utils.log_time('run_transformations', log_timings, indent=1):
        entries, errors = run_transformations(entries, parse_errors, options_map,
                                              log_timings)

    # Validate the list of entries.
    with misc_utils.log_time('beancount.ops.validate', log_timings, indent=1):
        valid_errors = validation.validate(entries, options_map, log_timings,
                                           extra_validations)
        errors.extend(valid_errors)

        # Note: We could go hardcore here and further verify that the entries
        # haven't been modified by user-provided validation routines, by
        # comparing hashes before and after. Not needed for now.

    # Compute the input hash.
    options_map['input_hash'] = compute_input_hash(options_map['include'])
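
# Hedged sketch: a custom check passed through the 'extra_validations' hook
# that the pipeline above forwards to validation.validate(). Each validation
# callable receives (entries, options_map) and returns a list of error tuples.
# The NoteError namedtuple, the check itself, and the filename are illustrative
# assumptions, not beancount API.
import collections

from beancount import loader
from beancount.core import data

NoteError = collections.namedtuple('NoteError', 'source message entry')

def check_has_transactions(entries, options_map):
    # Flag ledgers that parse cleanly but contain no transactions at all.
    if not any(isinstance(entry, data.Transaction) for entry in entries):
        return [NoteError(data.new_metadata('<check_has_transactions>', 0),
                          'No transactions found in input', None)]
    return []

entries, errors, options_map = loader.load_file(
    'ledger.beancount',  # hypothetical filename
    extra_validations=[check_has_transactions])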
        with misc_utils.log_time('beancount.parser.parser.parse_file',
                                 log_timings, indent=2):
            (src_entries,
             src_errors,
             src_options_map) = parser.parse_file(filename, encoding=encoding)
        cwd = path.dirname(filename)
    else:
        # Encode the contents if necessary.
        if encoding:
            if isinstance(source, bytes):
                source = source.decode(encoding)
            source = source.encode('ascii', 'replace')

        # Parse a string buffer from memory.
        with misc_utils.log_time('beancount.parser.parser.parse_string',
                                 log_timings, indent=2):
            (src_entries,
             src_errors,
             src_options_map) = parser.parse_string(source)

        # If we're parsing a string, the CWD is the current process
        # working directory.
        cwd = os.getcwd()

    # Merge the entries resulting from the parsed file.
    entries.extend(src_entries)
    parse_errors.extend(src_errors)

    # We need the options from the very top file only (the very
    # first file being processed). No merging of options should
    # occur.
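
# Minimal sketch of the in-memory branch above: parser.parse_string() takes a
# source buffer directly and returns the same (entries, errors, options_map)
# triple as parser.parse_file(). The tiny ledger below is made up for
# illustration.
from beancount.parser import parser

SOURCE = """
2023-01-01 open Assets:Cash
2023-01-01 open Expenses:Food

2023-01-02 * "Lunch"
  Expenses:Food   10.00 USD
  Assets:Cash
"""

entries, errors, options_map = parser.parse_string(SOURCE)
# Note the entries are still unbooked at this point; the loader runs
# booking.book() on them afterwards, as shown earlier.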
        self.entries, self.begin_index = self.apply_filter(self.all_entries, options_map)

        # Compute the list of entries for the opening balances sheet.
        self.opening_entries = (self.entries[:self.begin_index]
                                if self.begin_index is not None
                                else [])

        # Compute the list of entries that includes transfer entries of the
        # income/expenses amounts to the balance sheet's equity (as "net
        # income"). This is used to render the end-period balance sheet, with
        # the current period's net income, closing the period.
        self.closing_entries = summarize.cap_opt(self.entries, options_map)

        # Realize the three sets of entries.
        account_types = options.get_account_types(options_map)
        with misc_utils.log_time('realize_opening', logging.info):
            self.opening_real_accounts = realization.realize(self.opening_entries,
                                                             account_types)
        with misc_utils.log_time('realize', logging.info):
            self.real_accounts = realization.realize(self.entries,
                                                     account_types)
        with misc_utils.log_time('realize_closing', logging.info):
            self.closing_real_accounts = realization.realize(self.closing_entries,
                                                             account_types)

        assert self.real_accounts is not None
        assert self.closing_real_accounts is not None
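
# Hedged sketch of what realization.realize() produces: a RealAccount tree,
# where each node is a dict of child name components and carries the account's
# accumulated balance. The lookup and attribute usage below reflect the
# beancount v2 API as I understand it; the filename is hypothetical.
from beancount import loader
from beancount.core import realization

entries, _, options_map = loader.load_file('ledger.beancount')  # hypothetical
real_root = realization.realize(entries)

# Look up one account node and print the balances of its direct children.
assets = realization.get(real_root, 'Assets')
if assets is not None:
    for name, child in assets.items():
        print(name, child.balance)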
                        help='Disable the cache from the command-line.')
    parser.add_argument('--cache-filename', action='store',
                        help='Override the name of the cache')
    opts = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.INFO,
                            format='%(levelname)-8s: %(message)s')

    # Override loader caching setup if disabled or if the filename is
    # overridden.
    if not opts.use_cache or opts.cache_filename:
        loader.initialize(opts.use_cache, opts.cache_filename)

    with misc_utils.log_time('beancount.loader (total)', logging.info):
        # Load up the file and print errors; checking and validation are
        # invoked automatically.
        entries, errors, _ = loader.load_file(
            opts.filename,
            log_timings=logging.info,
            log_errors=sys.stderr,
            # Force slow and hardcore validations, just for check.
            extra_validations=validation.HARDCORE_VALIDATIONS)

    # Exit with an error code if there were any errors, so this can be used in
    # a shell conditional.
    return 1 if errors else 0
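
# Shell usage sketch for the entry point above. '--cache-filename' is defined
# verbatim; '--verbose' and '--no-cache' are inferred from 'opts.verbose',
# 'opts.use_cache' and the help text, so treat the exact spellings as
# assumptions:
#
#   bean-check --verbose --no-cache ledger.beancount
#   echo $?    # prints 1 if any errors were reported, 0 otherwise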
        parse_errors.append(
            LoadError(data.new_metadata("", 0),
                      'Duplicate filename parsed: "{}"'.format(filename),
                      None))
        continue

    # Check for a file that does not exist.
    if not path.exists(filename):
        parse_errors.append(
            LoadError(data.new_metadata("", 0),
                      'File "{}" does not exist'.format(filename), None))
        continue

    # Parse a file from disk directly.
    filenames_seen.add(filename)
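
# Hedged sketch of the failure mode handled above: asking the loader for a
# path that does not exist yields no entries plus a LoadError-style tuple in
# the errors list instead of raising. The path below is deliberately bogus.
from beancount import loader

entries, errors, _ = loader.load_file('/no/such/ledger.beancount')
assert not entries
for error in errors:
    print(error.message)  # e.g. 'File "/no/such/ledger.beancount" does not exist'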
        # Issue a warning on a renamed module.
        renamed_name = RENAMED_MODULES.get(plugin_name, None)
        if renamed_name:
            warnings.warn("Deprecation notice: Module '{}' has been renamed to '{}'; "
                          "please adjust your plugin directive.".format(
                              plugin_name, renamed_name))
            plugin_name = renamed_name

        # Try to import the module.
        try:
            module = importlib.import_module(plugin_name)
            if not hasattr(module, '__plugins__'):
                continue

            with misc_utils.log_time(plugin_name, log_timings, indent=1):
                # Run each transformer function in the plugin.
                for function_name in module.__plugins__:
                    if isinstance(function_name, str):
                        # Support plugin functions provided by name.
                        callback = getattr(module, function_name)
                    else:
                        # Support function types directly, not just names.
                        callback = function_name

                    if plugin_config is not None:
                        entries, plugin_errors = callback(entries, options_map,
                                                          plugin_config)
                    else:
                        entries, plugin_errors = callback(entries, options_map)
                    errors.extend(plugin_errors)

        except ImportError as exc:
            # Closes the truncated try-block above (reconstructed): report
            # import failures as load errors instead of aborting the load.
            errors.append(
                LoadError(data.new_metadata('<run_transformations>', 0),
                          'Error importing "{}": {}'.format(plugin_name, exc),
                          None))
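
# Illustrative plugin module satisfying the protocol consumed above: the
# loader imports the module, reads its __plugins__ tuple, and calls each named
# function with (entries, options_map), plus the config string when one is
# given. The module content and tag name are made up; only the calling
# protocol is taken from the source.
from beancount.core import data

__plugins__ = ('tag_everything',)

def tag_everything(entries, options_map, config=None):
    """Add a fixed tag to every transaction; 'config' may override the tag."""
    tag = config or 'autotagged'
    new_entries = []
    for entry in entries:
        if isinstance(entry, data.Transaction):
            entry = entry._replace(tags=(entry.tags or frozenset()) | {tag})
        new_entries.append(entry)
    return new_entries, []

# Enabled from a ledger file with a plugin directive, e.g.:
#   plugin "path.to.this.module" "custom-tag"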
    # Parse all the files recursively.
    entries, parse_errors, options_map = _parse_recursive(sources, log_timings, encoding)

    # Ensure that the entries are sorted before running any processes on them.
    entries.sort(key=data.entry_sortkey)

    # Run interpolation on incomplete entries.
    entries, balance_errors = booking.book(entries, options_map)
    parse_errors.extend(balance_errors)

    # Transform the entries.
    entries, errors = run_transformations(entries, parse_errors, options_map, log_timings)

    # Validate the list of entries.
    with misc_utils.log_time('beancount.ops.validate', log_timings, indent=1):
        valid_errors = validation.validate(entries, options_map, log_timings,
                                           extra_validations)
        errors.extend(valid_errors)

        # Note: We could go hardcore here and further verify that the entries
        # haven't been modified by user-provided validation routines, by
        # comparing hashes before and after. Not needed for now.

    # Compute the input hash.
    options_map['input_hash'] = compute_input_hash(options_map['include'])

    return entries, errors, options_map
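
# Hedged sketch of the cache key computed above: 'input_hash' digests the full
# set of included files, so it changes whenever any parsed source changes.
# Reading it back after a load (the filename is hypothetical):
from beancount import loader

entries, errors, options_map = loader.load_file('ledger.beancount')
print(options_map['include'])     # every file that was parsed, recursively
print(options_map['input_hash'])  # digest string over those files' contents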