import unittest

from dictdiffer import patch
from dictdiffer.merge import Merger, UnresolvedConflictsException


# Minimal test-case scaffolding for the dictdiffer Merger tests;
# the class name here is illustrative.
class MergerContinueRunTest(unittest.TestCase):

    def test_continue_run(self):
        def take_first(conflict, _, __, ___):
            conflict.take = [('f', x) for x
                             in range(len(conflict.first_patch.patches))]
            return True

        lca = {'changeme': 'Jo'}
        first = {'changeme': 'Joe'}
        second = {'changeme': 'John'}

        m = Merger(lca, first, second, {})
        try:
            m.run()
        except UnresolvedConflictsException:
            pass
        m.continue_run(['f'])

        self.assertEqual(m.unified_patches,
                         [('change', 'changeme', ('Jo', 'Joe'))])

    def test_continue_run_multiple_conflicts_per_patch(self):
        lca = {'foo': [{'x': 1}, {'y': 2}]}
        first = {'foo': [{'x': 1}, {'y': 2}, {'z': 4}]}
        second = {'bar': 'baz'}

        expected = {
            'f': {'foo': [{'x': 1}, {'y': 2}, {'z': 4}],
                  'bar': 'baz'},
            's': {'bar': 'baz'}}

        for resolution, expected_value in expected.items():
            m = Merger(lca, first, second, {})
            try:
                m.run()
            except UnresolvedConflictsException as e:
                m.continue_run([resolution for _ in e.content])

            self.assertEqual(patch(m.unified_patches, lca),
                             expected_value)

    def test_run(self):
        lca = {'changeme': 'Jo'}
        first = {'changeme': 'Joe'}
        second = {'changeme': 'John'}

        m = Merger(lca, first, second, {})
        self.assertRaises(UnresolvedConflictsException, m.run)
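The pattern these tests exercise is run() raising UnresolvedConflictsException when both sides change the same key, followed by continue_run() taking one 'f' (first) or 's' (second) pick per conflict. A minimal standalone sketch of that flow, using only the dictdiffer calls already shown above (the data values are illustrative):

from dictdiffer import patch
from dictdiffer.merge import Merger, UnresolvedConflictsException

# Three-way merge: a common ancestor and two divergent copies.
lca = {'changeme': 'Jo'}
first = {'changeme': 'Joe'}
second = {'changeme': 'John'}

m = Merger(lca, first, second, {})
try:
    m.run()
except UnresolvedConflictsException as e:
    # Both sides changed 'changeme', so the merge stops. Resolve every
    # conflict in favour of `first` ('f'; 's' would pick `second`) and resume.
    m.continue_run(['f' for _ in e.content])

# unified_patches is a single patch set; apply it to the ancestor
# to materialise the merged document.
print(patch(m.unified_patches, lca))  # {'changeme': 'Joe'}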
try:
    super(Releases, self).update(where={"name": name}, what=what, changed_by=changed_by,
                                 old_data_version=old_data_version,
                                 transaction=transaction, dryrun=dryrun)
except OutdatedDataError as e:
    self.log.debug("trying to update older data_version %s for release %s" % (old_data_version, name))
    if blob is not None:
        ancestor_change = self.history.getChange(data_version=old_data_version,
                                                 column_values={'name': name},
                                                 transaction=transaction)
        # if we have no historical information about the ancestor blob,
        # there is nothing to merge against, so re-raise
        if ancestor_change is None:
            self.log.debug("history for data_version %s for release %s is absent" % (old_data_version, name))
            raise
        ancestor_blob = ancestor_change.get('data')
        tip_release = self.getReleases(name=name, transaction=transaction)[0]
        tip_blob = tip_release.get('data')
        m = dictdiffer.merge.Merger(ancestor_blob, tip_blob, blob, {})
        try:
            m.run()
            # Merger merges the patches into a single unified patch,
            # but we need dictdiffer.patch to actually apply the patch
            # to the original blob
            unified_blob = dictdiffer.patch(m.unified_patches, ancestor_blob)
            # converting the resultant dict into a blob and then
            # converting it to JSON
            what['data'] = unified_blob
            # we want the data_version for the merged blob to be one
            # more than that of the latest (tip) blob
            tip_data_version = tip_release['data_version']
            super(Releases, self).update(where={"name": name}, what=what, changed_by=changed_by,
                                         old_data_version=tip_data_version,
                                         transaction=transaction, dryrun=dryrun)
            # the cache will have a data_version of one plus the tip
            # data_version
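Stripped of the table and history plumbing, the conflict handling above is an ordinary dictdiffer three-way merge of the ancestor blob, the current tip blob, and the incoming blob. A rough standalone sketch under that reading follows; the blob contents are made up, and a real caller still has to decide what to do when UnresolvedConflictsException is raised:

import dictdiffer
import dictdiffer.merge

# Illustrative stand-ins for the ancestor, tip, and incoming release blobs.
ancestor_blob = {'platforms': {'linux': {'buildID': '1'}}}
tip_blob = {'platforms': {'linux': {'buildID': '2'}}}
incoming_blob = {'platforms': {'linux': {'buildID': '1'},
                               'mac': {'buildID': '1'}}}

m = dictdiffer.merge.Merger(ancestor_blob, tip_blob, incoming_blob, {})
try:
    m.run()
except dictdiffer.merge.UnresolvedConflictsException:
    # Both edits touched the same field; at this point update() has no
    # automatic answer and the error has to propagate.
    raise

# Apply the unified patch to the ancestor, exactly as update() does before
# re-running super().update() against the tip's data_version.
merged_blob = dictdiffer.patch(m.unified_patches, ancestor_blob)
print(merged_blob)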