def do_parse_data(type_string, *data_strings):
    type_json = unbox_json(ovs.json.from_string(type_string))
    type_ = ovs.db.types.Type.from_json(type_json)
    for datum_string in data_strings:
        datum_json = unbox_json(ovs.json.from_string(datum_string))
        datum = data.Datum.from_json(type_, datum_json)
        print(ovs.json.to_string(datum.to_json()))
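# A minimal standalone sketch of the same round trip shown above, for
# reference: the column type and datum JSON literals below are invented
# examples, not values taken from the snippet.
import ovs.db.data
import ovs.db.types
import ovs.json

type_json = ovs.json.from_string(
    '{"key": "string", "value": "integer", "min": 0, "max": "unlimited"}')
map_type = ovs.db.types.Type.from_json(type_json)

datum_json = ovs.json.from_string('["map", [["rx_packets", 42]]]')
datum = ovs.db.data.Datum.from_json(map_type, datum_json)
print(ovs.json.to_string(datum.to_json()))  # e.g. ["map",[["rx_packets",42]]]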
def _check_value(self, ovsrec_row, column_key_value):
    column, key, value_json = column_key_value
    column_schema = ovsrec_row._table.columns[column]
    value = ovs.db.data.Datum.from_json(
        column_schema.type, value_json).to_python(ovs.db.idl._uuid_to_row)
    datum = getattr(ovsrec_row, column)
    if key is None:
        if datum == value:
            return True
    else:
        if datum[key] != value:
            return True
    return False
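# Hedged sketch of the parse-and-compare pattern above, outside the IDL: the
# Datum parsed from JSON is converted to a plain Python value before it is
# compared against a row attribute.  The scalar integer type is invented for
# illustration, and the identity callback stands in for
# ovs.db.idl._uuid_to_row.
import ovs.db.data
import ovs.db.types

int_type = ovs.db.types.Type(ovs.db.types.BaseType(ovs.db.types.IntegerType))
parsed = ovs.db.data.Datum.from_json(int_type, 42)
print(parsed.to_python(lambda value, base: value))  # expected: 42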
def from_json(cls, idl, table, uuid, row_json):
    data = {}
    for column_name, datum_json in six.iteritems(row_json):
        column = table.columns.get(column_name)
        if not column:
            # XXX rate-limit
            vlog.warn("unknown column %s in table %s"
                      % (column_name, table.name))
            continue
        try:
            datum = ovs.db.data.Datum.from_json(column.type, datum_json)
        except error.Error as e:
            # XXX rate-limit
            vlog.warn("error parsing column %s in table %s: %s"
                      % (column_name, table.name, e))
            continue
        data[column_name] = datum
    return cls(idl, table, uuid, data)
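# Hedged sketch of the error path handled above: Datum.from_json raises
# ovs.db.error.Error when the JSON does not match the column type, which the
# loop above logs and skips.  The string column type is an invented example.
import ovs.db.data
import ovs.db.error
import ovs.db.types

string_type = ovs.db.types.Type(ovs.db.types.BaseType(ovs.db.types.StringType))
try:
    ovs.db.data.Datum.from_json(string_type, 123)  # wrong JSON type on purpose
except ovs.db.error.Error as e:
    print("parse failed: %s" % e)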
# Implied guard: the fatal error below fires only when the named column does
# not exist in the row's table.
if column not in ovsrec_row._table.columns:
    vsctl_fatal('%s does not contain a column whose name matches "%s"'
                % (ovsrec_row._table.name, column))
column_schema = ovsrec_row._table.columns[column]
if key is not None:
    value_json = ['map', [[key, value_json]]]
    if column_schema.type.value.type == ovs.db.types.VoidType:
        vsctl_fatal('cannot specify key to set for non-map column %s' %
                    column)
    datum = ovs.db.data.Datum.from_json(column_schema.type, value_json,
                                        self.symtab)
    values = getattr(ovsrec_row, column, {})
    values.update(datum.to_python(ovs.db.idl._uuid_to_row))
    setattr(ovsrec_row, column, values)
else:
    datum = ovs.db.data.Datum.from_json(column_schema.type, value_json,
                                        self.symtab)
    setattr(ovsrec_row, column,
            datum.to_python(ovs.db.idl._uuid_to_row))
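# Hedged sketch of the key-update path above: a single key/value pair is
# wrapped as an OVSDB "map" datum and then parsed against the column type.
# The options-style map type below is an invented stand-in for
# column_schema.type.
import ovs.db.data
import ovs.db.types

options_type = ovs.db.types.Type.from_json(
    {"key": "string", "value": "string", "min": 0, "max": "unlimited"})
key, value_json = "stp-enable", "true"
datum = ovs.db.data.Datum.from_json(options_type, ['map', [[key, value_json]]])
print(datum.to_json())  # e.g. ['map', [['stp-enable', 'true']]]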
def _check_value(self, ovsrec_row, column_value):
    """
    :type column_value: tuple of column and value_json
    """
    column, value_json = column_value
    column_schema = ovsrec_row._table.columns[column]
    value = ovs.db.data.Datum.from_json(
        column_schema.type, value_json).to_python(ovs.db.idl._uuid_to_row)
    datum = getattr(ovsrec_row, column)
    if column_schema.type.is_map():
        for k, v in value.items():
            if k in datum and datum[k] == v:
                return True
    elif datum == value:
        return True
    return False
def from_json(json):
    if type(json) in [str, unicode]:
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    atomic_type = AtomicType.from_json(parser.get("type", [str, unicode]))

    base = BaseType(atomic_type)

    enum = parser.get_optional("enum", [])
    if enum is not None:
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        base.min = parser.get_optional("minInteger", [int, long])
        base.max = parser.get_optional("maxInteger", [int, long])
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        base.min = parser.get_optional("minReal", [int, long, float])
        base.max = parser.get_optional("maxReal", [int, long, float])
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxint)  # assumed default
def __apply_diff(self, table, row, row_diff):
    old_row = {}
    for column_name, datum_diff_json in six.iteritems(row_diff):
        column = table.columns.get(column_name)
        if not column:
            # XXX rate-limit
            vlog.warn("unknown column %s updating table %s"
                      % (column_name, table.name))
            continue

        try:
            datum_diff = data.Datum.from_json(column.type, datum_diff_json)
        except error.Error as e:
            # XXX rate-limit
            vlog.warn("error parsing column %s in table %s: %s"
                      % (column_name, table.name, e))
            continue

        old_row[column_name] = row._data[column_name].copy()
        datum = row._data[column_name].diff(datum_diff)
        if datum != row._data[column_name]:
            row._data[column_name] = datum

    return old_row
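# Hedged sketch of parsing a column diff as done above: the diff JSON for a
# set-valued column is itself an OVSDB datum and is parsed against the column
# type before Datum.diff() applies it.  The interface-name set type below is
# invented for illustration.
import ovs.db.data
import ovs.db.types

set_type = ovs.db.types.Type.from_json(
    {"key": "string", "min": 0, "max": "unlimited"})
diff_datum = ovs.db.data.Datum.from_json(set_type, ["set", ["eth0", "eth1"]])
print(diff_datum.to_json())  # e.g. ['set', ['eth0', 'eth1']]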
def from_json(json):
    if isinstance(json, six.string_types):
        return BaseType(AtomicType.from_json(json))

    parser = ovs.db.parser.Parser(json, "ovsdb type")
    atomic_type = AtomicType.from_json(parser.get("type",
                                                  six.string_types))

    base = BaseType(atomic_type)

    enum = parser.get_optional("enum", [])
    if enum is not None:
        base.enum = ovs.db.data.Datum.from_json(
            BaseType.get_enum_type(base.type), enum)
    elif base.type == IntegerType:
        base.min = parser.get_optional("minInteger", six.integer_types)
        base.max = parser.get_optional("maxInteger", six.integer_types)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minInteger exceeds maxInteger", json)
    elif base.type == RealType:
        base.min = parser.get_optional("minReal", REAL_PYTHON_TYPES)
        base.max = parser.get_optional("maxReal", REAL_PYTHON_TYPES)
        if (base.min is not None and base.max is not None
                and base.min > base.max):
            raise error.Error("minReal exceeds maxReal", json)
    elif base.type == StringType:
        base.min_length = BaseType.__parse_uint(parser, "minLength", 0)
        base.max_length = BaseType.__parse_uint(parser, "maxLength",
                                                sys.maxsize)  # assumed default
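# Hedged usage sketch of the enum branch above: the "enum" member of a type
# declaration is itself parsed with ovs.db.data.Datum.from_json against a
# set type derived from the base type.  The admin-state enum is an invented
# example.
import ovs.db.types

base = ovs.db.types.BaseType.from_json(
    {"type": "string", "enum": ["set", ["up", "down"]]})
print(base.enum.to_json())  # e.g. ['set', ['down', 'up']]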