Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
the raw response content (stream of bytes)
:raise:
- ResponseError: If there's an error in the response
- MissingResult: If no result nor error was found
"""
response = self._get_response()
has_result_single = False
has_result_many = False
has_error = False
builder = ObjectBuilder()
for prefix, event, value in ijson.parse(
response.raw, buf_size=self._chunk_size
):
if (prefix, event) == ("error", "start_map"):
# Matched ServiceNow `error` object at the root
has_error = True
elif prefix == "result" and event in ["start_map", "start_array"]:
# Matched ServiceNow `result`
if event == "start_map": # Matched object
has_result_single = True
elif event == "start_array": # Matched array
has_result_many = True
if has_result_many:
# Build the result
if (prefix, event) == ("result.item", "end_map"):
# Reached end of object. Set count and yield
def extract_iocs(self, report):
iocs = set()
parser = ijson.parse(report)
lines = ""
for prefix, event, value in parser:
if prefix in [
"analysis.behavior.network.tcp.packet.item.srcip",
"analysis.behavior.network.tcp.packet.item.dstip",
"analysis.behavior.network.udp.packet.item.srcip",
"analysis.behavior.network.udp.packet.item.dstip",
"analysis.behavior.network.dns.packet.item.name",
]:
if not value.startswith("192.168."):
iocs.add(value)
elif prefix in [
"analysis.behavior.network.http.packet.item.header",
"analysis.behavior.network.https.packet.item.header",
"analysis.behavior.network.sslhttp.packet.item.header",
]:
('start_map', None)
('end_map', None)
('start_array', None)
('end_array', None)
:param jsonStr: string of a single, self contained JSON object
:type jsonStr: String
:param row: partially filled array of values.
:type row: List<>
:return: array of values. Fills into the passed-in row array
:rtype: []
'''
self.jsonToRelationConverter.bumpLineCounter()
try:
try:
parser = ijson.parse(StringIO.StringIO(jsonStr))
except Exception as e:
self.logWarn('Ill formed JSON in track log, line %d: %s' % (self.jsonToRelationConverter.makeFileCitation(), `e`))
return row
# Stack of array index counters for use with
# nested arrays:
arrayIndexStack = Stack()
# Not currently processing
#for prefix,event,value in self.parser:
for nestedLabel, event, value in parser:
#print("Nested label: %s; event: %s; value: %s" % (nestedLabel,event,value))
if event == "start_map":
if not arrayIndexStack.empty():
# Starting a new attribute/value pair within an array: need
# a new number to differentiate column headers
self.incArrayIndex(arrayIndexStack)
for el in enumerate_json_items(f, encoding=encoding, lines=lines, flatten=flatten, fLOG=fLOG):
yield el
else:
st = StringIO(filename)
for el in enumerate_json_items(st, encoding=encoding, lines=lines, flatten=flatten, fLOG=fLOG):
yield el
elif isinstance(filename, bytes):
st = BytesIO(filename)
for el in enumerate_json_items(st, encoding=encoding, lines=lines, flatten=flatten, fLOG=fLOG):
yield el
elif lines:
for el in enumerate_json_items(JsonPerRowsStream(filename),
encoding=encoding, lines=False, flatten=flatten, fLOG=fLOG):
yield el
else:
parser = ijson.parse(filename)
current = None
curkey = None
stack = []
nbyield = 0
for i, (_, event, value) in enumerate(parser):
if i % 1000000 == 0 and fLOG is not None:
fLOG( # pragma: no cover
"[enumerate_json_items] i={0} yielded={1}".format(i, nbyield))
if event == "start_array":
if curkey is None:
current = []
else:
if not isinstance(current, dict):
raise RuntimeError( # pragma: no cover
"Type issue {0}".format(type(current)))
c = []
def _parse(filename, array_delimiter):
    '''Parses MoNA json file.

    Streams a MoNA (MassBank of North America) JSON export with ijson,
    building one ``record`` dict per top-level array item, split into a
    ``chemical`` and a ``spectrum`` sub-dict.

    NOTE(review): this block appears truncated -- ``records`` and
    ``array_delimiter`` are never used in the visible portion; presumably
    later branches append each finished record to ``records``. Confirm
    against the full file.
    '''
    records = []
    # Template record; replaced wholesale at each new top-level array item.
    record = {'chemical': {'names:string[]': []},
              'spectrum': {':LABEL': 'Spectrum', 'tags:string[]': []}}
    # Holds the most recent metaData key; its value arrives in a later event.
    name = None
    # NOTE(review): file handle from open() is never closed in this view.
    for prefix, typ, value in ijson.parse(open(filename)):
        if prefix == 'item' and typ == 'start_map':
            # Start of a new top-level array element: reset the record.
            record = {'chemical': {'names:string[]': []},
                      'spectrum': {':LABEL': 'Spectrum',
                                   'tags:string[]': []}}
        elif prefix == 'item.compound.item.inchi':
            record['chemical']['inchi'] = value
        elif prefix == 'item.compound.item.names.item.name':
            # First name seen becomes the canonical 'name'; every name is
            # also collected into the names array.
            if 'name' not in record['chemical']:
                record['chemical']['name'] = value
            record['chemical']['names:string[]'].append(value)
        elif prefix == 'item.compound.item.metaData.item.name' or \
                prefix == 'item.metaData.item.name':
            # Remember the normalised metadata key for the upcoming value.
            name = _normalise_name(value.lower())
        elif prefix == 'item.compound.item.metaData.item.value':
            _parse_compound_metadata(name, value, record)
            name = None
def __activate_json_parser(self):
    """Open the JSON input file and position the ijson event stream.

    Opens ``self.__jsonFileName``, stores the resulting ijson event
    generator on ``self.__jsonParser``, and consumes the first event so
    the stream is positioned past the opening of the top-level list.

    :raises StopIteration: if the file produces no events (empty input).
    """
    # NOTE: the file handle is intentionally left open; the generator
    # stored on self.__jsonParser keeps reading from it lazily.
    self.__jsonParser = ijson.parse(open(self.__jsonFileName, 'r'))
    # Use the builtin next() instead of the Python-2-only generator method
    # .next(), which does not exist in Python 3 and raised AttributeError.
    next(self.__jsonParser)  # skip over the start of the list
def get_tables(self):
    """Return table names derived from container prefixes in the JSON file.

    Streams ``self._filename`` with ijson and records, in first-seen
    order, the prefix of every object/array start event, each joined to
    ``self._root_prefix`` with a dot.  The root prefix itself stands in
    for the first discovered container in the returned list.
    """
    root = self._root_prefix
    seen = {}  # dict used as an insertion-ordered set of table names
    container_events = ("start_map", "start_array")
    with open(self._filename) as handle:
        for prefix, event, _value in ijson.parse(handle):
            if event in container_events:
                seen["{0}.{1}".format(root, prefix)] = None
    # Drop the first entry (the outermost container) in favour of the root.
    return [root] + list(seen)[1:]
def streamJsonArrayItems(f):
    """Lazily yield each top-level item of a JSON array read from *f*.

    Parses the stream with ijson so the whole file is never loaded into
    memory.  The input must be a JSON array of objects; each completed
    top-level object is yielded as soon as its closing brace is parsed.
    Nesting depth is tracked so objects nested within objects are kept
    inside their parent.

    :param f: file-like object positioned at the start of a JSON array.
    :raises ValueError: if the stream does not begin with a JSON array.
    """
    level = 0
    currentObject = ijson.ObjectBuilder()
    parsed = ijson.parse(f)
    # Consume the initial start_array event and validate the input.
    # (The original called the undefined module-level name ``assertEqual``
    # here, which raised NameError on every invocation.)
    first_event = next(parsed)[1]
    if first_event != 'start_array':
        raise ValueError(
            "expected JSON stream to start with an array, got event %r"
            % (first_event,))
    for _, event, value in parsed:
        currentObject.event(event, value)
        if event == 'start_map':
            level += 1
        elif event == 'end_map':
            level -= 1
            if level == 0:
                # Completed a top-level object: yield it and start fresh.
                yield currentObject.value
                currentObject = ijson.ObjectBuilder()
def jsonObjectReader(filepath):
"""
Creates a generator that parses an array of json objects from a valid
json array file, yielding each top level json object in the array.
:param filepath: path to json file.
"""
top_level_array = False
array_stack = 0
top_level_object = False
object_stack = 0
parser = ijson.parse(open(filepath, 'r'))
for prefix, event, value in parser:
if event == 'start_array':
if not top_level_array:
top_level_array = True
continue
else:
array_stack += 1
if event == 'start_map':
if not top_level_object:
top_level_object = True
builder = ijson.ObjectBuilder()
else:
object_stack += 1
if event == 'end_map':
if not top_level_object: