Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def process_path(self, filename, asset_type, sub_texture=False):
    """Resolve a Blender-relative *filename* for use as an asset reference.

    The path is made absolute (and frame-number tokens expanded when a
    ``%`` placeholder is present), then adjusted per *asset_type*:
    shader assets and archive assets have their directory recorded in
    ``self._searchpaths`` and are reduced to a bare name; texture assets
    with ``sub_texture`` set are remapped to a ``.tx`` extension.

    :param filename: Blender path (possibly ``//``-relative).
    :param asset_type: an ``AssetType`` member selecting the treatment.
    :param sub_texture: when True and *asset_type* is a texture,
        substitute the ``.tx`` variant of the file.
    :return: the processed asset path/name string.
    """
    path = bpy.path.abspath(filename)
    # '%' marks a frame-number placeholder that must be expanded first.
    if '%' in path:
        path = self._convert_frame_number(path)

    if asset_type == AssetType.SHADER_ASSET:
        # Shaders are referenced by bare name; their directory becomes
        # a search path.
        directory, leaf = os.path.split(path)
        self._searchpaths.append(directory)
        return os.path.splitext(leaf)[0]

    if asset_type == AssetType.TEXTURE_ASSET and sub_texture:
        # Swap the on-disk extension for the pre-converted .tx texture.
        return os.path.splitext(path)[0] + ".tx"

    if asset_type == AssetType.ARCHIVE_ASSET:
        # Archives are referenced by file name; their directory becomes
        # a search path.
        directory, leaf = os.path.split(path)
        self._searchpaths.append(directory)
        return leaf

    return path
def process_path(self, filename, asset_type, sub_texture=False):
    """Normalize *filename* into the asset reference used for export.

    Expands the Blender-relative path (and any ``%`` frame-number
    placeholder), then applies one asset-type-specific transformation.
    Shader and archive directories are appended to ``self._searchpaths``
    as a side effect.

    :param filename: source path, possibly Blender-relative.
    :param asset_type: ``AssetType`` member controlling the transform.
    :param sub_texture: substitute a ``.tx`` extension for textures.
    :return: the transformed path or name.
    """
    resolved = bpy.path.abspath(filename)
    if '%' in resolved:
        # Expand frame-number placeholders before any splitting.
        resolved = self._convert_frame_number(resolved)

    if asset_type == AssetType.SHADER_ASSET:
        # Record the shader's directory and keep only the stem.
        head, tail = os.path.split(resolved)
        self._searchpaths.append(head)
        resolved, _ext = os.path.splitext(tail)
    elif asset_type == AssetType.TEXTURE_ASSET and sub_texture:
        # Point at the converted .tx texture instead of the original.
        stem, _ext = os.path.splitext(resolved)
        resolved = f"{stem}.tx"
    elif asset_type == AssetType.ARCHIVE_ASSET:
        # Record the archive's directory and keep only the file name.
        head, tail = os.path.split(resolved)
        self._searchpaths.append(head)
        resolved = tail

    return resolved
# --- NOTE(review): truncated excerpt (extraction artifact). This is the
# interior of an HTTP query handler using FIWARE-style headers; lines are
# missing and indentation was flattened, so it is not runnable as-is.
r, c, geo_query = handle_geo_query(georel, geometry, coords)
# A non-empty first result from the geo-query helper is an error payload;
# return it with its status code.
if r:
return r, c
# 'attrs' arrives as a comma-separated string; turn it into a list.
if attrs is not None:
attrs = attrs.split(',')
# Multi-tenancy headers (both optional).
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
entity_ids = None
# 'id_' is a comma-separated list of entity ids; strip and drop empties.
if id_:
entity_ids = [s.strip() for s in id_.split(',') if s]
try:
# Delegate the actual time-series query to the service's translator.
with translator_for(fiware_s) as trans:
entities = trans.query(attr_names=attrs,
entity_type=entity_type,
entity_ids=entity_ids,
aggr_method=aggr_method,
aggr_period=aggr_period,
aggr_scope=aggr_scope,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
# NOTE(review): handler cut off here — the original presumably builds an
# error-response dict; the dict body is missing from this excerpt.
return {
# NOTE(review): the next two lines were spliced in from a different,
# unrelated part of the original file.
if c != 200:
return r, c
# --- NOTE(review): truncated excerpt (extraction artifact). Variant of the
# query handler above, but restricted to a single attribute name; not
# runnable as-is.
r, c, geo_query = handle_geo_query(georel, geometry, coords)
# Error payload from the geo-query helper short-circuits the request.
if r:
return r, c
# Optional multi-tenancy headers.
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
entity_ids = None
# Comma-separated entity-id filter.
if id_:
entity_ids = [s.strip() for s in id_.split(',') if s]
try:
with translator_for(fiware_s) as trans:
# Single-attribute query: attr_names is a one-element list.
entities = trans.query(attr_names=[attr_name],
entity_type=entity_type,
entity_ids=entity_ids,
aggr_method=aggr_method,
aggr_period=aggr_period,
aggr_scope=aggr_scope,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
# NOTE(review): handler cut off here; the error-response dict body is
# missing from this excerpt.
return {
# NOTE(review): the following four lines are residue of one or more
# unrelated call sites spliced in by the extraction.
aggr_scope,
options)
if c != 200:
return r, c
# --- NOTE(review): truncated excerpt (extraction artifact). Same
# single-attribute query pattern, here keyed by 'type_'; not runnable as-is.
r, c, geo_query = handle_geo_query(georel, geometry, coords)
if r:
return r, c
# Optional multi-tenancy headers.
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
entity_ids = None
if id_:
entity_ids = [s.strip() for s in id_.split(',') if s]
try:
with translator_for(fiware_s) as trans:
entities = trans.query(attr_names=[attr_name],
entity_type=type_,
entity_ids=entity_ids,
aggr_method=aggr_method,
aggr_period=aggr_period,
aggr_scope=aggr_scope,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
# Build a human-readable message for the usage error (rest of the
# handler is missing from this excerpt).
msg = "Bad Request Error: {}".format(e)
# NOTE(review): the next two lines were spliced in from elsewhere.
if c != 200:
return r, c
# --- NOTE(review): truncated excerpt (extraction artifact). Query handler
# variant addressing a single entity via 'entity_id'; not runnable as-is.
r, c, geo_query = handle_geo_query(georel, geometry, coords)
if r:
return r, c
# Comma-separated attribute filter.
if attrs is not None:
attrs = attrs.split(',')
# Optional multi-tenancy headers.
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
try:
with translator_for(fiware_s) as trans:
# Note: singular 'entity_id' keyword here (vs 'entity_ids' in the
# sibling excerpts) and no aggr_scope.
entities = trans.query(attr_names=attrs,
entity_type=type_,
entity_id=entity_id,
aggr_method=aggr_method,
aggr_period=aggr_period,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
# NOTE(review): error-response dict is cut off after its first key.
return {
"error": "{}".format(type(e)),
# --- NOTE(review): truncated excerpt (extraction artifact). Starts
# mid-function; the handle_geo_query call that defines 'r'/'c' is missing.
if r:
return r, c
# Comma-separated attribute filter.
if attrs is not None:
attrs = attrs.split(',')
# Optional multi-tenancy headers.
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
entity_ids = None
if id_:
entity_ids = [s.strip() for s in id_.split(',') if s]
try:
with translator_for(fiware_s) as trans:
entities = trans.query(attr_names=attrs,
entity_type=type_,
entity_ids=entity_ids,
aggr_method=aggr_method,
aggr_period=aggr_period,
aggr_scope=aggr_scope,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
# Rest of the error handling is missing from this excerpt.
msg = "Bad Request Error: {}".format(e)
def process_path(self, filename, asset_type, sub_texture=False):
    """Turn *filename* into the reference string exported for an asset.

    Absolutizes the Blender path, expands any ``%`` frame-number
    placeholder, then reshapes the result according to *asset_type*.
    Directories of shader and archive assets are appended to
    ``self._searchpaths``.

    :param filename: Blender path to process.
    :param asset_type: ``AssetType`` member for the asset kind.
    :param sub_texture: for textures, reference the ``.tx`` variant.
    :return: processed path or bare name.
    """
    asset = bpy.path.abspath(filename)
    # Expand frame-number placeholders ('%') up front.
    asset = self._convert_frame_number(asset) if '%' in asset else asset
    if asset_type == AssetType.SHADER_ASSET:
        # Shader: remember its folder, keep only the extension-less name.
        folder, name = os.path.split(asset)
        self._searchpaths.append(folder)
        asset = os.path.splitext(name)[0]
    if asset_type == AssetType.TEXTURE_ASSET and sub_texture:
        # Texture with substitution requested: point at the .tx file.
        asset = os.path.splitext(asset)[0] + ".tx"
    if asset_type == AssetType.ARCHIVE_ASSET:
        # Archive: remember its folder, keep only the file name.
        folder, name = os.path.split(asset)
        self._searchpaths.append(folder)
        asset = name
    return asset
# --- NOTE(review): truncated excerpt (extraction artifact). Interior of a
# tokenizing method (it uses 'self'); indentation was flattened and the
# final elif branch is cut off mid-way.
# Extract the meaning of the received line.
mob = TOKEN_X.match(line) # cannot fail
assert mob is not None # something has to match
# Store the parser token representing this line's contents
# for the parser to read.
tok = TOKEN_VALUE[ mob.lastgroup ]
self.input = tok
# If it is an empty line, we are done; we do not make WorkUnits for
# empty lines.
if tok == 'E' : return
# Not an empty line, so make a work unit.
unit = WorkUnit( self.line_number, tok, line )
# If starting a bracketed group, set the switch for "looking for a
# closing bracket". Remove boilerplate from the line text, and put
# optional items in the stuff dict. Put the group-opening unit on
# the WORK_UNITS list and continue with a Line unit.
if tok in 'FIS' :
self.find_bracket = True
open_unit = unit.copy() # make an F/I/S unit with no text.
WORK_UNITS.append( open_unit )
self.input += 'L' # token input is FL, IL, or SL
unit.tok = 'L' # second unit is a LINE
if tok == 'F' :
# remove [Footnote A: from LINE, save "A" in OPEN stuff
hob = self.fnrex.match(line)
open_unit.stuff['key'] = hob.group(1)
# NOTE(review): the 'I' (illustration, presumably) branch is cut off here;
# its body is missing from this excerpt.
elif tok == 'I' :
# --- NOTE(review): incomplete from this view — the body of the for-loop at
# the end continues past the last visible line; only annotated, not edited.
def event_generator( page_model, edit_model ) :
global WORK_UNITS
# When the actual lnum of a unit exceeds expect_lnum, and we are in a
# no-flow section, we need to generate blank lines to fill in.
in_no_flow = False
expect_lnum = 1
# Stack of container work units. Almost never gets more than 2 deep.
stack = [ WorkUnit(0,XU.Events.LINE, '') ] # outermost context F/L/R
# The "bracket" group (fnote, snote, illo) last seen, to be closed
# when a ']' token appears.
last_bracket = None
# The index of the next scan image, if any, and the position at which it
# starts in the source document. If there are no scan images, store a
# start position that will never be reached.
next_scan = 0
scan_limit = page_model.page_count()
# 2**31 acts as an unreachable sentinel position when no scans are active.
next_scan_starts = page_model.position(0) if page_model.active() else int( 2**31 )
# If in a table, all LINEs get special treatment.
in_table = False
columns = []
# Main dispatch loop over the module-level WORK_UNITS list (body truncated
# in this excerpt).
for unit in WORK_UNITS :
code = unit.tok