import os
import shutil
import zipfile

import arcpy
from arcgis.gis import GIS
from arcgis.features import FeatureLayerCollection


# Add GNSS metadata fields to a hosted point feature layer
def searchItems_addGNSSMetadataFields(args_parser):
    # Search ItemIds
    gis = GIS(args_parser.url, args_parser.username, args_parser.password)
    arcpy.AddMessage("Signed into organization..")
    itemId = args_parser.itemId
    try:
        featureLayerItem = gis.content.get(itemId)
        # Construct a FeatureLayerCollection from the portal item.
        featureLayerCollection = FeatureLayerCollection.fromitem(featureLayerItem)
        # Extract fields from the feature layer service definition
        featureLayerFields = featureLayerCollection.manager.layers[args_parser.layerIndex].properties['fields'] \
            if args_parser.layerIndex else \
            featureLayerCollection.manager.layers[0].properties['fields']
        # Feature layer index
        featureLayerIndex = args_parser.layerIndex if args_parser.layerIndex else 0
        # Check that the feature layer is of type esriGeometryPoint
        if featureLayerCollection.manager.layers[featureLayerIndex].properties['geometryType'] != 'esriGeometryPoint':
            arcpy.AddError("Feature layer is not a point layer")
            raise ValueError("Feature layer is not a point layer")
        # New fields which need to be added
        gnssMetadataFields = {'fields': []}
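

# Example (not from the original script): a minimal sketch of how the function above
# might be wired to a command line, assuming only the argument names the code reads
# (url, username, password, itemId, layerIndex). Flags and defaults here are
# illustrative, not the tool's actual interface.
def _build_arg_parser():
    import argparse
    parser = argparse.ArgumentParser(description="Add GNSS metadata fields to a hosted point layer")
    parser.add_argument("-url", required=True, help="Organization URL, e.g. https://myorg.maps.arcgis.com")
    parser.add_argument("-username", required=True)
    parser.add_argument("-password", required=True)
    parser.add_argument("-itemId", required=True, help="Item id of the hosted feature layer")
    parser.add_argument("-layerIndex", type=int, default=0, help="Index of the point layer in the service")
    return parser

# Possible usage:
# searchItems_addGNSSMetadataFields(_build_arg_parser().parse_args())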


# Build the *Definition objects used when cloning different portal item types
# If the item is a web map get the WebMapDefinition
if item['type'] == 'Web Map':
    webmap_json = item.get_data()
    return _WebMapDefinition(dict(item), data=webmap_json, thumbnail=None, portal_item=item)
# If the item is a workforce project get the WorkforceProjectDefinition
elif item['type'] == 'Workforce Project':
    workforce_json = item.get_data()
    return _WorkforceProjectDefinition(dict(item), data=workforce_json, thumbnail=None, portal_item=item)
# If the item is a form get the FormDefinition
elif item['type'] == 'Form':
    related_items = _get_related_items(item, 'Survey2Service')
    return _FormDefinition(dict(item), related_items=related_items, data=None, thumbnail=None, portal_item=item)
# If the item is a feature service get the FeatureServiceDefinition
elif item['type'] == 'Feature Service':
    svc = FeatureLayerCollection.fromitem(item)
    service_definition = dict(svc.properties)
    # Get the definitions of the layers and tables
    layers_definition = {'layers': [], 'tables': []}
    for layer in svc.layers:
        layers_definition['layers'].append(dict(layer.properties))
    for table in svc.tables:
        layers_definition['tables'].append(dict(table.properties))
    # Get the item data, for example any popup definition associated with the item
    data = item.get_data()
    return _FeatureServiceDefinition(dict(item), service_definition, layers_definition, features=None, data=data, thumbnail=None, portal_item=item)
# If the item is a feature collection get the FeatureCollectionDefinition
elif item['type'] == 'Feature Collection':


# Scan a web map definition for hosted feature services and feature collections
# that also need to be cloned
webmap_json = item_definition.data
featurelayer_services = []
feature_collections = []
if 'operationalLayers' in webmap_json:
    featurelayer_services += [layer for layer in webmap_json['operationalLayers']
                              if 'layerType' in layer and layer['layerType'] == "ArcGISFeatureLayer"
                              and 'url' in layer and layer['url'] is not None
                              and ('type' not in layer or layer['type'] != "Feature Collection")]
    feature_collections += [layer for layer in webmap_json['operationalLayers']
                            if 'layerType' in layer and layer['layerType'] == "ArcGISFeatureLayer"
                            and 'type' in layer and layer['type'] == "Feature Collection"]
if 'tables' in webmap_json:
    featurelayer_services += [table for table in webmap_json['tables'] if 'url' in table]
for layer in featurelayer_services:
    # The layer url points at a specific sublayer; its parent is the service url
    service_url = os.path.dirname(layer['url'])
    feature_service = next((definition for definition in item_definitions
                            if 'url' in definition.info and definition.info['url'] == service_url), None)
    if not feature_service:
        try:
            service = FeatureLayerCollection(service_url, source)
        except Exception:
            _add_message("Feature layer {0} is not a hosted feature service. It will not be cloned.".format(service_url), 'Warning')
            continue
        if 'serviceItemId' not in service.properties or service.properties['serviceItemId'] is None:
            _add_message("Feature layer {0} is not a hosted feature service. It will not be cloned.".format(service_url), 'Warning')
            continue
        try:
            item_id = service.properties['serviceItemId']
            feature_service = source.content.get(item_id)
        except RuntimeError:
            _add_message("Failed to get feature service item {0}".format(item_id), 'Error')
            raise
        _get_item_definitions(feature_service, item_definitions)
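

# Example (not part of the fragment above): a standalone sketch of the same idea,
# assuming only the calls already used in this file (GIS content lookup, Item.get_data,
# FeatureLayerCollection(url, gis), and the serviceItemId property). The item id and
# GIS connection are supplied by the caller; feature collections are ignored here.
def list_hosted_services_in_webmap(gis, webmap_itemid):
    # Return the parent service URLs of hosted feature layers referenced by a web map
    webmap_json = gis.content.get(webmap_itemid).get_data()
    service_urls = set()
    for layer in webmap_json.get('operationalLayers', []):
        url = layer.get('url')
        if not url or layer.get('layerType') != 'ArcGISFeatureLayer':
            continue
        service_url = os.path.dirname(url)  # strip the trailing sublayer index
        try:
            flc = FeatureLayerCollection(service_url, gis)
        except Exception:
            continue  # unreachable, or not a feature service at all
        if 'serviceItemId' in flc.properties and flc.properties['serviceItemId']:
            service_urls.add(service_url)  # hosted services expose a backing portal item
    return sorted(service_urls)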


# Update the service definition of every layer and table in a hosted feature layer
def update_service_definition(args_parser):
    try:
        gis = GIS(args_parser.url, args_parser.username, args_parser.password)
        featureLayerItem = gis.content.get(args_parser.itemId)
        featureLayerCollection = FeatureLayerCollection.fromitem(featureLayerItem)
        layers = featureLayerCollection.manager.layers
        tables = featureLayerCollection.manager.tables
        arcpy.AddMessage("Updating Service Definition..")
        for layer in layers:
            layer_index = layers.index(layer)
            update_template(featureLayerCollection, layer, layer_index, False)
        for table in tables:
            table_index = tables.index(table)
            update_template(featureLayerCollection, table, table_index, True)
        arcpy.AddMessage("Updated Service Definition..")
    except Exception as ex:
        arcpy.AddError("Failed to update service definition: {0}".format(str(ex)))
        raise


# Finish cloning a survey form item: clean up the temporary XLSForm, relate the new
# survey to its feature service, refresh the form row in the metadata table, and
# upload the rebuilt form zip to the new item
    finally:
        xlsx.close()
        if os.path.exists(xlsx_dir):
            shutil.rmtree(xlsx_dir)
    # Add a relationship between the new survey and the service
    for related_item in self.related_items:
        for key, value in item_mapping['Feature Services'].items():
            if _compare_url(related_item['url'], key):
                feature_service = target.content.get(value['id'])
                _add_relationship(new_item, feature_service, 'Survey2Service')
                break
    # If the survey was authored on the web, add the form json to the metadata table in the service
    if form_json is not None and feature_service_url is not None:
        svc = FeatureLayerCollection(feature_service_url, target)
        table = next((t for t in svc.tables if t.properties.name == 'metadata'), None)
        if table is not None:
            deletes = table.query(where="name = 'form'")
            table.edit_features(adds=[{'attributes': {'name': 'form', 'value': form_json}}], deletes=deletes)
    # Zip the directory
    zip_file = zipfile.ZipFile(form_zip, 'w', zipfile.ZIP_DEFLATED)
    _zip_dir(zip_dir, zip_file)
    zip_file.close()
    # Upload the zip to the item
    new_item.update(data=form_zip)
except Exception as ex:
    raise Exception("Failed to update {0} {1}: {2}".format(new_item['type'], new_item['title'], str(ex)))
finally:
    zip_file.close()
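

# _zip_dir() is not shown in the fragment above. A minimal sketch of what such a
# helper might do, assuming it writes every file under zip_dir into an already open
# zipfile.ZipFile using paths relative to zip_dir; the real helper may differ.
def _zip_dir(zip_dir, zip_file):
    for root, _dirs, files in os.walk(zip_dir):
        for name in files:
            file_path = os.path.join(root, name)
            # Store entries relative to the directory being zipped
            zip_file.write(file_path, os.path.relpath(file_path, zip_dir))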