# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a method body -- the enclosing `def` (and the
# definitions of fmt, layername, folderpath, crtfld, epsg_cd, schema, coords,
# prop and self) lie outside this view. Indentation reconstructed.
# Pick the output extension for the requested OGR format.
if fmt in ["ESRI Shapefile", "GeoJSON"]:
    ext = ".shp"
    if fmt == "GeoJSON":
        ext = ".geojson"
filepath = os.path.join(folderpath, "{0}{1}".format(layername, ext))
self.__dest_folder(folderpath, crtfld)
# Remove any pre-existing GeoJSON file first -- presumably the GeoJSON driver
# cannot overwrite in place; confirm against the fiona version in use.
if fmt == "GeoJSON" and os.path.isfile(filepath):
    os.remove(filepath)
out_crs = from_epsg(epsg_cd)
with collection(filepath, "w", fmt, schema, crs=out_crs) as output:
    line = LineString(coords)
    geom = mapping(line)
    # Optionally split the geometry where it crosses the antimeridian.
    if self.__antimeridian:
        line_t = self.__antiMeridianCut(geom)
    else:
        line_t = geom
    output.write({
        'properties': {
            'prop': prop
        },
        'geometry': line_t
    })
def convert(buildingIn, addressIn, osmOut):
    # NOTE(review): truncated in this view -- the address/building matching and
    # any use of osmOut continue beyond the last visible line.
    # Load all addresses.
    addresses = []
    with collection(addressIn, "r") as input:
        for address in input:
            shape = asShape(address['geometry'])
            # Keep the source record attached so its attributes stay reachable.
            shape.original = address
            addresses.append(shape)
    # Load and index all buildings.
    buildingIdx = index.Index()
    buildings = []
    with collection(buildingIn, "r") as input:
        for building in input:
            building['shape'] = asShape(building['geometry'])
            # Indentation reconstructed: assuming buildings marked 'Void' are
            # excluded from both the list and the spatial index -- confirm.
            if building['properties']['DESCRIPTIO'] != 'Void':
                building['properties']['addresses'] = []
                buildings.append(building)
                buildingIdx.add(len(buildings) - 1, building['shape'].bounds)
# NOTE(review): fragment of a larger script -- classify, Ztemp, classNumber,
# classWeight, xmin/xmax/ymin/ymax, Affine, fiona, args, features, np,
# Polygon, MultiPolygon, mapping and from_epsg are all defined elsewhere.
# Classify the raster, then release the (large) source array.
Z16 = classify(Ztemp,classNumber,classWeight)
del Ztemp
# Pixel sizes derived from the data extent.
# NOTE(review): assumes Z16.shape[0] spans x and shape[1] spans y, matching
# the np.rot90 applied before polygonizing below -- confirm.
pixel_size_x = (xmax - xmin)/Z16.shape[0]
pixel_size_y = (ymax - ymin)/Z16.shape[1]
# Shift the origin by half a pixel.
upper_left_x = xmin - pixel_size_x/2.0
upper_left_y = ymax + pixel_size_y/2.0
transform = Affine(
    pixel_size_x, 0.0, upper_left_x,
    0.0, -pixel_size_y, upper_left_y)
schema = { 'geometry': 'MultiPolygon', 'properties': { 'value': 'int' } }
print "Writing to shp..."
# Vectorize the classified raster; write one multipolygon per extracted shape,
# carrying its class value. Output CRS is EPSG:3857.
with fiona.collection(args.outfile, "w", "ESRI Shapefile", schema, crs=from_epsg(3857)) as outshp:
    for feature, shapes in features.shapes(np.asarray(np.rot90(Z16.astype(np.uint8)),order='C'),transform=transform):
        featurelist = []
        for f in feature['coordinates']:
            featurelist.append(Polygon(f))
        poly = MultiPolygon(featurelist)
        outshp.write({'geometry': mapping(poly),'properties': {'value': shapes}})
def merge(buildingIn, addressIn, mergedOut):
    # NOTE(review): truncated in this view -- the function body continues past
    # the last visible line; mergedOut, buildings, buildingShapes and
    # buildingIdx are only set up here, not yet used.
    # Load all addresses, keeping the original record on each shape.
    addresses = []
    with collection(addressIn, "r") as input:
        for address in input:
            shape = asShape(address['geometry'])
            shape.original = address
            addresses.append(shape)
    # Extract the numeric id from a building file name like '...-12345.shp'.
    # NOTE(review): the pattern should be a raw string (r'...') to avoid the
    # '\d'/'\.' escape warnings; .groups(0)[0] works but .group(1) is clearer.
    geoid = re.match('^.*-(\d+)\.shp$', buildingIn).groups(0)[0]
    # print "loaded", len(addresses), "addresses"
    # Load and index all buildings.
    buildings = []
    buildingShapes = []
    buildingIdx = index.Index()
    with collection(buildingIn, "r") as input:
        for building in input:
            shape = asShape(building['geometry'])
def chunk(featureFileName, sectionFileName, pattern, key=None):
    """Split a feature file into one ESRI Shapefile per section polygon.

    featureFileName -- fiona-readable source of features to distribute
    sectionFileName -- fiona-readable source of section polygons
    pattern         -- '%'-style template for the output file names
    key             -- optional section property used to name each output;
                       when absent, a running counter is used instead

    Each output file receives every feature whose geometry intersects the
    corresponding section.
    """
    # Load every feature once, caching its parsed shape: the original code
    # re-ran asShape() on each candidate for every section, re-parsing the
    # same geometries over and over.
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        featureShapes = []
        for feature in featureFile:
            shp = asShape(feature['geometry'])
            features.append(feature)
            featureShapes.append(shp)
            featureIdx.add(len(features) - 1, shp.bounds)
        # Break up by sections and export (nested so featureFile.schema/crs
        # stay available for the writers).
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                if key:
                    fileName = pattern % section['properties'][key]
                else:
                    fileName = pattern % i
                with collection(fileName, 'w', 'ESRI Shapefile',
                                schema=featureFile.schema,
                                crs=featureFile.crs) as output:
                    sectionShape = asShape(section['geometry'])
                    # Bounding-box candidates from the spatial index, then an
                    # exact intersection test on the cached shapes.
                    for j in featureIdx.intersection(sectionShape.bounds):
                        if featureShapes[j].intersects(sectionShape):
                            output.write(features[j])
                # Parenthesized form prints identically on Python 2 and 3.
                print("Exported %s" % fileName)
                i = i + 1
# NOTE(review): this span repeats the body of chunk() above verbatim but at
# module level; featureFileName, sectionFileName, pattern and key are not
# defined in this scope, so executing it would raise NameError. It looks like
# a stray duplicate paste -- confirm and remove.
# Load and index
with collection(featureFileName, "r") as featureFile:
    featureIdx = index.Index()
    features = []
    for feature in featureFile:
        features.append(feature)
        featureIdx.add(len(features) - 1, asShape(feature['geometry']).bounds)
    # Break up by sections and export
    with collection(sectionFileName, "r") as sectionFile:
        i = 0
        for section in sectionFile:
            fileName = pattern % i
            if key:
                fileName = pattern % section['properties'][key]
            with collection(fileName, 'w', 'ESRI Shapefile',
                            schema=featureFile.schema,
                            crs=featureFile.crs) as output:
                sectionShape = asShape(section['geometry'])
                for j in featureIdx.intersection(sectionShape.bounds):
                    if asShape(features[j]['geometry']).intersects(sectionShape):
                        output.write(features[j])
            print "Exported %s" % fileName
            i = i + 1
# BUG FIX: the route read '/listdata///' -- the '<filename>' and '<field>'
# URL converters had been stripped (classic lost-angle-bracket mangling),
# leaving the two view arguments unfillable. Restored.
@app.route('/listdata/<filename>/<field>/', methods=['GET'])
def get_shpdbf(filename, field):
    """
    Extract a column from a shapefile (geom) or dbf (attribute)
    """
    # SECURITY(review): `filename` comes straight from the URL and is joined
    # onto UPLOAD_FOLDER -- a value like '..%2F..%2Fetc%2Fx' escapes the upload
    # directory. Consider werkzeug.utils.secure_filename() before joining.
    base = os.path.join(UPLOAD_FOLDER, filename)
    if field == 'thegeom':
        # Special field name: return every feature of the shapefile as a
        # GeoJSON FeatureCollection.
        geoms = []
        with fiona.collection(base + '.shp', "r") as source:
            for feat in source:
                geoms.append(feat)
        geojson = {
            "type": "FeatureCollection",
            "features": geoms
        }
        response = {'status': 'success', 'data': {'geojson': geojson}}
    else:
        # Any other field: read that column from the sidecar .dbf.
        dbf = ps.open(base + '.dbf', 'r')
        attr = dbf.by_col(field)
        response = {'status': 'success', 'data': {field: attr}}
    return jsonify(response)
def apply(self, ds_name, field_names=()):
    """Build an R-tree over the features of `ds_name` that fall inside the
    current transform extent.

    ds_name     -- fiona-readable data source path
    field_names -- attribute fields to retain alongside each geometry
                   (BUG FIX: was a mutable default `[]`, shared across calls;
                   an immutable empty tuple behaves identically for the
                   membership test below)

    NOTE(review): `ctrans` is not defined in this view -- presumably a
    module/instance-level coordinate transform; confirm. `idx` is local and
    never returned in the visible code -- confirm the caller's expectations.
    """
    p = index.Property()
    idx = index.Index(properties=p)
    with collection(ds_name, "r") as source:
        e = ctrans.extent
        # Only features whose bounds overlap the extent are considered.
        for feat in source.filter(bbox=(e.minx, e.miny, e.maxx, e.maxy)):
            geom = shape(feat['geometry'])
            attrs = feat['properties']
            # Keep only the requested attribute fields.
            # (.items() replaces py2-only .iteritems(); identical behavior.)
            saved_attrs = {}
            for field, val in attrs.items():
                if field in field_names:
                    saved_attrs[field] = val
            # Break up multipolygons for more efficient index
            if geom.type == "MultiPolygon":
                for g in geom.geoms:
                    idx.insert(int(feat['id']), g.bounds, obj=(g, saved_attrs))
            else:
                idx.insert(int(feat['id']), geom.bounds, obj=(geom, saved_attrs))
def chunk(featureFileName, sectionFileName, pattern, key=None):
    # NOTE(review): truncated in this view -- the `try` below has no visible
    # except/finally clause; the handler must continue past the last line.
    # Load and index
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        for feature in featureFile:
            features.append(feature)
            featureIdx.add(len(features) - 1, asShape(feature['geometry']).bounds)
        # Break up by sections and export
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                # Name the output from the running counter, or from a section
                # property when `key` is given.
                fileName = pattern % i
                if key:
                    fileName = pattern % section['properties'][key]
                properties = {}
                try:
                    with collection(fileName, 'w', 'ESRI Shapefile',
                                    schema=featureFile.schema,
                                    crs=featureFile.crs) as output:
                        sectionShape = asShape(section['geometry'])
                        for j in featureIdx.intersection(sectionShape.bounds):
                            if asShape(features[j]['geometry']).intersects(sectionShape):
                                # Remember the last-written feature's properties
                                # (presumably consumed by the missing except
                                # block -- confirm).
                                properties = features[j]['properties']
                                output.write(features[j])
                    print "Exported %s" % fileName
def chunk(featureFileName, sectionFileName, pattern, key=None):
    # NOTE(review): truncated in this view -- the per-section export loop is
    # cut off after its first statement.
    # Load and index
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        for feature in featureFile:
            try:
                shape = asShape(feature['geometry'])
                features.append(feature)
                featureIdx.add(len(features) - 1, shape.bounds)
            except ValueError:
                # Skip unparseable geometries, but report them.
                print "Error parsing feature"
                pprint(feature)
        # Break up by sections and export
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                fileName = pattern % i