# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this fragment's original indentation appears to have been lost
# during extraction (loop/if bodies are flattened to column 0), and it uses
# Python 2 `print` statements while other parts of this file use Python 3
# call syntax — TODO confirm target interpreter and restore indentation
# before running.
c.set_linestyle('solid')
#convert high density contour lines into a set of shapely polygons!
fig12.savefig('contour.png')
# Walk every matplotlib contour collection, take each path with at least 3
# vertices, and store it as a shapely Polygon in the module-global POLYGONS
# list (empty polygons are skipped).
global POLYGONS
POLYGONS = []
for c in cset.collections:
if len(c.get_paths()) > 0:
verticies = c.get_paths()
#p = cset.collections[i].get_paths()[0]
for points in verticies:
if len(points) >= 3: #check that poly has min. 3 nodes
points = points.vertices
# column_stack of x and y columns reproduces the (N, 2) vertex array.
poly_points = np.column_stack((points[:,0], points[:,1]))
POLY = Polygon(poly_points)
if POLY.is_empty: #DON'T APPEND POLYGON IF EMPTY
print "empty polygon"
else:
POLYGONS.append(POLY)
# Time a multiprocessing run of multipoly_check over points_contained;
# both names are defined elsewhere in the file — presumably this tests
# which candidate points fall outside the high-density contour polygons
# (see the print message below); verify against the helper's definition.
t0 = datetime.datetime.now()
pool = mp.Pool()
points_outside = pool.map(multipoly_check, [points_contained])
pool.close()
pool.join()
t1 = datetime.datetime.now()
points_outside = np.asarray(points_outside)
print points_outside
print "time to generate coords outside high density contour polys:", t1-t0
# NOTE(review): fragment begins mid-`if` (the `else:` below has no visible
# `if` branch here) and its indentation has been flattened — the control
# structure must be restored from the original source before this can run.
outer_way = original_way
else:
# No reusable way: create a fresh OSM way carrying a copy of the
# original tags so later mutation cannot affect the caller's dict.
outer_way = osm_data.create_way({}, copy.deepcopy(original_tags))
outer_way.polygon = polygon
if polygon.interiors:
# This has created a multipolygon
if outer_way.relations:
raise Exception("Unsupported situation")
# Promote the way's tags (except "source") onto a new multipolygon
# relation, removing them from the way itself.
relation = osm_data.create_relation({}, {"type": "multipolygon"})
for key,val in outer_way.tags.items():
if key !="source":
relation.tags[key] = val
# NOTE(review): deleting from the dict while iterating .items()
# raises RuntimeError on Python 3 — presumably this ran on
# Python 2 or over a snapshot; confirm before porting.
del(outer_way.tags[key])
relation.add_member(outer_way, "outer")
# Each interior ring becomes an "inner" way of the relation; nodes are
# de-duplicated by coordinate via position_hash (position -> node id).
for interior in polygon.interiors:
interior_polygon = Polygon(interior)
inner_way = osm_data.create_way({}, {})
inner_way.polygon = interior_polygon
relation.add_member(inner_way, "inner")
if "source" in original_tags:
inner_way.tags["source"] = original_tags["source"]
for position in interior.coords:
position = tuple(position)
if position in position_hash:
# Coordinate already has a node: reuse it.
node_id = position_hash[position]
node = osm_data.nodes[node_id]
else:
node = osm_data.create_node_at_xy(position)
node_id = node.id()
position_hash[position] = node_id
# NOTE(review): registers `way.id()` (not `inner_way`) on the node —
# `way` is defined outside this fragment; confirm it is intentional.
node.ways.add(way.id())
inner_way.add_node(node)
# NOTE(review): fragment is flattened (loop/try bodies at column 0) and is
# truncated mid-statement at the end — the closing of the second
# `coords[:, 0] = ...` expression is missing from this view.
# to avoid redoing things, keep all polygons in a list
polygons1 = []
nno1 = 0
nno1Max = Nests1.shape[0]
# Build (nest id, shapely Polygon) pairs for every nest on edge 1,
# shifting local annotation coords into global slide coords using the
# ROI's left/top offsets from self.roiinfos.
for nid1, nest1 in Nests1.iterrows():
nno1 += 1
self._print2("%s: edge1-nest %d of %d" % (
monitorPrefix, nno1, nno1Max))
try:
coords = np.array(_parse_annot_coords(nest1))
coords[:, 0] = coords[:, 0] + self.roiinfos[
edgepair['roi1-name']]['left']
coords[:, 1] = coords[:, 1] + self.roiinfos[
edgepair['roi1-name']]['top']
polygons1.append((nid1, Polygon(coords)))
except Exception as e:
# Invalid geometry: log and skip this nest rather than abort the scan.
self._print2(
"%s: edge1-nest %d of %d: Shapely Error (below)" % (
monitorPrefix, nno1, nno1Max))
self._print2(e)
# go through the "other" polygons to get merge list
to_merge = DataFrame(columns=[
'nest1-roiname', 'nest1-nid', 'nest2-roiname', 'nest2-nid'])
nno2 = 0
nno2Max = Nests2.shape[0]
# Same coordinate-offset pass for edge 2 nests (continuation truncated).
for nid2, nest2 in Nests2.iterrows():
nno2 += 1
try:
coords = np.array(_parse_annot_coords(nest2))
coords[:, 0] = coords[:, 0] + self.roiinfos[
def _make_poly_(self, rtup, ctup):
    """Build the axis-aligned rectangle Polygon spanning the given bounds.

    rtup = (row min, row max)
    ctup = (col min, col max)

    Vertices are ordered (col, row) starting at the minimum corner.
    """
    rmin, rmax = rtup
    cmin, cmax = ctup
    corners = (
        (cmin, rmin),
        (cmin, rmax),
        (cmax, rmax),
        (cmax, rmin),
    )
    return Polygon(corners)
def shapelyToChunks(p, zlevel): #
# Convert a shapely geometry `p` into camPathChunk objects, one per
# coordinate sequence returned by polygon_utils_cam.shapelyToCoords.
# NOTE(review): indentation has been flattened in this view, and there is
# no `return` statement — as written the function returns None even though
# it builds `chunks`; presumably a trailing `return chunks` was truncated.
chunks = []
# p=sortContours(p)
seq = polygon_utils_cam.shapelyToCoords(p)
i = 0
for s in seq:
# progress(p[i])
if len(s) > 1:
chunk = camPathChunk([])
if len(s) == 2:
# NOTE(review): the LineString result is discarded — either a
# missing assignment (chunk.poly = ...) or dead code; confirm.
sgeometry.LineString(s)
else:
chunk.poly = spolygon.Polygon(
s) # this should maybe be LineString? but for sorting, we need polygon inside functions.
# Copy vertices into the chunk; use the source z when the geometry
# has one, otherwise stamp every point with the requested zlevel.
for v in s:
# progress (v)
# print(v)
if p.has_z:
chunk.points.append((v[0], v[1], v[2]))
else:
chunk.points.append((v[0], v[1], zlevel))
# chunk.points.append((chunk.points[0][0],chunk.points[0][1],chunk.points[0][2]))#last point =first point
# A ring whose first and last points coincide is marked closed.
if chunk.points[0] == chunk.points[-1] and len(s) > 2:
chunk.closed = True
chunks.append(chunk)
i += 1
chunks.reverse() # this is for smaller shapes first.
#
def extent(self):
    """Return the overall bounding rectangle of this object as a Polygon.

    The bounds are taken from the min/max of the per-cell column and row
    extents; the vertices run counter-clockwise from the lower-left corner.
    """
    left = self.min_col.min()
    right = self.max_col.max()
    bottom = self.min_row.min()
    top = self.max_row.max()
    corners = (
        (left, bottom),
        (right, bottom),
        (right, top),
        (left, top),
    )
    return Polygon(corners)
def make_poly(rtup, ctup):
    """Rectangle Polygon from row bounds ``rtup`` and column bounds ``ctup``.

    rtup = (row min, row max); ctup = (col min, col max). Vertices are
    (col, row) pairs starting at the minimum corner.
    """
    rmin, rmax = rtup
    cmin, cmax = ctup
    return Polygon((
        (cmin, rmin),
        (cmin, rmax),
        (cmax, rmax),
        (cmax, rmin),
    ))
# NOTE(review): flattened fragment; the final `if subdivide...` body is
# truncated from this view, and the Python 2 `print` statement below mixes
# with Python 3 `print(...)` calls — confirm target interpreter.
# Work queue, lock and process list for the parallel extraction below.
q = Queue()
l = Lock()
pl = []
#set the file reset option if the file is local
if not('http:' in dataset or 'www.' in dataset):
# NOTE(review): `== None` should be `is None` per PEP 8 — left as-is here.
if ocgOpts == None:
ocgOpts = {}
ocgOpts['multiReset'] = True
ocgOpts['verbose'] = verbose
ncp = OcgDataset(dataset,**ocgOpts)
#if no polygon was specified
#create a polygon covering the whole area so that the job can be split
# The fallback polygon is the dataset's full row/column bounding box.
if polygons == [None]:
polygons = [Polygon(((ncp.col_bnds.min(),ncp.row_bnds.min()),(ncp.col_bnds.max(),ncp.row_bnds.min()),(ncp.col_bnds.max(),ncp.row_bnds.max()),(ncp.col_bnds.min(),ncp.row_bnds.max())))]
for ii,polygon in enumerate(polygons):
if verbose>1: print(ii)
#skip invalid polygons
if not polygon.is_valid:
if verbose>0: print "Polygon "+repr(ii+1)+" is not valid. "+polygon.wkt
continue
#if polygons have been specified and subdivide is True, each polygon will be subdivided
#into a grid with resolution of subres. If subres is undefined the resolution is half the square root of the area of the polygons envelope, or approximately 4 subpolygons
if subdivide and not(polygons == None):
#figure out the resolution and subdivide
#default value uses sqrt(polygon envelop area)
#generally resulting in 4-6 threads per polygon
def strategy_medial_axis( o ):
# Medial-axis (V-carving) toolpath strategy for operation `o`.
# NOTE(review): this definition is truncated at the end of the visible
# chunk (the final `if maxdepth` has no condition/body here) and its
# indentation has been flattened; do not run as-is.
print('operation: Medial Axis')
print('doing highly experimental stuff')
from cam.voronoi import Site, computeVoronoiDiagram
chunks=[]
gpoly=spolygon.Polygon()
angle=o.cutter_tip_angle
# Slope of the cutter flank: tan of the half-angle complement, in radians.
slope=math.tan(math.pi*(90-angle/2)/180)
if o.cutter_type=='VCARVE':
angle = o.cutter_tip_angle
#start the max depth calc from the "start depth" of the operation.
# Depth at which the V cutter's flank reaches the full cutter radius.
maxdepth = o.maxz - math.tan(math.pi*(90-angle/2)/180) * o.cutter_diameter/2
#don't cut any deeper than the "end depth" of the operation.
if maxdepth