from pyclustering.cluster.optics import optics
from pyclustering.utils import timedcall

def process_optics(sample):
    # Measure OPTICS processing time (connectivity radius 1.0, minimum 2 neighbors).
    instance = optics(sample, 1.0, 2)
    (ticks, _) = timedcall(instance.process)
    return ticks
from pyclustering.cluster.syncnet import syncnet
from pyclustering.nnet import solve_type
from pyclustering.nnet.sync import sync_visualizer
from pyclustering.utils import read_sample, draw_clusters

def template_clustering(file, radius, order, show_dyn=False, show_conn=False, show_clusters=True, ena_conn_weight=False, ccore_flag=True, tolerance=0.1):
    # SyncNet clustering example: oscillatory network based on the Kuramoto model.
    sample = read_sample(file)
    syncnet_instance = syncnet(sample, radius, enable_conn_weight=ena_conn_weight, ccore=ccore_flag)
    (ticks, analyser) = timedcall(syncnet_instance.process, order, solve_type.FAST, show_dyn)
    print("Sample: ", file, "\t\tExecution time: ", ticks)

    if show_dyn:
        sync_visualizer.show_output_dynamic(analyser)
        sync_visualizer.animate(analyser)
        sync_visualizer.show_local_order_parameter(analyser, syncnet_instance)
        # sync_visualizer.animate_output_dynamic(analyser)
        # sync_visualizer.animate_correlation_matrix(analyser, colormap='hsv')

    if show_conn:
        syncnet_instance.show_network()

    if show_clusters:
        clusters = analyser.allocate_clusters(tolerance)
        print("Amount of allocated clusters: ", len(clusters))
        draw_clusters(sample, clusters)
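
# A minimal usage sketch for the SyncNet template above; the bundled sample constant and
# the radius/order values are illustrative assumptions, not part of the original snippet.
from pyclustering.samples.definitions import SIMPLE_SAMPLES

template_clustering(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, radius=1.0, order=0.998, show_dyn=False)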

from pyclustering.cluster import cluster_visualizer
from pyclustering.cluster.cure import cure

def template_clustering(number_clusters, path, number_represent_points=5, compression=0.5, draw=True, ccore_flag=True):
    # CURE clustering example: report timing, clusters, representative points and means.
    sample = read_sample(path)
    cure_instance = cure(sample, number_clusters, number_represent_points, compression, ccore_flag)
    (ticks, _) = timedcall(cure_instance.process)

    clusters = cure_instance.get_clusters()
    representors = cure_instance.get_representors()
    means = cure_instance.get_means()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
    # print([len(cluster) for cluster in clusters])

    if draw is True:
        visualizer = cluster_visualizer()
        visualizer.append_clusters(clusters, sample)

        for cluster_index in range(len(clusters)):
            visualizer.append_cluster_attribute(0, cluster_index, representors[cluster_index], '*', 10)
            visualizer.append_cluster_attribute(0, cluster_index, [means[cluster_index]], 'o')

        visualizer.show()
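
# A possible invocation of the CURE template above; the sample constant and the cluster
# count are illustrative assumptions (SAMPLE_SIMPLE2 is assumed to contain three groups).
from pyclustering.samples.definitions import SIMPLE_SAMPLES

template_clustering(3, SIMPLE_SAMPLES.SAMPLE_SIMPLE2, number_represent_points=5, compression=0.5)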

from pyclustering.cluster.syncsom import syncsom
from pyclustering.utils import draw_dynamics

def template_clustering(file, map_size, radius, sync_order=0.999, show_dyn=False, show_layer1=False, show_layer2=False, show_clusters=True):
    # SYNC-SOM clustering example: a SOM layer followed by a Sync oscillatory layer.
    sample = read_sample(file)

    network = syncsom(sample, map_size[0], map_size[1], radius)
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, show_dyn, sync_order)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    # Show dynamic of the last (sync) layer.
    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase, x_title="Time", y_title="Phase", y_lim=[0, 3.14])

    if show_clusters:
        clusters = network.get_som_clusters()
        visualizer = cluster_visualizer()
        visualizer.append_clusters(clusters, network.som_layer.weights)
        visualizer.show()

    # Show the underlying SOM layer.
    if show_layer1:
        network.show_som_layer()
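
# A possible invocation of the SYNC-SOM template above; the map size, radius and sample
# constant are illustrative assumptions only.
from pyclustering.samples.definitions import SIMPLE_SAMPLES

template_clustering(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, map_size=[4, 4], radius=1.0, show_clusters=True)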

from pyclustering.cluster.agglomerative import agglomerative

def process_agglomerative(sample):
    # NUMBER_CLUSTERS is expected to be defined by the calling benchmark script.
    instance = agglomerative(sample, NUMBER_CLUSTERS)
    (ticks, _) = timedcall(instance.process)
    return ticks

from pyclustering.cluster.hsyncnet import hsyncnet
from pyclustering.nnet import initial_type

def template_clustering(file, number_clusters, arg_order=0.999, arg_collect_dynamic=True, ccore_flag=False):
    # Hierarchical Sync (HSyncNet) clustering example.
    sample = read_sample(file)
    network = hsyncnet(sample, number_clusters, initial_neighbors=int(len(sample) * 0.15), osc_initial_phases=initial_type.EQUIPARTITION, ccore=ccore_flag)

    (ticks, analyser) = timedcall(network.process, arg_order, solve_type.FAST, arg_collect_dynamic)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    clusters = analyser.allocate_clusters()

    if arg_collect_dynamic:
        sync_visualizer.show_output_dynamic(analyser)

    draw_clusters(sample, clusters)
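
# A possible invocation of the HSyncNet template above; the sample constant and the
# expected number of clusters (two groups assumed in SAMPLE_SIMPLE1) are assumptions.
from pyclustering.samples.definitions import SIMPLE_SAMPLES

template_clustering(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, arg_collect_dynamic=True)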

from pyclustering.utils import read_image, draw_image_mask_segments

def template_segmentation_image(source, map_som_size=[5, 5], radius=128.0, sync_order=0.998, show_dyn=False, show_som_map=False):
    # Image segmentation example based on SYNC-SOM.
    data = read_image(source)

    network = syncsom(data, map_som_size[0], map_som_size[1], 1.0)
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, show_dyn, sync_order)
    print("Sample: ", source, "\t\tExecution time: ", ticks, "\t\tWinners: ", network.som_layer.get_winner_number(), "\n")

    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase)

    clusters = network.get_clusters()
    draw_image_mask_segments(source, clusters)
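
# A possible invocation of the segmentation template above; 'building.png' is a
# hypothetical placeholder path, not an image shipped with the original snippet.
template_segmentation_image('building.png', map_som_size=[5, 5], show_dyn=False)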

from pyclustering.cluster.clarans import clarans

def process_clarans(sample):
    # CLARANS benchmark: 10 local searches, at most 3 neighbors examined per search.
    instance = clarans(sample, NUMBER_CLUSTERS, 10, 3)
    (ticks, _) = timedcall(instance.process)
    return ticks
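
# A minimal benchmark harness sketch tying the process_* helpers together; the sample
# constant and the NUMBER_CLUSTERS value are assumptions made for illustration.
from pyclustering.samples.definitions import SIMPLE_SAMPLES
from pyclustering.utils import read_sample

NUMBER_CLUSTERS = 3  # assumed cluster count for the agglomerative and CLARANS helpers

benchmark_sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE2)
for name, runner in (("optics", process_optics),
                     ("agglomerative", process_agglomerative),
                     ("clarans", process_clarans)):
    print(name, "execution time:", runner(benchmark_sample))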

from pyclustering.cluster.dbscan import dbscan

def template_clustering(radius, neighb, path, invisible_axes=False, ccore=True, show=True):
    # DBSCAN clustering example: returns the sample, clusters and noise points.
    sample = read_sample(path)

    dbscan_instance = dbscan(sample, radius, neighb, ccore)
    (ticks, _) = timedcall(dbscan_instance.process)

    clusters = dbscan_instance.get_clusters()
    noise = dbscan_instance.get_noise()
    print([len(cluster) for cluster in clusters])

    if show:
        visualizer = cluster_visualizer()
        visualizer.append_clusters(clusters, sample)
        visualizer.append_cluster(noise, sample, marker='x')
        visualizer.show()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
    return sample, clusters, noise
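
# A possible invocation of the DBSCAN template above; the radius/neighbor values and the
# sample constant are illustrative assumptions only.
from pyclustering.samples.definitions import SIMPLE_SAMPLES

sample, clusters, noise = template_clustering(0.4, 2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1)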