# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def templateClustering(self, file, radius, order, solver, initial, storage_flag, conn_weigh_flag, tolerance, connection, expected_cluster_length, ccore_flag):
    """Cluster the sample in *file* with a syncnet oscillatory network and
    assert that the obtained cluster sizes match *expected_cluster_length*.

    The network has a random component, so phases may cross each other and
    invalidate a single run; up to three attempts are made before failing.

    Fix: the original loop never set ``result_testing`` to True, never broke
    out on success, and never asserted the flag — so the template could not
    actually fail (or pass) a test run.
    """
    result_testing = False

    # If phases cross each other because of the random part of the network,
    # try again (up to 3 attempts).
    for _attempt in range(3):
        sample = read_sample(file)

        network = syncnet(sample, radius, connection, initial, conn_weigh_flag, ccore_flag)
        network.process(order, solver, storage_flag)

        clusters = network.get_clusters(tolerance)
        obtained_cluster_sizes = [len(cluster) for cluster in clusters]

        # Wrong number of clusters — retry.
        if len(obtained_cluster_sizes) != len(expected_cluster_length):
            continue

        # Compare size distributions irrespective of cluster ordering.
        obtained_cluster_sizes.sort()
        expected_cluster_length.sort()
        if obtained_cluster_sizes != expected_cluster_length:
            continue

        # Successful attempt: obtained sizes match the expectation.
        result_testing = True
        break

    assert result_testing
def templateClusteringResults(self, path, radius, neighbors, expected_length_clusters, ccore):
    """Run OPTICS on the sample at *path* and verify that clusters plus noise
    cover every point and that cluster sizes match *expected_length_clusters*."""
    sample = read_sample(path)

    optics_instance = optics(sample, radius, neighbors)
    optics_instance.process()

    clusters = optics_instance.get_clusters()
    noise = optics_instance.get_noise()

    # Every point is either assigned to a cluster or reported as noise.
    assert sum([len(cluster) for cluster in clusters]) + len(noise) == len(sample)

    # Total clustered points and the per-cluster size distribution must match.
    assert sum([len(cluster) for cluster in clusters]) == sum(expected_length_clusters)
    assert sorted([len(cluster) for cluster in clusters]) == expected_length_clusters
def templateLengthProcessData(self, path_to_file, radius, cluster_numbers, threshold, expected_cluster_length, ccore = False):
    """Run ROCK clustering on *path_to_file* and check that all points are
    assigned and that cluster sizes match *expected_cluster_length*."""
    sample = read_sample(path_to_file)

    rock_instance = rock(sample, radius, cluster_numbers, threshold, ccore)
    rock_instance.process()
    clusters = rock_instance.get_clusters()

    obtained_cluster_sizes = [len(cluster) for cluster in clusters]

    # All sample points must be covered by the clusters.
    assert len(sample) == sum(obtained_cluster_sizes)

    # Order-independent comparison of the size distributions.
    obtained_cluster_sizes.sort()
    expected_cluster_length.sort()
    assert obtained_cluster_sizes == expected_cluster_length
def templateClusteringResults(self, path, number_clusters, expected_length_clusters, ccore = False):
    """Run hierarchical (agglomerative) clustering on *path* and validate the
    resulting cluster sizes against *expected_length_clusters*."""
    sample = read_sample(path)

    hierarchical_instance = hierarchical(sample, number_clusters, ccore)
    hierarchical_instance.process()
    clusters = hierarchical_instance.get_clusters()

    sizes = [len(cluster) for cluster in clusters]

    assert sum(sizes) == len(sample)                    # every point assigned
    assert sum(sizes) == sum(expected_length_clusters)  # same total
    assert sorted(sizes) == expected_length_clusters    # same distribution
def templateLengthProcessData(self, path_to_file, start_centers, expected_cluster_length, type_splitting, ccore = False):
    """Run X-Means from *start_centers* on *path_to_file* and verify full point
    coverage and the expected cluster size distribution."""
    sample = read_sample(path_to_file)

    xmeans_instance = xmeans(sample, start_centers, 20, 0.025, type_splitting, ccore)
    xmeans_instance.process()

    obtained_cluster_sizes = [len(cluster) for cluster in xmeans_instance.get_clusters()]

    # Every sample point has to be assigned to exactly one cluster.
    assert len(sample) == sum(obtained_cluster_sizes)

    # Compare size distributions irrespective of cluster ordering.
    obtained_cluster_sizes.sort()
    expected_cluster_length.sort()
    assert obtained_cluster_sizes == expected_cluster_length
def templateLengthProcessData(self, file, som_map_size, avg_num_conn, eps, expected_cluster_length):
    """Cluster the sample in *file* with SYNC-SOM and assert that the obtained
    cluster sizes match *expected_cluster_length*.

    The network has a random component, so phases may cross each other and
    invalidate a single run; up to three attempts are made before failing.

    Fix: the original loop never set ``result_testing`` to True, never broke
    out on success, and never asserted the flag at the end — the
    ``# Unit-test is passed`` comment marked exactly where that code belonged.
    """
    result_testing = False

    # If phases cross each other because of the random part of the network,
    # try again (up to 3 attempts).
    for _attempt in range(3):
        sample = read_sample(file)

        network = syncsom(sample, som_map_size[0], som_map_size[1])
        network.process(avg_num_conn, collect_dynamic = False, order = eps)

        clusters = network.get_clusters()
        obtained_cluster_sizes = [len(cluster) for cluster in clusters]

        # Not all points were clustered — retry.
        if len(sample) != sum(obtained_cluster_sizes):
            continue

        # Compare size distributions irrespective of cluster ordering.
        obtained_cluster_sizes.sort()
        expected_cluster_length.sort()
        if obtained_cluster_sizes != expected_cluster_length:
            continue

        # Unit-test is passed
        result_testing = True
        break

    assert result_testing
def templateLengthProcessData(self, path_to_file, radius, min_number_neighbors, max_number_neighbors, ccore = False):
    """Run DBSCAN for every neighbor threshold in
    [min_number_neighbors, max_number_neighbors) and check that clusters plus
    noise cover every sample point.

    Fix: the loop variable was ignored and ``min_number_neighbors`` was passed
    to every ``dbscan`` call, so each iteration repeated the identical test
    instead of sweeping the neighbor threshold.
    """
    for number_neighbors in range(min_number_neighbors, max_number_neighbors, 1):
        sample = read_sample(path_to_file)

        dbscan_instance = dbscan(sample, radius, number_neighbors, ccore)
        dbscan_instance.process()

        clusters = dbscan_instance.get_clusters()
        noise = dbscan_instance.get_noise()

        # Clustered points plus noise must account for every sample point.
        length = len(noise) + sum([len(cluster) for cluster in clusters])
        assert len(sample) == length