# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def antmean_clustering_process(params, count_clusters, samples):
    """!
    @brief Performs ant-colony mean clustering using the CCORE library.

    @param[in] params: Settings object exposing 'ro', 'pheramone_init', 'iterations' and 'count_ants'.
    @param[in] count_clusters (uint): Amount of clusters that should be allocated.
    @param[in] samples (list): Input data points to cluster.

    @return Clustering result extracted from the returned CCORE package.
    """
    ccore = load_core()

    # Copy Python-side settings into the ctypes parameter structure.
    c_params = c_antcolony_clustering_parameters()
    c_params.ro = c_double(params.ro)
    c_params.pheramone_init = c_double(params.pheramone_init)
    c_params.iterations = c_uint(params.iterations)
    c_params.count_ants = c_uint(params.count_ants)

    data_package = package_builder(samples, c_double).create()

    ccore.antmean_algorithm.restype = POINTER(pyclustering_package)
    package = ccore.antmean_algorithm(data_package, pointer(c_params), count_clusters)

    extracted = package_extractor(package).extract()
    # The package is allocated on the C side and must be released explicitly.
    ccore.free_pyclustering_package(package)
    return extracted
def silhoeutte(sample, clusters, pointer_metric, data_type):
    """!
    @brief Calculates silhouette scores for an existing clustering using the CCORE library.

    @param[in] sample (list): Input data points.
    @param[in] clusters (list): Allocated clusters (lists of point indexes).
    @param[in] pointer_metric: Pointer to the CCORE metric object used for distance calculation.
    @param[in] data_type: Data representation type, converted via convert_data_type().

    @return Extracted silhouette result from the CCORE package.
    """
    data_package = package_builder(sample, c_double).create()
    clusters_package = package_builder(clusters, c_size_t).create()
    ctype_data = convert_data_type(data_type)

    ccore = ccore_library.get()
    ccore.silhouette_algorithm.restype = POINTER(pyclustering_package)
    package = ccore.silhouette_algorithm(data_package, clusters_package, pointer_metric, ctype_data)

    scores = package_extractor(package).extract()
    # Release the C-side allocated package after extraction.
    ccore.free_pyclustering_package(package)
    return scores
def pack_pattern(pattern):
    """!
    @brief Packs a pattern (sequence of integer values) into a pyclustering package.

    @param[in] pattern (list): Pattern represented by integer values.

    @return Pointer to the created package that can be passed to CCORE functions.
    """
    # Stray C-style trailing semicolon removed (non-idiomatic Python).
    return package_builder(pattern, c_int).create()
def elbow(sample, kmin, kmax, initializer):
    """!
    @brief Performs elbow analysis using the CCORE library to suggest an amount of clusters.

    @param[in] sample (list): Input data points.
    @param[in] kmin (uint): Minimum amount of clusters to consider.
    @param[in] kmax (uint): Maximum amount of clusters to consider.
    @param[in] initializer: Center initializer type (elbow_center_initializer).

    @return Tuple (suggested amount of clusters, within-cluster errors per k).

    @raise ValueError: The initializer type is not supported.
    """
    data_package = package_builder(sample, c_double).create()
    ccore = ccore_library.get()

    # Pick the C entry point that matches the requested center initializer.
    if initializer == elbow_center_initializer.KMEANS_PLUS_PLUS:
        c_function = ccore.elbow_method_ikpp
    elif initializer == elbow_center_initializer.RANDOM:
        c_function = ccore.elbow_method_irnd
    else:
        raise ValueError("Not supported type of center initializer '" + str(initializer) + "'.")

    c_function.restype = POINTER(pyclustering_package)
    package = c_function(data_package, c_size_t(kmin), c_size_t(kmax))

    extracted = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    amount = extracted[elbow_package_indexer.ELBOW_PACKAGE_INDEX_AMOUNT][0]
    wce = extracted[elbow_package_indexer.ELBOW_PACKAGE_INDEX_WCE]
    return amount, wce
def xmeans(sample, centers, kmax, tolerance, criterion, repeat):
    """!
    @brief Performs X-Means clustering using the CCORE library.

    @param[in] sample (list): Input data points.
    @param[in] centers (list): Initial cluster centers.
    @param[in] kmax (uint): Maximum amount of clusters that can be allocated.
    @param[in] tolerance (double): Stop condition - minimum center change between iterations.
    @param[in] criterion (uint): Cluster splitting criterion code.
    @param[in] repeat (uint): Amount of times the algorithm is run to choose the best result.

    @return Extracted clustering result from the CCORE package.
    """
    data_package = package_builder(sample, c_double).create()
    centers_package = package_builder(centers, c_double).create()

    ccore = ccore_library.get()
    ccore.xmeans_algorithm.restype = POINTER(pyclustering_package)
    package = ccore.xmeans_algorithm(data_package,
                                     centers_package,
                                     c_size_t(kmax),
                                     c_double(tolerance),
                                     c_uint(criterion),
                                     c_size_t(repeat))

    extracted = package_extractor(package).extract()
    # Free the package allocated by the C side.
    ccore.free_pyclustering_package(package)
    return extracted
def __call__(self, point1, point2):
    """!
    @brief Calculates the distance between two points using the C-side metric
            referenced by this wrapper object.

    @param[in] point1 (list): First point.
    @param[in] point2 (list): Second point.

    @return (double) Distance between the two points.
    """
    package1 = package_builder(point1, c_double).create()
    package2 = package_builder(point2, c_double).create()

    ccore = ccore_library.get()
    ccore.metric_calculate.restype = c_double
    # self.__pointer refers to the metric instance created on the C side.
    return ccore.metric_calculate(self.__pointer, package1, package2)
def bsas(sample, amount, threshold, metric_pointer):
    """!
    @brief Performs BSAS clustering using the CCORE library.

    @param[in] sample (list): Input data points.
    @param[in] amount (uint): Maximum allowable amount of clusters.
    @param[in] threshold (double): Dissimilarity threshold for cluster allocation.
    @param[in] metric_pointer: Pointer to the CCORE metric object used for distance calculation.

    @return Tuple of the first two extracted result entries
            (clusters and their representatives, per CCORE packaging order).
    """
    data_package = package_builder(sample, c_double).create()

    ccore = ccore_library.get()
    ccore.bsas_algorithm.restype = POINTER(pyclustering_package)
    package = ccore.bsas_algorithm(data_package, c_size_t(amount), c_double(threshold), metric_pointer)

    extracted = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    first, second = extracted[0], extracted[1]
    return first, second
def hsyncnet_create_network(sample, number_clusters, initial_phases, initial_neighbors, increase_persent):
    """!
    @brief Creates an HSyncNet oscillatory network instance in the CCORE library.

    @param[in] sample (list): Input data points used to construct the network.
    @param[in] number_clusters (uint): Amount of clusters that should be allocated.
    @param[in] initial_phases (uint): Type of initialization of initial oscillator phases.
    @param[in] initial_neighbors (uint): Initial amount of neighbors considered.
    @param[in] increase_persent (double): Percent by which the neighbor amount is increased
               (parameter name kept as-is - it is part of the public interface,
               despite the spelling).

    @return Pointer to the created CCORE network instance; the caller owns it and
            is responsible for destroying it via the corresponding CCORE call.
    """
    # C-style trailing semicolons removed (non-idiomatic Python); behavior unchanged.
    data_package = package_builder(sample, c_double).create()

    ccore = ccore_library.get()
    ccore.hsyncnet_create_network.restype = POINTER(c_void_p)
    return ccore.hsyncnet_create_network(data_package,
                                         c_uint(number_clusters),
                                         c_uint(initial_phases),
                                         c_uint(initial_neighbors),
                                         c_double(increase_persent))
def dbscan(sample, eps, min_neighbors, data_type):
    """!
    @brief Performs DBSCAN clustering using the CCORE library.

    @param[in] sample (list): Input data points.
    @param[in] eps (double): Connectivity radius between points.
    @param[in] min_neighbors (uint): Minimum amount of neighbors for a core point.
    @param[in] data_type: Data representation type, converted via convert_data_type().

    @return Tuple (list of clusters, noise) where noise is the collection of
            points that were not assigned to any cluster.
    """
    data_package = package_builder(sample, c_double).create()
    c_data_type = convert_data_type(data_type)

    ccore = ccore_library.get()
    ccore.dbscan_algorithm.restype = POINTER(pyclustering_package)
    package = ccore.dbscan_algorithm(data_package, c_double(eps), c_size_t(min_neighbors), c_data_type)

    list_of_clusters = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    # The last entry of the extracted result is the noise collection.
    # FIX: use pop() instead of remove(noise) - remove() deletes the FIRST
    # element equal to the noise list, which discards a real cluster whenever
    # one compares equal to the noise (e.g. two empty lists).
    noise = list_of_clusters.pop()
    return list_of_clusters, noise
def som_train(som_pointer, data, epochs, autostop):
    """!
    @brief Trains a self-organized feature map (SOM) using the CCORE pyclustering library.

    @param[in] som_pointer: Pointer to the CCORE SOM instance that should be trained.
    @param[in] data (list): Input data - list of points where each point is represented
               by a list of features, for example coordinates.
    @param[in] epochs (uint): Number of epochs for training.
    @param[in] autostop (bool): Automatic termination of the learning process when
               adaptation no longer occurs.

    @return (uint) Number of learning iterations performed.
    """
    data_package = package_builder(data, c_double).create()

    ccore = ccore_library.get()
    ccore.som_train.restype = c_size_t
    return ccore.som_train(som_pointer, data_package, c_uint(epochs), autostop)