#include "Clustering.h"

#include <cmath>
#include <cstdio>
#include <cstring>

#include "utils.h"
#include "FaissAssert.h"
#include "IndexFlat.h"

namespace faiss {
ClusteringParameters::ClusteringParameters ():
    niter(25), nredo(1),
    verbose(false), spherical(false),
    update_index(false), frozen_centroids(false),
    min_points_per_centroid(39),
    max_points_per_centroid(256),
    seed(1234)
{}
/* Computes the imbalance factor of an assignment: k * sum(hist^2) /
 * (sum(hist))^2, where hist is the histogram of cluster sizes. It is
 * 1.0 for perfectly balanced clusters and grows with the skew. */
static double imbalance_factor (int n, int k, long *assign) {
    std::vector<int> hist(k, 0);
    for (int i = 0; i < n; i++)
        hist[assign[i]]++;

    double tot = 0, uf = 0;

    for (int i = 0; i < k; i++) {
        tot += hist[i];
        uf += hist[i] * (double) hist[i];
    }
    uf = uf * k / (tot * tot);

    return uf;
}
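For intuition, the factor equals 1.0 when every cluster receives the same number of points and grows as the histogram skews. A minimal self-contained sketch; the standalone `imbalance` helper below is a hypothetical copy of the static function above, so the snippet can run outside this file:

#include <cstdio>
#include <vector>

// Hypothetical standalone copy of imbalance_factor() for demonstration.
static double imbalance (int n, int k, const long *assign) {
    std::vector<double> hist(k, 0);
    for (int i = 0; i < n; i++) hist[assign[i]]++;
    double tot = 0, uf = 0;
    for (int i = 0; i < k; i++) { tot += hist[i]; uf += hist[i] * hist[i]; }
    return uf * k / (tot * tot);
}

int main () {
    long balanced[] = {0, 1, 2, 3, 0, 1, 2, 3}; // 2 points per cluster
    long skewed[]   = {0, 0, 0, 0, 0, 1, 2, 3}; // cluster 0 gets 5 of 8
    printf ("balanced: %.3f\n", imbalance (8, 4, balanced)); // -> 1.000
    printf ("skewed:   %.3f\n", imbalance (8, 4, skewed));   // -> 1.750
    return 0;
}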
void Clustering::train (idx_t nx, const float *x_in, Index & index) {
    FAISS_THROW_IF_NOT_MSG (nx >= k,
            "need at least as many training points as clusters");

    double t0 = getmillisecs();

    // catch NaNs and Infs up front: they would silently corrupt centroids
    for (size_t i = 0; i < nx * d; i++) {
        FAISS_THROW_IF_NOT_MSG (finite (x_in[i]),
                "input contains NaN's or Inf's");
    }
    const float *x = x_in;
    ScopeDeleter<float> del1;

    if (nx > k * max_points_per_centroid) {
        // training set too large: subsample a random subset
        if (verbose)
            printf("Sampling a subset of %ld / %ld for training\n",
                   k * max_points_per_centroid, nx);
        std::vector<int> perm (nx);
        rand_perm (perm.data(), nx, seed);
        nx = k * max_points_per_centroid;
        float *x_new = new float [nx * d];
        for (idx_t i = 0; i < nx; i++)
            memcpy (x_new + i * d, x + perm[i] * d,
                    sizeof(x_new[0]) * d);
        x = x_new;
        del1.set (x);
    } else if (nx < k * min_points_per_centroid) {
        fprintf (stderr,
                 "WARNING clustering %ld points to %ld centroids: "
                 "please provide at least %ld training points\n",
                 nx, k, idx_t(k) * min_points_per_centroid);
    }
    if (verbose)
        printf("Clustering %d points in %ldD to %ld clusters, "
               "redo %d times, %d iterations\n",
               int(nx), d, k, nredo, niter);
    idx_t *assign = new idx_t[nx];
    ScopeDeleter<idx_t> del (assign);
    float *dis = new float[nx];
    ScopeDeleter<float> del2 (dis);

    // bookkeeping to keep the best result across the nredo runs
    float best_err = 1e50;
    std::vector<float> best_obj;
    std::vector<float> best_centroids;
    // support centroids provided by the caller
    FAISS_THROW_IF_NOT_MSG (
        centroids.size() % d == 0,
        "size of provided input centroids not a multiple of dimension");

    size_t n_input_centroids = centroids.size() / d;

    if (verbose && n_input_centroids > 0) {
        printf ("  Using %zd centroids provided as input (%sfrozen)\n",
                n_input_centroids, frozen_centroids ? "" : "not ");
    }
    double t_search_tot = 0;
    if (verbose) {
        printf("  Preprocessing in %.2f s\n",
               (getmillisecs() - t0) / 1000.);
    }
    t0 = getmillisecs();
    for (int redo = 0; redo < nredo; redo++) {

        if (verbose && nredo > 1) {
            printf("Outer iteration %d / %d\n", redo, nredo);
        }

        // initialize the remaining centroids with random points of the
        // training set (input centroids, if any, occupy the first slots)
        centroids.resize (d * k);
        std::vector<int> perm (nx);

        rand_perm (perm.data(), nx, seed + 1 + redo * 15486557L);
        for (int i = n_input_centroids; i < k; i++)
            memcpy (&centroids[i * d], x + perm[i] * d,
                    d * sizeof (float));
        if (spherical)
            fvec_renorm_L2 (d, k, centroids.data());

        if (!index.is_trained)
            index.train (k, centroids.data());

        // the index must be empty before the initial centroids are added
        FAISS_THROW_IF_NOT (index.ntotal == 0);
        index.add (k, centroids.data());
        float err = 0;
        for (int i = 0; i < niter; i++) {
            double t0s = getmillisecs();
            // assignment stage: nearest centroid for every training point
            index.search (nx, x, 1, dis, assign);
            t_search_tot += getmillisecs() - t0s;

            // objective: sum of distances to the assigned centroids
            err = 0;
            for (int j = 0; j < nx; j++)
                err += dis[j];
            obj.push_back (err);

            // update stage: recompute centroids, splitting empty ones
            int nsplit = km_update_centroids (
                    x, centroids.data(), assign, d, k, nx,
                    frozen_centroids ? n_input_centroids : 0);

            if (verbose) {
                printf ("  Iteration %d (%.2f s, search %.2f s): "
                        "objective=%g imbalance=%.3f nsplit=%d \r",
                        i, (getmillisecs() - t0) / 1000.0,
                        t_search_tot / 1000,
                        err, imbalance_factor (nx, k, assign),
                        nsplit);
                fflush (stdout);
            }

            if (spherical)
                fvec_renorm_L2 (d, k, centroids.data());

            // re-populate the index with the updated centroids
            index.reset ();
            if (update_index)
                index.train (k, centroids.data());

            assert (index.ntotal == 0);
            index.add (k, centroids.data());
        }
        if (verbose) printf("\n");

        if (nredo > 1) {
            if (err < best_err) {
                if (verbose)
                    printf ("Objective improved: keep new clusters\n");
                best_centroids = centroids;
                best_obj = obj;
                best_err = err;
            }
            index.reset ();
        }
    }

    if (nredo > 1) {
        // restore the best of the nredo runs
        centroids = best_centroids;
        obj = best_obj;
        index.reset ();
        index.add (k, best_centroids.data());
    }
}
float kmeans_clustering (size_t d, size_t n, size_t k,
                         const float *x, float *centroids)
{
    Clustering clus (d, k);
    // log progress only when an iteration costs more than ~1 Gflop
    clus.verbose = d * n * k > (1L << 30);
    IndexFlatL2 index (d);
    clus.train (n, x, index);
    memcpy (centroids, clus.centroids.data(), sizeof(*centroids) * d * k);
    return clus.obj.back();
}

} // namespace faiss
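For callers that just want centroids, the wrapper reduces to one call; a hedged sketch with placeholder data:

// Hedged sketch: the C-style convenience wrapper, placeholder data.
size_t d = 32, n = 5000, k = 64;
std::vector<float> x (n * d);           // placeholder training vectors
std::vector<float> centroids (k * d);   // output buffer, k * d floats
float err = faiss::kmeans_clustering (d, n, k, x.data(), centroids.data());
// err is the final k-means objective (sum of squared L2 distances)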
int km_update_centroids(const float *x, float *centroids, long *assign, size_t d, size_t k, size_t n, size_t k_frozen)
    k-means update stage: recomputes the centroids from the assignment and returns the number of clusters that were split
int niter
    number of clustering iterations
int nredo
    redo the clustering this many times and keep the best result
ClusteringParameters()
    sets reasonable defaults
virtual void reset()=0
    removes all elements from the database
virtual void train(idx_t, const float *)
    performs training on a representative set of vectors
Clustering(int d, int k)
    the only mandatory parameters are k and d
int seed
    seed for the random number generator
bool frozen_centroids
    use the centroids provided as input and do not change them during iterations
int min_points_per_centroid
    minimum number of training points per centroid; below this a warning is issued
virtual void add(idx_t n, const float *x)=0
    adds n vectors of dimension d to the index
float kmeans_clustering(size_t d, size_t n, size_t k, const float *x, float *centroids)
    simplified interface to k-means; returns the final quantization error
idx_t ntotal
    total number of indexed vectors
double getmillisecs()
    ms elapsed since some arbitrary epoch
std::vector< float > centroids
    computed centroids (k * d floats)
size_t d
    dimension of the vectors
virtual void search(idx_t n, const float *x, idx_t k, float *distances, idx_t *labels) const =0
    queries n vectors of dimension d; returns the k nearest neighbors' distances and labels
bool update_index
    re-train the index on the updated centroids after each iteration
bool is_trained
    set if the Index does not require training, or if training is done already
bool spherical
    whether centroids are L2-renormalized after each iteration (spherical k-means)
int max_points_per_centroid
    maximum number of training points per centroid; larger training sets are subsampled
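These parameters combine through the ClusteringParameters struct. A hedged configuration sketch follows, using placeholder sizes and assuming the Clustering(int d, int k, const ClusteringParameters &) constructor, which copies the parameters at construction:

#include <vector>
#include "Clustering.h"
#include "IndexFlat.h"

// Hedged sketch: overriding the default clustering parameters.
void tuned_kmeans () {
    size_t d = 128, n = 20000, k = 256;   // placeholder sizes
    std::vector<float> xb (n * d);        // placeholder training data

    faiss::ClusteringParameters cp;       // starts from the defaults above
    cp.niter = 50;                        // more k-means iterations
    cp.nredo = 3;                         // keep the best of 3 random restarts
    cp.spherical = true;                  // spherical k-means
    cp.max_points_per_centroid = 512;     // subsample less aggressively

    faiss::Clustering clus (d, k, cp);
    faiss::IndexFlatL2 index (d);
    clus.train (n, xb.data(), index);
}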