#include "InvertedLists.h"

#include <cstring>
#include <cassert>

#include "FaissAssert.h"

namespace faiss {
using ScopedIds = InvertedLists::ScopedIds;
using ScopedCodes = InvertedLists::ScopedCodes;
InvertedLists::InvertedLists (size_t nlist, size_t code_size):
    nlist (nlist), code_size (code_size)
{}

InvertedLists::~InvertedLists ()
{}
InvertedLists::idx_t InvertedLists::get_single_id (
     size_t list_no, size_t offset) const
{
    assert (offset < list_size (list_no));
    return get_ids(list_no)[offset];
}
const uint8_t * InvertedLists::get_single_code (
     size_t list_no, size_t offset) const
{
    assert (offset < list_size (list_no));
    return get_codes(list_no) + offset * code_size;
}

size_t InvertedLists::add_entry (size_t list_no, idx_t theid,
                                 const uint8_t *code)
{
    return add_entries (list_no, 1, &theid, code);
}
void InvertedLists::update_entry (size_t list_no, size_t offset,
                                  idx_t id, const uint8_t *code)
{
    update_entries (list_no, offset, 1, &id, code);
}
void InvertedLists::reset () {
    for (size_t i = 0; i < nlist; i++) {
        resize (i, 0);
    }
}
void InvertedLists::merge_from (InvertedLists *oivf, size_t add_id) {

#pragma omp parallel for
    for (long i = 0; i < nlist; i++) {
        size_t list_size = oivf->list_size (i);
        ScopedIds ids (oivf, i);
        if (add_id == 0) {
            add_entries (i, list_size, ids.get (),
                         ScopedCodes (oivf, i).get());
        } else {
            // shift the stored ids by add_id before copying them over
            std::vector <idx_t> new_ids (list_size);
            for (size_t j = 0; j < list_size; j++) {
                new_ids [j] = ids[j] + add_id;
            }
            add_entries (i, list_size, new_ids.data(),
                         ScopedCodes (oivf, i).get());
        }
        oivf->resize (i, 0);
    }
}
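/* Illustrative sketch (not part of the original file, hence disabled): how a
   caller might drive merge_from. The function and variable names are made up
   for the example; it assumes two ArrayInvertedLists built elsewhere with the
   same nlist and code_size. */
#if 0
static void merge_example (ArrayInvertedLists &dst,
                           ArrayInvertedLists &src,
                           size_t add_id)
{
    // appends every entry of src to dst, adding add_id to each stored id;
    // on return all lists of src have been resized to 0
    dst.merge_from (&src, add_id);
}
#endif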
ArrayInvertedLists::ArrayInvertedLists (size_t nlist, size_t code_size):
    InvertedLists (nlist, code_size)
{
    ids.resize (nlist);
    codes.resize (nlist);
}
size_t ArrayInvertedLists::add_entries (
     size_t list_no, size_t n_entry,
     const idx_t* ids_in, const uint8_t *code)
{
    if (n_entry == 0) return 0;
    assert (list_no < nlist);
    size_t o = ids [list_no].size();
    ids [list_no].resize (o + n_entry);
    memcpy (&ids[list_no][o], ids_in, sizeof (ids_in[0]) * n_entry);
    codes [list_no].resize ((o + n_entry) * code_size);
    memcpy (&codes[list_no][o * code_size], code, code_size * n_entry);
    return o;
}
size_t ArrayInvertedLists::list_size (size_t list_no) const
{
    assert (list_no < nlist);
    return ids[list_no].size();
}
const uint8_t * ArrayInvertedLists::get_codes (size_t list_no) const
{
    assert (list_no < nlist);
    return codes[list_no].data();
}
const InvertedLists::idx_t * ArrayInvertedLists::get_ids (size_t list_no) const
{
    assert (list_no < nlist);
    return ids[list_no].data();
}
void ArrayInvertedLists::resize (size_t list_no, size_t new_size)
{
    ids[list_no].resize (new_size);
    codes[list_no].resize (new_size * code_size);
}
void ArrayInvertedLists::update_entries (
     size_t list_no, size_t offset, size_t n_entry,
     const idx_t *ids_in, const uint8_t *codes_in)
{
    assert (list_no < nlist);
    assert (n_entry + offset <= ids[list_no].size());
    memcpy (&ids[list_no][offset], ids_in, sizeof(ids_in[0]) * n_entry);
    memcpy (&codes[list_no][offset * code_size], codes_in, code_size * n_entry);
}
ArrayInvertedLists::~ArrayInvertedLists ()
{}
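/* Illustrative sketch (not part of the original file, hence disabled): filling
   one list of an ArrayInvertedLists and reading it back through the ScopedIds
   accessor. The sizes, ids and the zero-filled code buffer are assumptions
   made up for the example. */
#if 0
static void array_lists_example ()
{
    size_t nlist = 4, code_size = 8, n = 2;
    ArrayInvertedLists ail (nlist, code_size);

    std::vector<InvertedLists::idx_t> new_ids = {42, 43};
    std::vector<uint8_t> new_codes (n * code_size, 0);   // dummy codes

    ail.add_entries (/*list_no=*/1, n, new_ids.data(), new_codes.data());

    // ScopedIds calls get_ids and releases the pointer automatically
    ScopedIds ids (&ail, 1);
    assert (ail.list_size (1) == n && ids[0] == 42);
}
#endif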
ConcatenatedInvertedLists::ConcatenatedInvertedLists (
     int nil, const InvertedLists **ils_in):
    InvertedLists (nil > 0 ? ils_in[0]->nlist : 0,
                   nil > 0 ? ils_in[0]->code_size : 0)
{
    FAISS_THROW_IF_NOT (nil > 0);
    for (int i = 0; i < nil; i++) {
        ils.push_back (ils_in[i]);
        FAISS_THROW_IF_NOT (ils_in[i]->code_size == code_size &&
                            ils_in[i]->nlist == nlist);
    }
}
size_t ConcatenatedInvertedLists::list_size (size_t list_no) const
{
    size_t sz = 0;
    for (int i = 0; i < ils.size(); i++) {
        sz += ils[i]->list_size (list_no);
    }
    return sz;
}
const uint8_t * ConcatenatedInvertedLists::get_codes (size_t list_no) const
{
    // concatenate the codes of all sub-lists into a freshly allocated buffer;
    // the caller must hand it back through release_codes
    uint8_t *codes = new uint8_t [code_size * list_size(list_no)], *c = codes;

    for (int i = 0; i < ils.size(); i++) {
        const InvertedLists *il = ils[i];
        size_t sz = il->list_size (list_no) * code_size;
        if (sz > 0) {
            memcpy (c, ScopedCodes (il, list_no).get(), sz);
            c += sz;
        }
    }
    return codes;
}
const uint8_t * ConcatenatedInvertedLists::get_single_code (
     size_t list_no, size_t offset) const
{
    for (int i = 0; i < ils.size(); i++) {
        const InvertedLists *il = ils[i];
        size_t sz = il->list_size (list_no);
        if (offset < sz) {
            // copy the code so that it can be released like the buffers
            // returned by get_codes
            uint8_t *code = new uint8_t [code_size];
            memcpy (code, ScopedCodes (il, list_no, offset).get(), code_size);
            return code;
        }
        offset -= sz;
    }
    FAISS_THROW_FMT ("offset %ld unknown", offset);
}
const InvertedLists::idx_t * ConcatenatedInvertedLists::get_ids (size_t list_no) const
{
    // same pattern as get_codes: concatenate the ids of all sub-lists
    idx_t *ids = new idx_t [list_size(list_no)], *c = ids;

    for (int i = 0; i < ils.size(); i++) {
        const InvertedLists *il = ils[i];
        size_t sz = il->list_size (list_no);
        if (sz > 0) {
            memcpy (c, ScopedIds (il, list_no).get(), sz * sizeof(idx_t));
            c += sz;
        }
    }
    return ids;
}
InvertedLists::idx_t ConcatenatedInvertedLists::get_single_id (
     size_t list_no, size_t offset) const
{
    for (int i = 0; i < ils.size(); i++) {
        const InvertedLists *il = ils[i];
        size_t sz = il->list_size (list_no);
        if (offset < sz) {
            return il->get_single_id (list_no, offset);
        }
        offset -= sz;
    }
    FAISS_THROW_FMT ("offset %ld unknown", offset);
}
size_t ConcatenatedInvertedLists::add_entries (
     size_t, size_t, const idx_t*, const uint8_t *)
{
    FAISS_THROW_MSG ("not implemented");
}
void ConcatenatedInvertedLists::update_entries (
     size_t, size_t, size_t, const idx_t *, const uint8_t *)
{
    FAISS_THROW_MSG ("not implemented");
}
void ConcatenatedInvertedLists::resize (size_t, size_t)
{
    FAISS_THROW_MSG ("not implemented");
}

} // namespace faiss
/* Interface summary (declarations documented in InvertedLists.h):

   struct InvertedLists -- inverted lists for indexes
       typedef long idx_t;    // all indices are this type
       size_t nlist;          // number of possible key values
       size_t code_size;      // code size per vector in bytes
       virtual size_t list_size (size_t list_no) const = 0;  // get the size of a list
       virtual const uint8_t * get_codes (size_t list_no) const = 0;
       virtual const idx_t * get_ids (size_t list_no) const = 0;
       virtual void release_codes (const uint8_t *codes) const;
           // release codes returned by get_codes (default implementation is a no-op)
       virtual void release_ids (const idx_t *ids) const;
           // release ids returned by get_ids
       virtual idx_t get_single_id (size_t list_no, size_t offset) const;
       virtual const uint8_t * get_single_code (size_t list_no, size_t offset) const;
       virtual size_t add_entry (size_t list_no, idx_t theid, const uint8_t *code);
           // add one entry to an inverted list
       virtual void prefetch_lists (const long *list_nos, int nlist) const;
       void merge_from (InvertedLists *oivf, size_t add_id);
           // move all entries from oivf (empty on output)

   struct ArrayInvertedLists : InvertedLists
       std::vector<std::vector<idx_t>> ids;
       size_t list_size (size_t list_no) const override;     // get the size of a list
       const uint8_t * get_codes (size_t list_no) const override;
       const idx_t * get_ids (size_t list_no) const override;

   struct ConcatenatedInvertedLists : InvertedLists
       ConcatenatedInvertedLists (int nil, const InvertedLists **ils);
           // build InvertedLists by concatenating nil of them
       size_t list_size (size_t list_no) const override;     // get the size of a list
       const uint8_t * get_codes (size_t list_no) const override;
       const idx_t * get_ids (size_t list_no) const override;
       idx_t get_single_id (size_t list_no, size_t offset) const override;
       const uint8_t * get_single_code (size_t list_no, size_t offset) const override;
       void release_codes (const uint8_t *codes) const override;
           // release codes returned by get_codes
       void release_ids (const idx_t *ids) const override;
           // release ids returned by get_ids
*/
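/* Illustrative sketch (not part of the original sources, hence disabled):
   viewing two existing sets of inverted lists as a single read-only
   concatenation. Names are made up for the example; the concatenated view
   allocates fresh buffers in get_ids/get_codes, so reads should go through
   the Scoped* wrappers (or release_* must be called explicitly). */
#if 0
static long sum_ids_of_list_0 (const faiss::InvertedLists &a,
                               const faiss::InvertedLists &b)
{
    const faiss::InvertedLists *parts[] = { &a, &b };
    faiss::ConcatenatedInvertedLists cat (2, parts);

    // list 0 of the view is list 0 of a followed by list 0 of b
    long sum = 0;
    faiss::InvertedLists::ScopedIds ids (&cat, 0);  // wraps get_ids + release_ids
    for (size_t j = 0; j < cat.list_size (0); j++) {
        sum += ids[j];
    }
    // the view is read-only: add_entries / update_entries / resize throw
    return sum;
}
#endif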