Struct faiss::InvertedLists

struct InvertedLists

Table of inverted lists. Multithreading rules:

  • concurrent read accesses are allowed

  • concurrent update accesses are allowed

  • for resize and add_entries, only concurrent accesses to different lists are allowed

Subclassed by faiss::ArrayInvertedLists, faiss::BlockInvertedLists, faiss::OnDiskInvertedLists, faiss::ReadOnlyInvertedLists
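
As a concrete illustration of the last rule, the sketch below (an illustrative example, not taken from the Faiss documentation) adds entries to different lists from different threads, which is allowed; concurrent add_entries calls on the same list would not be. It assumes ArrayInvertedLists as the concrete subclass and OpenMP for the parallel loop.

    // Sketch: concurrent add_entries on *different* lists, which the rules allow.
    // ArrayInvertedLists and the toy data are assumptions for illustration.
    #include <faiss/invlists/InvertedLists.h>

    #include <vector>

    int main() {
        const size_t nlist = 8, code_size = 16;
        faiss::ArrayInvertedLists invlists(nlist, code_size);

    #pragma omp parallel for
        for (int list_no = 0; list_no < (int)nlist; list_no++) {
            // each thread owns exactly one list, so no locking is needed
            std::vector<faiss::idx_t> ids = {list_no * 10, list_no * 10 + 1};
            std::vector<uint8_t> codes(ids.size() * code_size, uint8_t(list_no));
            invlists.add_entries(list_no, ids.size(), ids.data(), codes.data());
        }
        return 0;
    }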

Public Types

enum subset_type_t

Values:

enumerator SUBSET_TYPE_ID_RANGE
enumerator SUBSET_TYPE_ID_MOD
enumerator SUBSET_TYPE_ELEMENT_RANGE
enumerator SUBSET_TYPE_INVLIST_FRACTION
enumerator SUBSET_TYPE_INVLIST

Public Functions

InvertedLists(size_t nlist, size_t code_size)
virtual ~InvertedLists()
virtual size_t list_size(size_t list_no) const = 0

get the size of a list

virtual const uint8_t *get_codes(size_t list_no) const = 0

get the codes for an inverted list; must be released by release_codes

Returns:

codes, of size list_size * code_size

virtual const idx_t *get_ids(size_t list_no) const = 0

get the ids for an inverted list; must be released by release_ids

Returns:

ids, of size list_size

virtual void release_codes(size_t list_no, const uint8_t *codes) const

release codes returned by get_codes (default implementation is a no-op)

virtual void release_ids(size_t list_no, const idx_t *ids) const

release ids returned by get_ids
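
The get/release protocol for codes and ids can be sketched as follows, assuming an already-populated InvertedLists reference (the dump_list helper is hypothetical); the ScopedCodes and ScopedIds helpers further down wrap the same pattern in RAII.

    // Sketch: manual get_* / release_* pairing for one list.
    #include <faiss/invlists/InvertedLists.h>

    #include <cstdio>

    void dump_list(const faiss::InvertedLists& il, size_t list_no) {
        size_t n = il.list_size(list_no);
        const faiss::idx_t* ids = il.get_ids(list_no);
        const uint8_t* codes = il.get_codes(list_no);

        for (size_t i = 0; i < n; i++) {
            // entry i has one id and code_size bytes of code
            printf("id=%lld first code byte=%d\n",
                   (long long)ids[i],
                   codes[i * il.code_size]);
        }

        // every get_codes / get_ids must be paired with its release_*
        il.release_codes(list_no, codes);
        il.release_ids(list_no, ids);
    }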

virtual idx_t get_single_id(size_t list_no, size_t offset) const
Returns:

a single id in an inverted list

virtual const uint8_t *get_single_code(size_t list_no, size_t offset) const
Returns:

a single code in an inverted list (should be deallocated with release_codes)

virtual void prefetch_lists(const idx_t *list_nos, int nlist) const

prepare the following lists (default does nothing); a list number can be -1, hence the signed type

virtual bool is_empty(size_t list_no, void *inverted_list_context = nullptr) const

check if the list is empty

virtual InvertedListsIterator *get_iterator(size_t list_no, void *inverted_list_context = nullptr) const

get an iterator for lists that have use_iterator set
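
A sketch of iterator-based access follows; the iterator methods used (is_available, next, get_id_and_codes) are assumptions to check against the InvertedListsIterator declaration in your Faiss version. This path is intended for subclasses that set use_iterator.

    // Sketch: iterator-based scan of one list.
    #include <faiss/invlists/InvertedLists.h>

    #include <cstdio>
    #include <memory>

    void iterate_list(const faiss::InvertedLists& il, size_t list_no) {
        std::unique_ptr<faiss::InvertedListsIterator> it(il.get_iterator(list_no));
        for (; it->is_available(); it->next()) {
            auto [id, code] = it->get_id_and_codes();
            printf("id=%lld\n", (long long)id);
            (void)code; // points at code_size bytes for this entry
        }
    }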

virtual size_t add_entry(size_t list_no, idx_t theid, const uint8_t *code, void *inverted_list_context = nullptr)

add one entry to an inverted list

virtual size_t add_entries(size_t list_no, size_t n_entry, const idx_t *ids, const uint8_t *code) = 0
virtual void update_entry(size_t list_no, size_t offset, idx_t id, const uint8_t *code)
virtual void update_entries(size_t list_no, size_t offset, size_t n_entry, const idx_t *ids, const uint8_t *code) = 0
virtual void resize(size_t list_no, size_t new_size) = 0
virtual void reset()
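
A minimal sketch of the mutating calls above, assuming ArrayInvertedLists as the concrete subclass: add_entries appends entries to a list, update_entry overwrites one in place, and resize grows or truncates a single list.

    // Sketch: append, overwrite and shrink a single list.
    #include <faiss/invlists/InvertedLists.h>

    #include <vector>

    int main() {
        const size_t code_size = 4;
        faiss::ArrayInvertedLists il(/*nlist=*/2, code_size);

        // append two entries to list 0
        std::vector<faiss::idx_t> ids = {42, 43};
        std::vector<uint8_t> codes(ids.size() * code_size, 0);
        il.add_entries(/*list_no=*/0, ids.size(), ids.data(), codes.data());

        // overwrite the entry at offset 1 with a new id and code
        std::vector<uint8_t> new_code(code_size, 0xff);
        il.update_entry(/*list_no=*/0, /*offset=*/1, /*id=*/99, new_code.data());

        // truncate list 0 back to a single entry
        il.resize(/*list_no=*/0, /*new_size=*/1);
        return 0;
    }
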
void merge_from(InvertedLists *oivf, size_t add_id)

move all entries from oivf (empty on output)

size_t copy_subset_to(InvertedLists &other, subset_type_t subset_type, idx_t a1, idx_t a2) const

copy a subset of the entries of this index to the other index

Returns:

number of entries copied
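
A sketch of moving and copying entries between inverted-list tables; both tables are assumed to share nlist and code_size, and the reading of a1/a2 as an id range [a1, 1000) for SUBSET_TYPE_ID_RANGE is an assumption to verify against your Faiss version.

    // Sketch: copy entries with ids in [0, 1000) into dst, then absorb
    // another table with merge_from.
    #include <faiss/invlists/InvertedLists.h>

    #include <cstdio>

    void split_and_merge(const faiss::InvertedLists& src,
                         faiss::InvertedLists& dst,
                         faiss::InvertedLists& other) {
        size_t n_copied = src.copy_subset_to(
                dst, faiss::InvertedLists::SUBSET_TYPE_ID_RANGE, 0, 1000);
        printf("copied %zu entries\n", n_copied);

        // move everything from other into dst, shifting ids by add_id (0 here);
        // other is empty afterwards
        dst.merge_from(&other, /*add_id=*/0);
    }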

double imbalance_factor() const

1 = perfectly balanced, > 1: imbalanced

void print_stats() const

display some stats about the inverted lists

size_t compute_ntotal() const

sum up list sizes
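
A minimal sketch of the bookkeeping helpers above (the report helper is hypothetical):

    // Sketch: bookkeeping over an existing table.
    #include <faiss/invlists/InvertedLists.h>

    #include <cstdio>

    void report(const faiss::InvertedLists& il) {
        printf("nlist=%zu code_size=%zu ntotal=%zu imbalance=%.2f\n",
               il.nlist,
               il.code_size,
               il.compute_ntotal(),
               il.imbalance_factor());
        il.print_stats(); // prints a summary of the list sizes
    }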

Public Members

size_t nlist

number of possible key values

size_t code_size

code size per vector in bytes

bool use_iterator = false

request to use iterator rather than get_codes / get_ids

Public Static Attributes

static const size_t INVALID_CODE_SIZE = static_cast<size_t>(-1)

used for BlockInvertedLists, where the codes are packed into groups and the individual code size is meaningless

struct ScopedCodes

Public Functions

inline ScopedCodes(const InvertedLists *il, size_t list_no)
inline ScopedCodes(const InvertedLists *il, size_t list_no, size_t offset)
inline const uint8_t *get()
inline ~ScopedCodes()

Public Members

const InvertedLists *il
const uint8_t *codes
size_t list_no

struct ScopedIds

Public Functions

inline ScopedIds(const InvertedLists *il, size_t list_no)
inline const idx_t *get()
inline idx_t operator[](size_t i) const
inline ~ScopedIds()

Public Members

const InvertedLists *il
const idx_t *ids
size_t list_no
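
A minimal usage sketch of the RAII helpers: ScopedIds and ScopedCodes call the matching release_* in their destructor, so no manual cleanup is needed (the dump_with_scopes helper is hypothetical).

    // Sketch: RAII access to one list; release_ids / release_codes run when
    // the Scoped* objects go out of scope.
    #include <faiss/invlists/InvertedLists.h>

    #include <cstdio>

    void dump_with_scopes(const faiss::InvertedLists& il, size_t list_no) {
        faiss::InvertedLists::ScopedIds ids(&il, list_no);
        faiss::InvertedLists::ScopedCodes codes(&il, list_no);

        for (size_t i = 0; i < il.list_size(list_no); i++) {
            printf("id=%lld\n", (long long)ids[i]);
        }
        // codes.get() points at list_size(list_no) * code_size bytes
        (void)codes.get();
    } // both scope guards release their pointers here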