
sparse indexes, initial commit, untested

This commit is contained in:
Jan Steemann 2015-02-07 03:07:41 +01:00
parent acc354839d
commit 003d7e9112
26 changed files with 380 additions and 303 deletions
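For orientation before the per-file diffs: this commit threads a new boolean sparse flag through the AQL optimizer, the C/C++ index layer (hash and skiplist), the V8 index bindings, the REST API documentation and the web interface. A minimal sketch of the REST payload the new code paths accept, modelled on the @EXAMPLE_ARANGOSH_RUN blocks added further down (collection and attribute names are illustrative only):

// arangosh documentation-style sketch; assumes `db` and the doc helper
// logCurlRequest are available, as in the examples added in this commit
var cn = "products";
db._drop(cn);
db._create(cn);

var url = "/_api/index?collection=" + cn;
// "sparse" is the new attribute; it defaults to false when omitted
var body = '{ "type": "hash", "unique": false, "sparse": true, "fields": [ "a" ] }';

var response = logCurlRequest('POST', url, body);
// expected: HTTP 201 and an index description that echoes "sparse": true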

View File

@ -1248,8 +1248,15 @@ std::vector<EnumerateCollectionNode::IndexMatch>
std::vector<IndexMatch> out;
auto&& indexes = _collection->getIndexes();
for (auto idx : indexes) {
if (idx->sparse) {
// sparse indexes cannot be used for replacing an EnumerateCollection node
continue;
}
IndexMatch match = CompareIndex(this, idx, attrs);
if (match.index != nullptr) {
out.push_back(match);
}
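A note on the hunk above: an index can only replace an EnumerateCollection node if it contains every document of the collection, and a sparse index by definition omits documents whose indexed attributes are null or missing, so it is skipped here. A hedged illustration (query and names are mine, not part of the commit), assuming a sparse skiplist index on attribute a of products:

// must return every document, including those without attribute "a";
// a sparse skiplist on "a" therefore cannot stand in for the full scan
db._query("FOR p IN products SORT p.a RETURN p");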

View File

@ -56,6 +56,7 @@ namespace triagens {
: id(idx->_iid),
type(idx->_type),
unique(idx->_unique),
sparse(idx->_sparse),
fields(),
internals(idx) {
@ -74,6 +75,7 @@ namespace triagens {
: id(triagens::basics::StringUtils::uint64(triagens::basics::JsonHelper::checkAndGetStringValue(json, "id"))),
type(TRI_TypeIndex(triagens::basics::JsonHelper::checkAndGetStringValue(json, "type").c_str())),
unique(triagens::basics::JsonHelper::checkAndGetBooleanValue(json, "unique")),
sparse(triagens::basics::JsonHelper::getBooleanValue(json, "sparse", false)),
fields(),
internals(nullptr) {
@ -103,7 +105,8 @@ namespace triagens {
json("type", triagens::basics::Json(TRI_TypeNameIndex(type)))
("id", triagens::basics::Json(triagens::basics::StringUtils::itoa(id)))
("unique", triagens::basics::Json(unique));
("unique", triagens::basics::Json(unique))
("sparse", triagens::basics::Json(sparse));
if (hasSelectivityEstimate()) {
json("selectivityEstimate", triagens::basics::Json(selectivityEstimate()));
@ -154,10 +157,13 @@ namespace triagens {
// --SECTION-- public variables
// -----------------------------------------------------------------------------
TRI_idx_iid_t const id;
TRI_idx_type_e const type;
bool const unique;
std::vector<std::string> fields;
public:
TRI_idx_iid_t const id;
TRI_idx_type_e const type;
bool const unique;
bool const sparse;
std::vector<std::string> fields;
private:

View File

@ -1817,8 +1817,8 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
}
}
else if (idx->type == TRI_IDX_TYPE_HASH_INDEX) {
//each valid orCondition should match every field of the given index
for (size_t k = 0; k < validPos.size() && !indexOrCondition.empty(); k++) {
// each valid orCondition should match every field of the given index
for (size_t k = 0; k < validPos.size() && ! indexOrCondition.empty(); k++) {
auto const map = _rangeInfoMapVec->find(var->name, validPos[k]);
for (size_t j = 0; j < idx->fields.size(); j++) {
auto range = map->find(idx->fields[j]);
@ -1828,6 +1828,28 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
break;
}
else {
if (idx->sparse) {
bool mustClear = false;
auto const& rib = range->second;
if (rib.isConstant()) {
auto const& value = rib._lowConst.bound();
if (value.isEmpty() || value.isNull()) {
mustClear = true;
}
}
else {
// non-constant range
mustClear = true;
}
if (mustClear) {
indexOrCondition.clear(); // not usable
std::cout << "CANNOT USE SPARSE HASH INDEX\n";
break;
}
}
indexOrCondition.at(k).push_back(range->second);
}
}
@ -1893,6 +1915,34 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
equality = equality && range->second.is1ValueRangeInfo();
}
// check if index is sparse and exclude it if required
if (idx->sparse) {
bool mustClear = false;
auto const& rib = range->second;
if (rib.isConstant()) {
if (rib._lowConst.isDefined()) {
if (rib._lowConst.inclusive() &&
rib._lowConst.bound().isNull()) {
mustClear = true;
}
}
else {
mustClear = true;
}
}
else {
// non-constant range
mustClear = true;
}
if (mustClear) {
indexOrCondition.clear();
handled = true;
}
}
if (handled) {
// exit the for loop, too. Otherwise it will crash because
// indexOrCondition is empty now
@ -1903,6 +1953,7 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
// check that all positions are non-empty
bool isEmpty = indexOrCondition.empty();
if (! isEmpty) {
for (size_t k = 0; k < validPos.size(); k++) {
if (indexOrCondition.at(k).empty()) {
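Both hunks in this file follow the same rule: a sparse hash or skiplist index can only serve a filter whose bounds are constant and cannot match null, because null and missing attribute values are absent from a sparse index. A hedged sketch of the effect, assuming an arangosh session with a sparse hash index on attribute a of products (queries are illustrative):

// constant, non-null equality: the sparse index remains a candidate
db._query("FOR p IN products FILTER p.a == 1 RETURN p");

// this filter could match documents whose "a" is null or unset, which the
// sparse index does not contain, so the optimizer falls back to a full scan
db._query("FOR p IN products FILTER p.a == null RETURN p");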

View File

@ -478,8 +478,7 @@ TRI_index_t* TRI_CreateGeo1Index (TRI_document_collection_t* document,
TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO1_INDEX, document, unique, false);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO1_INDEX, document, false, unique);
idx->_ignoreNull = ignoreNull;
idx->memory = MemoryGeoIndex;
@ -528,7 +527,7 @@ TRI_index_t* TRI_CreateGeo2Index (TRI_document_collection_t* document,
TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO2_INDEX, document, unique, false);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_GEO2_INDEX, document, false, unique);
idx->_ignoreNull = ignoreNull;
idx->memory = MemoryGeoIndex;

View File

@ -422,7 +422,7 @@ int TRI_InsertKeyHashArray (TRI_hash_array_t* array,
bool found = (arrayElement->_document != nullptr);
if (found) {
return TRI_RESULT_KEY_EXISTS;
return TRI_ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED;
}
*arrayElement = *element;
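The change above makes the unique hash array report TRI_ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED directly rather than the internal TRI_RESULT_KEY_EXISTS code (the corresponding translation in HashIndex_insert is removed further down, and the RESULT_KEY_EXISTS entries disappear from the generated JS error lists). Observable behaviour should be unchanged; a hedged arangosh sketch, names illustrative:

// assumes an arangosh session
db._create("products");
db.products.ensureUniqueConstraint("a");   // unique hash index on "a"
db.products.save({ a: 1 });
db.products.save({ a: 1 });                // rejected: unique constraint violated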

View File

@ -84,24 +84,6 @@ static int FillIndexSearchValueByHashIndexElement (TRI_hash_index_t* hashIndex,
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief creates space for sub-objects in the hash index element
////////////////////////////////////////////////////////////////////////////////
template<typename T>
static int AllocateSubObjectsHashIndexElement (TRI_hash_index_t const* idx,
T* element) {
TRI_ASSERT_EXPENSIVE(element->_subObjects == nullptr);
element->_subObjects = static_cast<TRI_shaped_sub_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, KeyEntrySize(idx), false));
if (element->_subObjects == nullptr) {
return TRI_ERROR_OUT_OF_MEMORY;
}
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief frees space for sub-objects in the hash index element
////////////////////////////////////////////////////////////////////////////////
@ -128,9 +110,7 @@ static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
T* hashElement,
TRI_doc_mptr_t const* document) {
TRI_shaper_t* shaper; // underlying shaper
TRI_shaped_json_t shapedObject; // the sub-object
TRI_shaped_json_t shapedJson; // the object behind document
TRI_shaped_sub_t shapedSub; // the relative sub-object
shaper = hashIndex->base._collection->getShaper(); // ONLY IN INDEX, PROTECTED by RUNTIME
@ -151,6 +131,7 @@ static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
int res = TRI_ERROR_NO_ERROR;
size_t const n = NumPaths(hashIndex);
for (size_t j = 0; j < n; ++j) {
TRI_shape_pid_t path = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths, j)));
@ -159,31 +140,27 @@ static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
// field not part of the object
if (acc == nullptr || acc->_resultSid == TRI_SHAPE_ILLEGAL) {
shapedSub._sid = TRI_LookupBasicSidShaper(TRI_SHAPE_NULL);
shapedSub._length = 0;
shapedSub._offset = 0;
hashElement->_subObjects[j]._sid = TRI_LookupBasicSidShaper(TRI_SHAPE_NULL);
hashElement->_subObjects[j]._length = 0;
hashElement->_subObjects[j]._offset = 0;
res = TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
continue;
}
// extract the field
else {
if (! TRI_ExecuteShapeAccessor(acc, &shapedJson, &shapedObject)) {
// hashElement->fields: memory deallocated in the calling procedure
return TRI_ERROR_INTERNAL;
}
if (shapedObject._sid == TRI_LookupBasicSidShaper(TRI_SHAPE_NULL)) {
res = TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
}
shapedSub._sid = shapedObject._sid;
shapedSub._length = shapedObject._data.length;
shapedSub._offset = static_cast<uint32_t>(((char const*) shapedObject._data.data) - ptr);
TRI_shaped_json_t shapedObject;
if (! TRI_ExecuteShapeAccessor(acc, &shapedJson, &shapedObject)) {
return TRI_ERROR_INTERNAL;
}
// store the json shaped sub-object -- this is what will be hashed
hashElement->_subObjects[j] = shapedSub;
if (shapedObject._sid == TRI_LookupBasicSidShaper(TRI_SHAPE_NULL)) {
res = TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
}
hashElement->_subObjects[j]._sid = shapedObject._sid;
hashElement->_subObjects[j]._length = shapedObject._data.length;
hashElement->_subObjects[j]._offset = static_cast<uint32_t>(((char const*) shapedObject._data.data) - ptr);
}
return res;
@ -202,16 +179,14 @@ static int HashIndexHelperAllocate (TRI_hash_index_t const* hashIndex,
// will be used for hashing. Fill the json field list from the document.
// .............................................................................
hashElement->_subObjects = nullptr;
int res = AllocateSubObjectsHashIndexElement<T>(hashIndex, hashElement);
hashElement->_subObjects = static_cast<TRI_shaped_sub_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, KeyEntrySize(hashIndex), false));
if (res != TRI_ERROR_NO_ERROR) {
// out of memory
return res;
if (hashElement->_subObjects == nullptr) {
return TRI_ERROR_OUT_OF_MEMORY;
}
res = HashIndexHelper<T>(hashIndex, hashElement, document);
int res = HashIndexHelper<T>(hashIndex, hashElement, document);
// .............................................................................
// It may happen that the document does not have the necessary attributes to
// have participated in the hash index. If the index is unique, we do not
@ -219,7 +194,9 @@ static int HashIndexHelperAllocate (TRI_hash_index_t const* hashIndex,
// the index is not unique, we ignore this error.
// .............................................................................
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING && ! hashIndex->base._unique) {
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING &&
! hashIndex->base._unique &&
! hashIndex->base._sparse) {
res = TRI_ERROR_NO_ERROR;
}
@ -262,10 +239,6 @@ static int HashIndex_insert (TRI_hash_index_t* hashIndex,
TRI_Free(TRI_UNKNOWN_MEM_ZONE, key._values);
}
if (res == TRI_RESULT_KEY_EXISTS) {
return TRI_set_errno(TRI_ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED);
}
return res;
}
@ -426,10 +399,9 @@ size_t MemoryHashIndex (TRI_index_t const* idx) {
return static_cast<size_t>(KeyEntrySize(hashIndex) * hashIndex->_hashArray._nrUsed +
TRI_MemoryUsageHashArray(&hashIndex->_hashArray));
}
else {
return static_cast<size_t>(KeyEntrySize(hashIndex) * hashIndex->_hashArrayMulti._nrUsed +
TRI_MemoryUsageHashArrayMulti(&hashIndex->_hashArrayMulti));
}
return static_cast<size_t>(KeyEntrySize(hashIndex) * hashIndex->_hashArrayMulti._nrUsed +
TRI_MemoryUsageHashArrayMulti(&hashIndex->_hashArrayMulti));
}
////////////////////////////////////////////////////////////////////////////////
@ -444,14 +416,22 @@ static TRI_json_t* JsonHashIndex (TRI_index_t const* idx) {
TRI_hash_index_t const* hashIndex = (TRI_hash_index_t const*) idx;
TRI_document_collection_t* document = idx->_collection;
// .............................................................................
// ..........................................................................
// Allocate sufficient memory for the field list
// .............................................................................
// ..........................................................................
char const** fieldList = TRI_FieldListByPathList(document->getShaper(), &hashIndex->_paths); // ONLY IN INDEX, PROTECTED by RUNTIME
char const** fieldList = static_cast<char const**>(TRI_Allocate(TRI_CORE_MEM_ZONE, (sizeof(char*) * hashIndex->_paths._length) , false));
if (fieldList == nullptr) {
return nullptr;
}
for (size_t j = 0; j < hashIndex->_paths._length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*) TRI_AtVector(&hashIndex->_paths, j));
TRI_shape_path_t const* path = document->getShaper()->lookupAttributePathByPid(document->getShaper(), shape); // ONLY IN INDEX, PROTECTED by RUNTIME
if (path == nullptr) {
TRI_Free(TRI_CORE_MEM_ZONE, (void*) fieldList);
return nullptr;
}
fieldList[j] = ((const char*) path) + sizeof(TRI_shape_path_t) + path->_aidLength * sizeof(TRI_shape_aid_t);
}
// ..........................................................................
@ -619,6 +599,7 @@ TRI_index_t* TRI_CreateHashIndex (TRI_document_collection_t* document,
TRI_idx_iid_t iid,
TRI_vector_pointer_t* fields,
TRI_vector_t* paths,
bool sparse,
bool unique) {
// ...........................................................................
// Initialize the index and the callback functions
@ -627,7 +608,7 @@ TRI_index_t* TRI_CreateHashIndex (TRI_document_collection_t* document,
TRI_hash_index_t* hashIndex = static_cast<TRI_hash_index_t*>(TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_hash_index_t), false));
TRI_index_t* idx = &hashIndex->base;
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_HASH_INDEX, document, unique, false);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_HASH_INDEX, document, sparse, unique);
idx->_hasSelectivityEstimate = true;
idx->selectivityEstimate = SelectivityEstimateHashIndex;
@ -672,9 +653,8 @@ TRI_index_t* TRI_CreateHashIndex (TRI_document_collection_t* document,
////////////////////////////////////////////////////////////////////////////////
void TRI_DestroyHashIndex (TRI_index_t* idx) {
TRI_hash_index_t* hashIndex;
TRI_hash_index_t* hashIndex = (TRI_hash_index_t*) idx;
hashIndex = (TRI_hash_index_t*) idx;
if (hashIndex->base._unique) {
TRI_DestroyHashArray(&hashIndex->_hashArray);
}
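With the hunks above, a document whose indexed attribute is missing is only treated as "index it as null" for non-unique, non-sparse hash indexes; for a sparse index the attribute-missing result is kept, so the document simply does not enter the index. This matches the REST documentation added later in this commit ("Sparse indexes do not index documents for which the index attributes are null or not set"). A hedged sketch, assuming a sparse hash index on attribute a already exists on products:

db.products.save({ a: 1 });     // has "a": indexed
db.products.save({ b: 2 });     // no "a": stored, but left out of the sparse index
db.products.save({ a: null });  // "a" is null: also left out of the sparse index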

View File

@ -105,6 +105,7 @@ TRI_index_t* TRI_CreateHashIndex (struct TRI_document_collection_t*,
TRI_idx_iid_t,
TRI_vector_pointer_t*,
TRI_vector_t*,
bool,
bool);
////////////////////////////////////////////////////////////////////////////////

View File

@ -178,6 +178,7 @@ static int ProcessIndexFields (v8::Isolate* isolate,
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief process the geojson flag and add it to the json
////////////////////////////////////////////////////////////////////////////////
@ -192,6 +193,20 @@ static int ProcessIndexGeoJsonFlag (v8::Isolate* isolate,
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief process the sparse flag and add it to the json
////////////////////////////////////////////////////////////////////////////////
static int ProcessIndexSparseFlag (v8::Isolate* isolate,
v8::Handle<v8::Object> const obj,
TRI_json_t* json) {
v8::HandleScope scope(isolate);
bool sparse = ExtractBoolFlag(isolate, obj, TRI_V8_ASCII_STRING("sparse"), false);
TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, json, "sparse", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, sparse));
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief process the unique flag and add it to the json
////////////////////////////////////////////////////////////////////////////////
@ -262,6 +277,7 @@ static int EnhanceJsonIndexHash (v8::Isolate* isolate,
TRI_json_t* json,
bool create) {
int res = ProcessIndexFields(isolate, obj, json, 0, create);
ProcessIndexSparseFlag(isolate, obj, json);
ProcessIndexUniqueFlag(isolate, obj, json);
return res;
}
@ -275,6 +291,7 @@ static int EnhanceJsonIndexSkiplist (v8::Isolate* isolate,
TRI_json_t* json,
bool create) {
int res = ProcessIndexFields(isolate, obj, json, 0, create);
ProcessIndexSparseFlag(isolate, obj, json);
ProcessIndexUniqueFlag(isolate, obj, json);
return res;
}
@ -514,7 +531,6 @@ static void EnsureIndexLocal (const v8::FunctionCallbackInfo<v8::Value>& args,
v8::Isolate* isolate = args.GetIsolate();
v8::HandleScope scope(isolate);
TRI_ASSERT(collection != nullptr);
TRI_ASSERT(json != nullptr);
@ -526,12 +542,19 @@ static void EnsureIndexLocal (const v8::FunctionCallbackInfo<v8::Value>& args,
TRI_idx_type_e type = TRI_TypeIndex(value->_value._string.data);
// extract unique
// extract unique flag
bool unique = false;
value = TRI_LookupObjectJson(json, "unique");
if (TRI_IsBooleanJson(value)) {
unique = value->_value._boolean;
}
// extract sparse flag
bool sparse = false;
value = TRI_LookupObjectJson(json, "sparse");
if (TRI_IsBooleanJson(value)) {
sparse = value->_value._boolean;
}
TRI_vector_pointer_t attributes;
TRI_InitVectorPointer(&attributes, TRI_CORE_MEM_ZONE);
@ -689,12 +712,14 @@ static void EnsureIndexLocal (const v8::FunctionCallbackInfo<v8::Value>& args,
idx = TRI_EnsureHashIndexDocumentCollection(document,
iid,
&attributes,
sparse,
unique,
&created);
}
else {
idx = TRI_LookupHashIndexDocumentCollection(document,
&attributes,
sparse,
unique);
}
@ -714,12 +739,14 @@ static void EnsureIndexLocal (const v8::FunctionCallbackInfo<v8::Value>& args,
idx = TRI_EnsureSkiplistIndexDocumentCollection(document,
iid,
&attributes,
sparse,
unique,
&created);
}
else {
idx = TRI_LookupSkiplistIndexDocumentCollection(document,
&attributes,
sparse,
unique);
}
break;
@ -851,7 +878,6 @@ static void EnsureIndex (const v8::FunctionCallbackInfo<v8::Value>& args,
TRI_json_t* json = nullptr;
int res = EnhanceIndexJson(args, json, create);
if (res == TRI_ERROR_NO_ERROR &&
ServerState::instance()->isCoordinator()) {
string const dbname(collection->_dbName);

View File

@ -3055,6 +3055,7 @@ static int FillIndex (TRI_document_collection_t* document,
static TRI_index_t* LookupPathIndexDocumentCollection (TRI_document_collection_t* collection,
TRI_vector_t const* paths,
TRI_idx_type_e type,
bool sparse,
bool unique,
bool allowAnyAttributeOrder) {
TRI_vector_t* indexPaths = nullptr;
@ -3067,10 +3068,12 @@ static TRI_index_t* LookupPathIndexDocumentCollection (TRI_document_collection_t
TRI_index_t* idx = static_cast<TRI_index_t*>(collection->_allIndexes._buffer[j]);
// .........................................................................
// check if the type of the index matches
// check if the type, uniqueness and sparsity of the indexes match
// .........................................................................
if (idx->_type != type || idx->_unique != unique) {
if (idx->_type != type ||
idx->_unique != unique ||
idx->_sparse != sparse) {
continue;
}
@ -3172,11 +3175,11 @@ static int PathBasedIndexFromJson (TRI_document_collection_t* document,
TRI_vector_pointer_t const*,
TRI_idx_iid_t,
bool,
bool,
bool*),
TRI_index_t** dst) {
TRI_json_t* bv;
TRI_vector_pointer_t attributes;
bool unique;
size_t fieldCount;
if (dst != nullptr) {
@ -3192,12 +3195,12 @@ static int PathBasedIndexFromJson (TRI_document_collection_t* document,
// extract the list of fields
if (fieldCount < 1) {
LOG_ERROR("ignoring index %llu, need at least one attribute path",(unsigned long long) iid);
LOG_ERROR("ignoring index %llu, need at least one attribute path", (unsigned long long) iid);
return TRI_set_errno(TRI_ERROR_BAD_PARAMETER);
}
// determine if the hash index is unique or non-unique
// determine if the index is unique or non-unique
bv = TRI_LookupObjectJson(definition, "unique");
if (! TRI_IsBooleanJson(bv)) {
@ -3205,7 +3208,16 @@ static int PathBasedIndexFromJson (TRI_document_collection_t* document,
return TRI_set_errno(TRI_ERROR_BAD_PARAMETER);
}
unique = bv->_value._boolean;
bool unique = bv->_value._boolean;
// determine sparsity
bool sparse = false;
bv = TRI_LookupObjectJson(definition, "sparse");
if (TRI_IsBooleanJson(bv)) {
sparse = bv->_value._boolean;
}
// Initialise the vector in which we store the fields on which the hashing
// will be based.
@ -3219,7 +3231,7 @@ static int PathBasedIndexFromJson (TRI_document_collection_t* document,
}
// create the index
TRI_index_t* idx = creator(document, &attributes, iid, unique, nullptr);
TRI_index_t* idx = creator(document, &attributes, iid, sparse, unique, nullptr);
if (dst != nullptr) {
*dst = idx;
@ -4118,6 +4130,7 @@ TRI_index_t* TRI_EnsureGeoIndex2DocumentCollection (TRI_document_collection_t* d
static TRI_index_t* CreateHashIndexDocumentCollection (TRI_document_collection_t* document,
TRI_vector_pointer_t const* attributes,
TRI_idx_iid_t iid,
bool sparse,
bool unique,
bool* created) {
TRI_vector_pointer_t fields;
@ -4145,7 +4158,7 @@ static TRI_index_t* CreateHashIndexDocumentCollection (TRI_document_collection_t
// a new one.
// ...........................................................................
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_HASH_INDEX, unique, false);
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_HASH_INDEX, sparse, unique, false);
if (idx != nullptr) {
TRI_DestroyVector(&paths);
@ -4165,6 +4178,7 @@ static TRI_index_t* CreateHashIndexDocumentCollection (TRI_document_collection_t
iid,
&fields,
&paths,
sparse,
unique);
if (idx == nullptr) {
@ -4183,6 +4197,7 @@ static TRI_index_t* CreateHashIndexDocumentCollection (TRI_document_collection_t
if (res != TRI_ERROR_NO_ERROR) {
TRI_FreeHashIndex(idx);
TRI_set_errno(res);
return nullptr;
}
@ -4225,6 +4240,7 @@ static int HashIndexFromJson (TRI_document_collection_t* document,
TRI_index_t* TRI_LookupHashIndexDocumentCollection (TRI_document_collection_t* document,
TRI_vector_pointer_t const* attributes,
bool sparse,
bool unique) {
TRI_vector_pointer_t fields;
TRI_vector_t paths;
@ -4241,7 +4257,7 @@ TRI_index_t* TRI_LookupHashIndexDocumentCollection (TRI_document_collection_t* d
return nullptr;
}
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_HASH_INDEX, unique, true);
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_HASH_INDEX, sparse, unique, true);
// release memory allocated to vector
TRI_DestroyVector(&paths);
@ -4257,6 +4273,7 @@ TRI_index_t* TRI_LookupHashIndexDocumentCollection (TRI_document_collection_t* d
TRI_index_t* TRI_EnsureHashIndexDocumentCollection (TRI_document_collection_t* document,
TRI_idx_iid_t iid,
TRI_vector_pointer_t const* attributes,
bool sparse,
bool unique,
bool* created) {
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
@ -4268,7 +4285,7 @@ TRI_index_t* TRI_EnsureHashIndexDocumentCollection (TRI_document_collection_t* d
TRI_WRITE_LOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION(document);
// given the list of attributes (as strings)
TRI_index_t* idx = CreateHashIndexDocumentCollection(document, attributes, iid, unique, created);
TRI_index_t* idx = CreateHashIndexDocumentCollection(document, attributes, iid, sparse, unique, created);
if (idx != nullptr) {
if (created) {
@ -4306,6 +4323,7 @@ TRI_index_t* TRI_EnsureHashIndexDocumentCollection (TRI_document_collection_t* d
static TRI_index_t* CreateSkiplistIndexDocumentCollection (TRI_document_collection_t* document,
TRI_vector_pointer_t const* attributes,
TRI_idx_iid_t iid,
bool sparse,
bool unique,
bool* created) {
TRI_vector_pointer_t fields;
@ -4332,7 +4350,7 @@ static TRI_index_t* CreateSkiplistIndexDocumentCollection (TRI_document_collecti
// a new one.
// ...........................................................................
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_SKIPLIST_INDEX, unique, false);
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_SKIPLIST_INDEX, sparse, unique, false);
if (idx != nullptr) {
TRI_DestroyVector(&paths);
@ -4347,7 +4365,7 @@ static TRI_index_t* CreateSkiplistIndexDocumentCollection (TRI_document_collecti
}
// Create the skiplist index
idx = TRI_CreateSkiplistIndex(document, iid, &fields, &paths, unique);
idx = TRI_CreateSkiplistIndex(document, iid, &fields, &paths, sparse, unique);
if (idx == nullptr) {
TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
@ -4406,6 +4424,7 @@ static int SkiplistIndexFromJson (TRI_document_collection_t* document,
TRI_index_t* TRI_LookupSkiplistIndexDocumentCollection (TRI_document_collection_t* document,
TRI_vector_pointer_t const* attributes,
bool sparse,
bool unique) {
TRI_vector_pointer_t fields;
TRI_vector_t paths;
@ -4422,7 +4441,7 @@ TRI_index_t* TRI_LookupSkiplistIndexDocumentCollection (TRI_document_collection_
return nullptr;
}
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_SKIPLIST_INDEX, unique, true);
TRI_index_t* idx = LookupPathIndexDocumentCollection(document, &paths, TRI_IDX_TYPE_SKIPLIST_INDEX, sparse, unique, true);
// release memory allocated to vector
TRI_DestroyVector(&paths);
@ -4438,6 +4457,7 @@ TRI_index_t* TRI_LookupSkiplistIndexDocumentCollection (TRI_document_collection_
TRI_index_t* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_collection_t* document,
TRI_idx_iid_t iid,
TRI_vector_pointer_t const* attributes,
bool sparse,
bool unique,
bool* created) {
TRI_ReadLockReadWriteLock(&document->_vocbase->_inventoryLock);
@ -4448,7 +4468,7 @@ TRI_index_t* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_collection_
TRI_WRITE_LOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION(document);
TRI_index_t* idx = CreateSkiplistIndexDocumentCollection(document, attributes, iid, unique, created);
TRI_index_t* idx = CreateSkiplistIndexDocumentCollection(document, attributes, iid, sparse, unique, created);
if (idx != nullptr) {
if (created) {
@ -4480,8 +4500,8 @@ TRI_index_t* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_collection_
// -----------------------------------------------------------------------------
static TRI_index_t* LookupFulltextIndexDocumentCollection (TRI_document_collection_t* document,
const char* attributeName,
const bool indexSubstrings,
char const* attributeName,
bool indexSubstrings,
int minWordLength) {
TRI_ASSERT(attributeName != nullptr);
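LookupPathIndexDocumentCollection now compares sparsity in addition to type and uniqueness, so ensuring a sparse index over the same attributes as an existing non-sparse one should create a second, distinct index instead of returning the existing one. A hedged REST-level sketch in the style of the documentation examples (helper and names as in those examples):

var url = "/_api/index?collection=products";
// two definitions that differ only in "sparse" should now yield two indexes on "a"
logCurlRequest('POST', url, '{ "type": "hash", "unique": false, "sparse": false, "fields": [ "a" ] }');
logCurlRequest('POST', url, '{ "type": "hash", "unique": false, "sparse": true,  "fields": [ "a" ] }');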

View File

@ -876,6 +876,7 @@ struct TRI_index_s* TRI_EnsureGeoIndex2DocumentCollection (TRI_document_collecti
struct TRI_index_s* TRI_LookupHashIndexDocumentCollection (TRI_document_collection_t*,
TRI_vector_pointer_t const*,
bool,
bool);
////////////////////////////////////////////////////////////////////////////////
@ -886,6 +887,7 @@ struct TRI_index_s* TRI_EnsureHashIndexDocumentCollection (TRI_document_collecti
TRI_idx_iid_t,
TRI_vector_pointer_t const*,
bool,
bool,
bool*);
// -----------------------------------------------------------------------------
@ -904,6 +906,7 @@ struct TRI_index_s* TRI_EnsureHashIndexDocumentCollection (TRI_document_collecti
struct TRI_index_s* TRI_LookupSkiplistIndexDocumentCollection (TRI_document_collection_t*,
TRI_vector_pointer_t const*,
bool,
bool);
////////////////////////////////////////////////////////////////////////////////
@ -914,6 +917,7 @@ struct TRI_index_s* TRI_EnsureSkiplistIndexDocumentCollection (TRI_document_coll
TRI_idx_iid_t,
TRI_vector_pointer_t const*,
bool,
bool,
bool*);
// -----------------------------------------------------------------------------

View File

@ -70,8 +70,8 @@ void TRI_InitIndex (TRI_index_t* idx,
TRI_idx_iid_t iid,
TRI_idx_type_e type,
TRI_document_collection_t* document,
bool unique,
bool sparse) {
bool sparse,
bool unique) {
TRI_ASSERT(idx != nullptr);
if (iid > 0) {
@ -435,6 +435,12 @@ TRI_json_t* TRI_JsonIndex (TRI_memory_zone_t* zone,
TRI_Insert3ObjectJson(zone, json, "id", TRI_CreateStringCopyJson(zone, number, strlen(number)));
TRI_Insert3ObjectJson(zone, json, "type", TRI_CreateStringCopyJson(zone, TRI_TypeNameIndex(idx->_type), strlen(TRI_TypeNameIndex(idx->_type))));
TRI_Insert3ObjectJson(zone, json, "unique", TRI_CreateBooleanJson(zone, idx->_unique));
if (idx->_type == TRI_IDX_TYPE_HASH_INDEX ||
idx->_type == TRI_IDX_TYPE_SKIPLIST_INDEX) {
// only show sparse flag for these index types, as it can't be set on others
TRI_Insert3ObjectJson(zone, json, "sparse", TRI_CreateBooleanJson(zone, idx->_sparse));
}
if (idx->_hasSelectivityEstimate) {
TRI_Insert3ObjectJson(zone, json, "selectivityEstimate", TRI_CreateNumberJson(zone, idx->selectivityEstimate(idx)));
}
@ -459,41 +465,6 @@ void TRI_CopyPathVector (TRI_vector_t* dst, TRI_vector_t* src) {
}
}
////////////////////////////////////////////////////////////////////////////////
/// @brief converts a path vector into a field list
///
/// Note that you must free the field list itself, but not the fields. They
/// belong to the shaper.
////////////////////////////////////////////////////////////////////////////////
char const** TRI_FieldListByPathList (TRI_shaper_t const* shaper,
TRI_vector_t const* paths) {
// .............................................................................
// Allocate sufficient memory for the field list
// .............................................................................
char const** fieldList = static_cast<char const**>(TRI_Allocate(TRI_CORE_MEM_ZONE, (sizeof(char const*) * paths->_length), false));
// ..........................................................................
// Convert the attributes (field list of the hash index) into strings
// ..........................................................................
for (size_t j = 0; j < paths->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(paths, j)));
TRI_shape_path_t const* path = shaper->lookupAttributePathByPid(const_cast<TRI_shaper_t*>(shaper), shape);
if (path == nullptr) {
TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
TRI_Free(TRI_CORE_MEM_ZONE, (void*) fieldList);
return nullptr;
}
fieldList[j] = ((const char*) path) + sizeof(TRI_shape_path_t) + path->_aidLength * sizeof(TRI_shape_aid_t);
}
return fieldList;
}
// -----------------------------------------------------------------------------
// --SECTION-- PRIMARY INDEX
// -----------------------------------------------------------------------------
@ -576,7 +547,7 @@ TRI_index_t* TRI_CreatePrimaryIndex (TRI_document_collection_t* document) {
TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
TRI_PushBackVectorString(&idx->_fields, id);
TRI_InitIndex(idx, 0, TRI_IDX_TYPE_PRIMARY_INDEX, document, true, false);
TRI_InitIndex(idx, 0, TRI_IDX_TYPE_PRIMARY_INDEX, document, false, true);
idx->_hasSelectivityEstimate = true;
idx->selectivityEstimate = &SelectivityEstimatePrimary;
@ -1224,9 +1195,9 @@ TRI_skiplist_iterator_t* TRI_LookupSkiplistIndex (TRI_index_t* idx,
/// @brief helper for skiplist methods
////////////////////////////////////////////////////////////////////////////////
static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
static int SkiplistIndexHelper (TRI_skiplist_index_t const* skiplistIndex,
TRI_skiplist_index_element_t* skiplistElement,
const TRI_doc_mptr_t* document) {
TRI_doc_mptr_t const* document) {
// ..........................................................................
// Assign the document to the SkiplistIndexElement structure so that it can
// be retrieved later.
@ -1243,6 +1214,8 @@ static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
return TRI_ERROR_INTERNAL;
}
int res = TRI_ERROR_NO_ERROR;
skiplistElement->_document = const_cast<TRI_doc_mptr_t*>(document);
char const* ptr = skiplistElement->_document->getShapedJsonPtr(); // ONLY IN INDEX, PROTECTED by RUNTIME
@ -1259,16 +1232,13 @@ static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
if (acc == nullptr || acc->_resultSid == TRI_SHAPE_ILLEGAL) {
// OK, the document does not contain the attribute needed by
// the index, are we sparse?
if (! skiplistIndex->base._sparse) {
// No, so let's fake a JSON null:
skiplistElement->_subObjects[j]._sid = TRI_LookupBasicSidShaper(TRI_SHAPE_NULL);
skiplistElement->_subObjects[j]._length = 0;
skiplistElement->_subObjects[j]._offset = 0;
continue;
}
return TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
}
skiplistElement->_subObjects[j]._sid = TRI_LookupBasicSidShaper(TRI_SHAPE_NULL);
skiplistElement->_subObjects[j]._length = 0;
skiplistElement->_subObjects[j]._offset = 0;
res = TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
continue;
}
// ..........................................................................
// Extract the field
@ -1279,6 +1249,10 @@ static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
return TRI_ERROR_INTERNAL;
}
if (shapedObject._sid == TRI_LookupBasicSidShaper(TRI_SHAPE_NULL)) {
res = TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING;
}
// .........................................................................
// Store the field
// .........................................................................
@ -1288,7 +1262,7 @@ static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
skiplistElement->_subObjects[j]._offset = static_cast<uint32_t>(((char const*) shapedObject._data.data) - ptr);
}
return TRI_ERROR_NO_ERROR;
return res;
}
////////////////////////////////////////////////////////////////////////////////
@ -1298,15 +1272,7 @@ static int SkiplistIndexHelper (const TRI_skiplist_index_t* skiplistIndex,
static int InsertSkiplistIndex (TRI_index_t* idx,
TRI_doc_mptr_t const* doc,
bool isRollback) {
// ...........................................................................
// Obtain the skiplist index structure
// ...........................................................................
if (idx == nullptr) {
LOG_WARNING("internal error in InsertSkiplistIndex");
return TRI_ERROR_INTERNAL;
}
TRI_skiplist_index_t* skiplistIndex = (TRI_skiplist_index_t*) idx;
// ...........................................................................
@ -1318,7 +1284,6 @@ static int InsertSkiplistIndex (TRI_index_t* idx,
skiplistElement._subObjects = static_cast<TRI_shaped_sub_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_sub_t) * skiplistIndex->_paths._length, false));
if (skiplistElement._subObjects == nullptr) {
LOG_WARNING("out-of-memory in InsertSkiplistIndex");
return TRI_ERROR_OUT_OF_MEMORY;
}
@ -1333,24 +1298,22 @@ static int InsertSkiplistIndex (TRI_index_t* idx,
// all.
// ...........................................................................
if (res != TRI_ERROR_NO_ERROR) {
// .........................................................................
// It may happen that the document does not have the necessary
// attributes to be included within the hash index, in this case do
// not report back an error.
// .........................................................................
// ..........................................................................
// Deallocate the memory already allocated to skiplistElement.fields
// ..........................................................................
TRI_Free(TRI_UNKNOWN_MEM_ZONE, skiplistElement._subObjects);
// .........................................................................
// It may happen that the document does not have the necessary
// attributes to be included within the hash index, in this case do
// not report back an error.
// .........................................................................
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING) {
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING) {
if (idx->_sparse) {
TRI_Free(TRI_UNKNOWN_MEM_ZONE, skiplistElement._subObjects);
return TRI_ERROR_NO_ERROR;
}
res = TRI_ERROR_NO_ERROR;
}
if (res != TRI_ERROR_NO_ERROR) {
return res;
}
@ -1413,7 +1376,7 @@ static TRI_json_t* JsonSkiplistIndex (TRI_index_t const* idx) {
for (size_t j = 0; j < skiplistIndex->_paths._length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*) TRI_AtVector(&skiplistIndex->_paths, j));
const TRI_shape_path_t* path = document->getShaper()->lookupAttributePathByPid(document->getShaper(), shape); // ONLY IN INDEX, PROTECTED by RUNTIME
TRI_shape_path_t const* path = document->getShaper()->lookupAttributePathByPid(document->getShaper(), shape); // ONLY IN INDEX, PROTECTED by RUNTIME
if (path == nullptr) {
TRI_Free(TRI_CORE_MEM_ZONE, (void*) fieldList);
@ -1462,7 +1425,6 @@ static int RemoveSkiplistIndex (TRI_index_t* idx,
skiplistElement._subObjects = static_cast<TRI_shaped_sub_t*>(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_sub_t) * skiplistIndex->_paths._length, false));
if (skiplistElement._subObjects == nullptr) {
LOG_WARNING("out-of-memory in InsertSkiplistIndex");
return TRI_ERROR_OUT_OF_MEMORY;
}
@ -1476,25 +1438,16 @@ static int RemoveSkiplistIndex (TRI_index_t* idx,
// Error returned generally implies that the document never was part of the
// skiplist index
// ..........................................................................
if (res != TRI_ERROR_NO_ERROR) {
// ........................................................................
// Deallocate memory allocated to skiplistElement.fields above
// ........................................................................
TRI_Free(TRI_UNKNOWN_MEM_ZONE, skiplistElement._subObjects);
// ........................................................................
// It may happen that the document does not have the necessary attributes
// to have participated in the hash index. In this case, we do not
// report an error to the calling procedure.
// ........................................................................
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING) {
if (res == TRI_ERROR_ARANGO_INDEX_DOCUMENT_ATTRIBUTE_MISSING) {
if (idx->_sparse) {
TRI_Free(TRI_UNKNOWN_MEM_ZONE, skiplistElement._subObjects);
return TRI_ERROR_NO_ERROR;
}
res = TRI_ERROR_NO_ERROR;
}
if (res != TRI_ERROR_NO_ERROR) {
return res;
}
@ -1521,6 +1474,7 @@ TRI_index_t* TRI_CreateSkiplistIndex (TRI_document_collection_t* document,
TRI_idx_iid_t iid,
TRI_vector_pointer_t* fields,
TRI_vector_t* paths,
bool sparse,
bool unique) {
TRI_skiplist_index_t* skiplistIndex = static_cast<TRI_skiplist_index_t*>(TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_skiplist_index_t), false));
@ -1530,7 +1484,7 @@ TRI_index_t* TRI_CreateSkiplistIndex (TRI_document_collection_t* document,
TRI_index_t* idx = &skiplistIndex->base;
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_SKIPLIST_INDEX, document, unique, false);
TRI_InitIndex(idx, iid, TRI_IDX_TYPE_SKIPLIST_INDEX, document, sparse, unique);
idx->memory = MemorySkiplistIndex;
idx->json = JsonSkiplistIndex;
@ -1541,21 +1495,10 @@ TRI_index_t* TRI_CreateSkiplistIndex (TRI_document_collection_t* document,
// Copy the contents of the shape list vector into a new vector and store this
// ...........................................................................
TRI_InitVector(&skiplistIndex->_paths, TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shape_pid_t));
for (size_t j = 0; j < paths->_length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(paths,j)));
TRI_PushBackVector(&skiplistIndex->_paths, &shape);
}
TRI_CopyPathVector(&skiplistIndex->_paths, paths);
TRI_InitVectorString(&idx->_fields, TRI_CORE_MEM_ZONE);
for (size_t j = 0; j < fields->_length; ++j) {
char const* name = static_cast<char const*>(fields->_buffer[j]);
char* copy = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, name);
TRI_PushBackVectorString(&idx->_fields, copy);
}
TRI_CopyDataFromVectorPointerVectorString(TRI_CORE_MEM_ZONE, &idx->_fields, fields);
skiplistIndex->_skiplistIndex = SkiplistIndex_new(document,
paths->_length,
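TRI_JsonIndex above only attaches the sparse attribute to hash and skiplist index descriptions, since the flag cannot be set on other index types. A hedged sketch of what an index listing would then look like (ids, ordering and the extra attributes are illustrative):

db.products.getIndexes();
// [
//   { "id" : "products/0", "type" : "primary", "unique" : true, ... },   // no "sparse"
//   { "id" : "products/123", "type" : "hash", "unique" : false, "sparse" : true,
//     "fields" : [ "a" ], "selectivityEstimate" : 1 }
// ]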

View File

@ -227,8 +227,8 @@ void TRI_InitIndex (TRI_index_t*,
TRI_idx_iid_t,
TRI_idx_type_e,
struct TRI_document_collection_t*,
bool, // unique
bool); // sparse
bool, // sparse
bool); // unique
// -----------------------------------------------------------------------------
// --SECTION-- public functions
@ -303,23 +303,6 @@ TRI_json_t* TRI_JsonIndex (TRI_memory_zone_t*,
void TRI_CopyPathVector (TRI_vector_t*,
TRI_vector_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief copies all pointers from a vector
////////////////////////////////////////////////////////////////////////////////
void TRI_CopyFieldsVector (TRI_vector_string_t*,
TRI_vector_pointer_t const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief converts a path vector into a field list
///
/// Note that you must free the field list itself, but not the fields. They
/// belong to the shaper.
////////////////////////////////////////////////////////////////////////////////
char const** TRI_FieldListByPathList (TRI_shaper_t const*,
TRI_vector_t const*);
// -----------------------------------------------------------------------------
// --SECTION-- PRIMARY INDEX
// -----------------------------------------------------------------------------
@ -379,6 +362,7 @@ TRI_index_t* TRI_CreateSkiplistIndex (struct TRI_document_collection_t*,
TRI_idx_iid_t,
TRI_vector_pointer_t*,
TRI_vector_t*,
bool,
bool);
////////////////////////////////////////////////////////////////////////////////

View File

@ -63,7 +63,7 @@ var API = "_api/index";
/// @EXAMPLE_ARANGOSH_RUN{RestIndexAllIndexes}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
///
@ -120,8 +120,8 @@ function get_api_indexes (req, res) {
/// - *type*: the index type
///
/// All other attributes are type-dependent. For example, some indexes provide
/// a *unique* flag, whereas others don't. Some indexes can also provide a
/// selectivity estimate in the *selectivityEstimate* attribute.
/// *unique* or *sparse* flags, whereas others don't. Some indexes also provide
/// a selectivity estimate in the *selectivityEstimate* attribute.
///
/// @RESTRETURNCODES
///
@ -254,7 +254,7 @@ function get_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateNewCapConstraint}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = {
@ -343,7 +343,7 @@ function get_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateGeoLocation}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "geo", "fields" : [ "b" ] }';
@ -360,7 +360,7 @@ function get_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateGeoLatitudeLongitude}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "geo", "fields" : [ "e", "f" ] }';
@ -399,13 +399,14 @@ function get_api_index (req, res) {
///
/// - *unique*: If *true*, then create a unique index.
///
/// - *sparse*: If *true*, then create a sparse index.
///
/// **Note**: unique indexes on non-shard keys are not supported in a cluster.
///
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{200}
/// If the index already exists, then a *HTTP 200* is
/// returned.
/// If the index already exists, then a *HTTP 200* is returned.
///
/// @RESTRETURNCODE{201}
/// If the index does not already exist and could be created, then a *HTTP 201*
@ -426,7 +427,7 @@ function get_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateNewUniqueConstraint}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "hash", "unique" : true, "fields" : [ "a", "b" ] }';
@ -438,12 +439,12 @@ function get_api_index (req, res) {
/// logJsonResponse(response);
/// @END_EXAMPLE_ARANGOSH_RUN
///
/// Creating a hash index:
/// Creating a non-unique hash index:
///
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateNewHashIndex}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "hash", "unique" : false, "fields" : [ "a", "b" ] }';
@ -454,6 +455,23 @@ function get_api_index (req, res) {
///
/// logJsonResponse(response);
/// @END_EXAMPLE_ARANGOSH_RUN
///
/// Creating a sparse index:
///
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateSparseHashIndex}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "hash", "unique" : false, "sparse" : true, "fields" : [ "a" ] }';
///
/// var response = logCurlRequest('POST', url, body);
///
/// assert(response.code === 201);
///
/// logJsonResponse(response);
/// @END_EXAMPLE_ARANGOSH_RUN
/// @endDocuBlock
////////////////////////////////////////////////////////////////////////////////
@ -482,6 +500,8 @@ function get_api_index (req, res) {
///
/// - *unique*: If *true*, then create a unique index.
///
/// - *sparse*: If *true*, then create a sparse index.
///
/// **Note**: unique indexes on non-shard keys are not supported in a cluster.
///
/// @RESTRETURNCODES
@ -504,12 +524,12 @@ function get_api_index (req, res) {
///
/// @EXAMPLES
///
/// Creating a skiplist:
/// Creating a skiplist index:
///
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateNewSkiplist}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "skiplist", "unique" : false, "fields" : [ "a", "b" ] }';
@ -520,6 +540,23 @@ function get_api_index (req, res) {
///
/// logJsonResponse(response);
/// @END_EXAMPLE_ARANGOSH_RUN
///
/// Creating a sparse skiplist index:
///
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateSparseSkiplist}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type": "skiplist", "unique" : false, "sparse" : true, "fields" : [ "a" ] }';
///
/// var response = logCurlRequest('POST', url, body);
///
/// assert(response.code === 201);
///
/// logJsonResponse(response);
/// @END_EXAMPLE_ARANGOSH_RUN
/// @endDocuBlock
////////////////////////////////////////////////////////////////////////////////
@ -572,7 +609,7 @@ function get_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexCreateNewFulltext}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index?collection=" + cn;
/// var body = '{ "type" : "fulltext", "fields" : [ "text" ] }';
@ -632,6 +669,13 @@ function get_api_index (req, res) {
/// **Note**: Unique indexes on non-shard keys are not supported in a
/// cluster.
///
/// Hash and skiplist indexes can optionally be created in a sparse
/// variant. A sparse index will be created if the *sparse* attribute in
/// the index details is set to *true*. Sparse indexes do not index documents
/// for which the index attributes are null or not set. This can lead to
/// smaller index sizes, but disables the use of sparse indexes for
/// certain types of queries.
///
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{200}
@ -720,7 +764,7 @@ function post_api_index (req, res) {
/// @EXAMPLE_ARANGOSH_RUN{RestIndexDeleteUniqueSkiplist}
/// var cn = "products";
/// db._drop(cn);
/// db._create(cn, { waitForSync: true });
/// db._create(cn);
///
/// var url = "/_api/index/" + db.products.ensureSkiplist("a","b").id;
///
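The documentation changes above introduce the sparse attribute for hash and skiplist index creation and note that these index types report unique and sparse flags when indexes are listed. A hedged read-back sketch in the same arangosh documentation style (helper and names as in the surrounding examples):

var url = "/_api/index?collection=products";
var response = logCurlRequest('GET', url);
// the JSON body lists the indexes; each hash and skiplist entry carries
// "unique" and "sparse", while other index types omit "sparse" entirely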

View File

@ -247,10 +247,8 @@
"SIMPLE_CLIENT_COULD_NOT_CONNECT" : { "code" : 2001, "message" : "could not connect to server" },
"SIMPLE_CLIENT_COULD_NOT_WRITE" : { "code" : 2002, "message" : "could not write to server" },
"SIMPLE_CLIENT_COULD_NOT_READ" : { "code" : 2003, "message" : "could not read from server" },
"RESULT_KEY_EXISTS" : { "code" : 10000, "message" : "element not inserted into structure, because key already exists" },
"RESULT_ELEMENT_EXISTS" : { "code" : 10001, "message" : "element not inserted into structure, because it already exists" },
"RESULT_KEY_NOT_FOUND" : { "code" : 10002, "message" : "key not found in structure" },
"RESULT_ELEMENT_NOT_FOUND" : { "code" : 10003, "message" : "element not found in structure" },
"RESULT_ELEMENT_EXISTS" : { "code" : 10000, "message" : "element not inserted into structure, because it already exists" },
"RESULT_ELEMENT_NOT_FOUND" : { "code" : 10001, "message" : "element not found in structure" },
"ERROR_APP_ALREADY_EXISTS" : { "code" : 20000, "message" : "newest version of app already installed" },
"ERROR_QUEUE_ALREADY_EXISTS" : { "code" : 21000, "message" : "named queue already exists" },
"ERROR_DISPATCHER_IS_STOPPING" : { "code" : 21001, "message" : "dispatcher stopped" },

View File

@ -103,10 +103,10 @@
processData: false,
async: false,
success: function() {
returnVal = true;
returnVal = true;
},
error: function(data) {
returnVal = data;
returnVal = data;
}
});
return returnVal;
@ -120,10 +120,10 @@
url: "/_api/index/"+ this.get("name") +"/"+encodeURIComponent(id),
async: false,
success: function () {
returnval = true;
returnval = true;
},
error: function () {
returnval = false;
returnval = false;
}
});
return returnval;

View File

@ -165,6 +165,7 @@ function printIndexes (indexes) {
var maxIdLen = String("Id").length;
var maxCollectionLen = String("Collection").length;
var maxUniqueLen = String("Unique").length;
var maxSparseLen = String("Sparse").length;
var maxTypeLen = String("Type").length;
var maxSelectivityLen = String("Selectivity Est.").length;
var maxFieldsLen = String("Fields").length;
@ -188,8 +189,9 @@ function printIndexes (indexes) {
});
var line = " " + pad(1 + maxIdLen - String("Id").length) + header("Id") + " " +
header("Type") + pad(1 + maxTypeLen - "Type".length) + " " +
header("Unique") + pad(1 + maxUniqueLen - "Unique".length) + " " +
header("Collection") + pad(1 + maxCollectionLen - "Collection".length) + " " +
header("Unique") + pad(1 + maxUniqueLen - "Unique".length) + " " +
header("Sparse") + pad(1 + maxSparseLen - "Sparse".length) + " " +
header("Selectivity Est.") + " " +
header("Fields") + pad(1 + maxFieldsLen - "Fields".length) + " " +
header("Ranges");
@ -197,6 +199,7 @@ function printIndexes (indexes) {
for (var i = 0; i < indexes.length; ++i) {
var uniqueness = (indexes[i].unique ? "true" : "false");
var sparsity = (indexes[i].hasOwnProperty("sparse") ? (indexes[i].sparse ? "true" : "false") : "n/a");
var fields = indexes[i].fields.map(attribute).join(", ");
var fieldsLen = indexes[i].fields.map(passthru).join(", ").length;
var ranges = "[ " + indexes[i].ranges + " ]";
@ -207,8 +210,9 @@ function printIndexes (indexes) {
line = " " +
pad(1 + maxIdLen - String(indexes[i].node).length) + variable(String(indexes[i].node)) + " " +
keyword(indexes[i].type) + pad(1 + maxTypeLen - indexes[i].type.length) + " " +
value(uniqueness) + pad(1 + maxUniqueLen - uniqueness.length) + " " +
collection(indexes[i].collection) + pad(1 + maxCollectionLen - indexes[i].collection.length) + " " +
value(uniqueness) + pad(1 + maxUniqueLen - uniqueness.length) + " " +
value(sparsity) + pad(1 + maxSparseLen - sparsity.length) + " " +
pad(1 + maxSelectivityLen - selectivity.length) + value(selectivity) + " " +
fields + pad(1 + maxFieldsLen - fieldsLen) + " " +
ranges;
@ -286,7 +290,7 @@ function processQuery (query, explain) {
}
return variableName(node);
case "collection":
return node.name + " " + annotation("/* all documents from collection */");
return collection(node.name) + " " + annotation("/* all collection documents */");
case "value":
return value(JSON.stringify(node.value));
case "object":
@ -421,7 +425,7 @@ function processQuery (query, explain) {
index.collection = node.collection;
index.node = node.id;
indexes.push(index);
return keyword("FOR") + " " + variableName(node.outVariable) + " " + keyword("IN") + " " + collection(node.collection) + " " + annotation("/* " + (node.reverse ? "reverse " : "") + "index scan using " + node.index.type + " index") + annotation("*/");
return keyword("FOR") + " " + variableName(node.outVariable) + " " + keyword("IN") + " " + collection(node.collection) + " " + annotation("/* " + (node.reverse ? "reverse " : "") + node.index.type + " index scan") + annotation("*/");
case "CalculationNode":
return keyword("LET") + " " + variableName(node.outVariable) + " = " + buildExpression(node.expression);
case "FilterNode":

View File

@ -98,6 +98,7 @@
<th class="collectionInfoTh">ID</th>
<th class="collectionInfoTh">Type</th>
<th class="collectionInfoTh">Unique</th>
<th class="collectionInfoTh">Sparse</th>
<th class="collectionInfoTh">Selectivity Est.</th>
<th class="collectionInfoTh">Fields</th>
<th class="collectionInfoTh">Action</th>
@ -233,6 +234,19 @@
</div>
</th>
</tr>
<tr>
<th class="collectionTh">Sparse:</th>
<th>
<input id="newHashSparse" type="checkbox" name="newHashSparse" value="true">
</th>
<th class="tooltipInfoTh">
<div>
<a class="index-tooltip" data-toggle="tooltip" data-placement="left" title="If true, then create a sparse index.">
<span rel="tooltip" class="arangoicon icon_arangodb_info"></span>
</a>
</div>
</th>
</tr>
</table>
</div>
<div id="newIndexTypeFulltext" class="newIndexClass" style="display:none">
@ -289,9 +303,21 @@
</div>
</th>
</tr>
<tr>
<th class="collectionTh">Sparse:</th>
<th>
<input id="newSkiplistSparse" type="checkbox" name="newSkiplistSparse" value="true">
</th>
<th class="tooltipInfoTh">
<div>
<a class="index-tooltip" data-toggle="tooltip" data-placement="left" title="If true, then create a sparse index.">
<span rel="tooltip" class="arangoicon icon_arangodb_info"></span>
</a>
</div>
</th>
</tr>
</table>
</div>
<div class="index-button-bar">
<button id="createIndex" class="button-success">Save</button>
<button id="cancelIndex" class="button-danger">Cancel</button>

View File

@ -230,6 +230,7 @@
<th class="collectionInfoTh">ID</th>
<th class="collectionInfoTh">Type</th>
<th class="collectionInfoTh">Unique</th>
<th class="collectionInfoTh">Sparse</th>
<th class="collectionInfoTh">Selectivity Est.</th>
<th class="collectionInfoTh">Fields</th>
</tr>
@ -249,11 +250,13 @@
(v.selectivityEstimate * 100).toFixed(2) + "%" :
"n/a"
);
var sparse = (v.hasOwnProperty("sparse") ? v.sparse : "n/a");
%>
<tr>
<th class="collectionInfoTh modal-text"><%=indexId%></th>
<th class="collectionInfoTh modal-text"><%=v.type%></th>
<th class="collectionInfoTh modal-text"><%=v.unique%></th>
<th class="collectionInfoTh modal-text"><%=sparse%></th>
<th class="collectionInfoTh modal-text"><%=selectivity%></th>
<th class="collectionInfoTh modal-text"><%=fieldString%></th>
</tr>

View File

@ -988,6 +988,7 @@
var postParameter = {};
var fields;
var unique;
var sparse;
switch (indexType) {
case 'Cap':
@ -1016,10 +1017,12 @@
case 'Hash':
fields = $('#newHashFields').val();
unique = self.checkboxToValue('#newHashUnique');
sparse = self.checkboxToValue('#newHashSparse');
postParameter = {
type: 'hash',
fields: self.stringToArray(fields),
unique: unique
unique: unique,
sparse: sparse
};
break;
case 'Fulltext':
@ -1034,10 +1037,12 @@
case 'Skiplist':
fields = $('#newSkiplistFields').val();
unique = self.checkboxToValue('#newSkiplistUnique');
sparse = self.checkboxToValue('#newSkiplistSparse');
postParameter = {
type: 'skiplist',
fields: self.stringToArray(fields),
unique: unique
unique: unique,
sparse: sparse
};
break;
}
@ -1117,12 +1122,14 @@
(v.selectivityEstimate * 100).toFixed(2) + "%" :
"n/a"
);
var sparse = (v.hasOwnProperty("sparse") ? v.sparse : "n/a");
$('#collectionEditIndexTable').append(
'<tr>' +
'<th class=' + JSON.stringify(cssClass) + '>' + indexId + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + v.type + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + v.unique + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + sparse + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + selectivity + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + fieldString + '</th>' +
'<th class=' + JSON.stringify(cssClass) + '>' + actionString + '</th>' +

View File

@ -188,7 +188,7 @@
var sizeBox = $('#querySize');
sizeBox.empty();
[ 100, 250, 500, 1000, 2500, 5000 ].forEach(function (value) {
[ 100, 250, 500, 1000, 2500, 5000, 10000 ].forEach(function (value) {
sizeBox.append('<option value="' + _.escape(value) + '"' +
(querySize === value ? ' selected' : '') +
'>' + _.escape(value) + ' results</option>');

View File

@ -247,10 +247,8 @@
"SIMPLE_CLIENT_COULD_NOT_CONNECT" : { "code" : 2001, "message" : "could not connect to server" },
"SIMPLE_CLIENT_COULD_NOT_WRITE" : { "code" : 2002, "message" : "could not write to server" },
"SIMPLE_CLIENT_COULD_NOT_READ" : { "code" : 2003, "message" : "could not read from server" },
"RESULT_KEY_EXISTS" : { "code" : 10000, "message" : "element not inserted into structure, because key already exists" },
"RESULT_ELEMENT_EXISTS" : { "code" : 10001, "message" : "element not inserted into structure, because it already exists" },
"RESULT_KEY_NOT_FOUND" : { "code" : 10002, "message" : "key not found in structure" },
"RESULT_ELEMENT_NOT_FOUND" : { "code" : 10003, "message" : "element not found in structure" },
"RESULT_ELEMENT_EXISTS" : { "code" : 10000, "message" : "element not inserted into structure, because it already exists" },
"RESULT_ELEMENT_NOT_FOUND" : { "code" : 10001, "message" : "element not found in structure" },
"ERROR_APP_ALREADY_EXISTS" : { "code" : 20000, "message" : "newest version of app already installed" },
"ERROR_QUEUE_ALREADY_EXISTS" : { "code" : 21000, "message" : "named queue already exists" },
"ERROR_DISPATCHER_IS_STOPPING" : { "code" : 21001, "message" : "dispatcher stopped" },

View File

@ -164,6 +164,7 @@ function printIndexes (indexes) {
var maxIdLen = String("Id").length;
var maxCollectionLen = String("Collection").length;
var maxUniqueLen = String("Unique").length;
var maxSparseLen = String("Sparse").length;
var maxTypeLen = String("Type").length;
var maxSelectivityLen = String("Selectivity Est.").length;
var maxFieldsLen = String("Fields").length;
@ -187,8 +188,9 @@ function printIndexes (indexes) {
});
var line = " " + pad(1 + maxIdLen - String("Id").length) + header("Id") + " " +
header("Type") + pad(1 + maxTypeLen - "Type".length) + " " +
header("Unique") + pad(1 + maxUniqueLen - "Unique".length) + " " +
header("Collection") + pad(1 + maxCollectionLen - "Collection".length) + " " +
header("Unique") + pad(1 + maxUniqueLen - "Unique".length) + " " +
header("Sparse") + pad(1 + maxSparseLen - "Sparse".length) + " " +
header("Selectivity Est.") + " " +
header("Fields") + pad(1 + maxFieldsLen - "Fields".length) + " " +
header("Ranges");
@ -196,6 +198,7 @@ function printIndexes (indexes) {
for (var i = 0; i < indexes.length; ++i) {
var uniqueness = (indexes[i].unique ? "true" : "false");
var sparsity = (indexes[i].hasOwnProperty("sparse") ? (indexes[i].sparse ? "true" : "false") : "n/a");
var fields = indexes[i].fields.map(attribute).join(", ");
var fieldsLen = indexes[i].fields.map(passthru).join(", ").length;
var ranges = "[ " + indexes[i].ranges + " ]";
@ -206,8 +209,9 @@ function printIndexes (indexes) {
line = " " +
pad(1 + maxIdLen - String(indexes[i].node).length) + variable(String(indexes[i].node)) + " " +
keyword(indexes[i].type) + pad(1 + maxTypeLen - indexes[i].type.length) + " " +
value(uniqueness) + pad(1 + maxUniqueLen - uniqueness.length) + " " +
collection(indexes[i].collection) + pad(1 + maxCollectionLen - indexes[i].collection.length) + " " +
value(uniqueness) + pad(1 + maxUniqueLen - uniqueness.length) + " " +
value(sparsity) + pad(1 + maxSparseLen - sparsity.length) + " " +
pad(1 + maxSelectivityLen - selectivity.length) + value(selectivity) + " " +
fields + pad(1 + maxFieldsLen - fieldsLen) + " " +
ranges;
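The new Sparse column follows the same alignment scheme as the existing columns: each value is padded out to the widest entry of its column. A minimal standalone sketch of that scheme, using a stand-in pad() helper (the real helpers live elsewhere in explain.js) and assuming pad(n) yields n spaces:

    // stand-in for the pad() helper: n spaces
    function pad (n) {
      return new Array(n + 1).join(" ");
    }

    var maxSparseLen = String("Sparse").length;
    var sparsity = "true";
    // cell value plus trailing padding so the next column starts at a fixed offset
    var cell = sparsity + pad(1 + maxSparseLen - sparsity.length);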
@ -285,7 +289,7 @@ function processQuery (query, explain) {
}
return variableName(node);
case "collection":
return node.name + " " + annotation("/* all documents from collection */");
return collection(node.name) + " " + annotation("/* all collection documents */");
case "value":
return value(JSON.stringify(node.value));
case "object":
@ -420,7 +424,7 @@ function processQuery (query, explain) {
index.collection = node.collection;
index.node = node.id;
indexes.push(index);
return keyword("FOR") + " " + variableName(node.outVariable) + " " + keyword("IN") + " " + collection(node.collection) + " " + annotation("/* " + (node.reverse ? "reverse " : "") + "index scan using " + node.index.type + " index") + annotation("*/");
return keyword("FOR") + " " + variableName(node.outVariable) + " " + keyword("IN") + " " + collection(node.collection) + " " + annotation("/* " + (node.reverse ? "reverse " : "") + node.index.type + " index scan") + annotation("*/");
case "CalculationNode":
return keyword("LET") + " " + variableName(node.outVariable) + " = " + buildExpression(node.expression);
case "FilterNode":

View File

@ -303,19 +303,19 @@ function isContained (doc, example) {
}
////////////////////////////////////////////////////////////////////////////////
/// @brief whether or not a unique index can be used
/// @brief whether or not the example contains null attributes
////////////////////////////////////////////////////////////////////////////////
function isUnique (example) {
function containsNullAttributes (example) {
var k;
for (k in example) {
if (example.hasOwnProperty(k)) {
if (example[k] === null) {
return false;
if (example[k] === null || example[k] === undefined) {
return true;
}
}
}
return true;
return false;
}
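For illustration, the new predicate evaluates as follows (the example documents are editor-chosen, not part of the commit):

    containsNullAttributes({ name: "foo", age: 42 });   // false
    containsNullAttributes({ name: null, age: 42 });    // true
    containsNullAttributes({ name: undefined });        // true
    containsNullAttributes({});                         // false

    // byExample (next hunk) uses this to restrict the candidate indexes to
    // non-sparse, non-unique ones, because a sparse index does not contain
    // documents whose indexed attributes are null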
////////////////////////////////////////////////////////////////////////////////
@ -414,13 +414,15 @@ function byExample (data) {
else if (keys.length > 0) {
// try these index types
var checks = [
{ type: "hash", fields: keys, unique: false },
{ type: "skiplist", fields: keys, unique: false }
{ type: "hash", fields: keys },
{ type: "skiplist", fields: keys }
];
if (isUnique(example)) {
checks.push({ type: "hash", fields: keys, unique: true });
checks.push({ type: "skiplist", fields: keys, unique: true });
if (containsNullAttributes(example)) {
checks.forEach(function(check) {
check.sparse = false;
check.unique = false;
});
}
for (k = 0; k < checks.length; ++k) {

View File

@ -348,10 +348,8 @@ SIMPLE_CLIENT_COULD_NOT_READ,2003,"could not read from server","Will be raised w
## results, which are not errors
################################################################################
RESULT_KEY_EXISTS,10000,"element not inserted into structure, because key already exists","Will be returned if the element was not inserted because the key already exists."
RESULT_ELEMENT_EXISTS,10001,"element not inserted into structure, because it already exists","Will be returned if the element was not inserted because it already exists."
RESULT_KEY_NOT_FOUND,10002,"key not found in structure","Will be returned if the key was not found in the structure."
RESULT_ELEMENT_NOT_FOUND,10003,"element not found in structure","Will be returned if the element was not found in the structure."
RESULT_ELEMENT_EXISTS,10000,"element not inserted into structure, because it already exists","Will be returned if the element was not inserted because it already exists."
RESULT_ELEMENT_NOT_FOUND,10001,"element not found in structure","Will be returned if the element was not found in the structure."
################################################################################
## foxx app update via github

View File

@ -243,9 +243,7 @@ void TRI_InitialiseErrorMessages () {
REG_ERROR(SIMPLE_CLIENT_COULD_NOT_CONNECT, "could not connect to server");
REG_ERROR(SIMPLE_CLIENT_COULD_NOT_WRITE, "could not write to server");
REG_ERROR(SIMPLE_CLIENT_COULD_NOT_READ, "could not read from server");
REG_ERROR(RESULT_KEY_EXISTS, "element not inserted into structure, because key already exists");
REG_ERROR(RESULT_ELEMENT_EXISTS, "element not inserted into structure, because it already exists");
REG_ERROR(RESULT_KEY_NOT_FOUND, "key not found in structure");
REG_ERROR(RESULT_ELEMENT_NOT_FOUND, "element not found in structure");
REG_ERROR(ERROR_APP_ALREADY_EXISTS, "newest version of app already installed");
REG_ERROR(ERROR_QUEUE_ALREADY_EXISTS, "named queue already exists");

View File

@ -581,14 +581,9 @@
/// Will be raised when the client could not write data.
/// - 2003: @LIT{could not read from server}
/// Will be raised when the client could not read data.
/// - 10000: @LIT{element not inserted into structure, because key already exists}
/// Will be returned if the element was not inserted because the key already
/// exists.
/// - 10001: @LIT{element not inserted into structure, because it already exists}
/// - 10000: @LIT{element not inserted into structure, because it already exists}
/// Will be returned if the element was not inserted because it already exists.
/// - 10002: @LIT{key not found in structure}
/// Will be returned if the key was not found in the structure.
/// - 10003: @LIT{element not found in structure}
/// - 10001: @LIT{element not found in structure}
/// Will be returned if the element was not found in the structure.
/// - 20000: @LIT{newest version of app already installed}
/// newest version of app already installed
@ -3075,45 +3070,24 @@ void TRI_InitialiseErrorMessages ();
#define TRI_SIMPLE_CLIENT_COULD_NOT_READ (2003)
////////////////////////////////////////////////////////////////////////////////
/// @brief 10000: RESULT_KEY_EXISTS
///
/// element not inserted into structure, because key already exists
///
/// Will be returned if the element was not inserted because the key already
/// exists.
////////////////////////////////////////////////////////////////////////////////
#define TRI_RESULT_KEY_EXISTS (10000)
////////////////////////////////////////////////////////////////////////////////
/// @brief 10001: RESULT_ELEMENT_EXISTS
/// @brief 10000: RESULT_ELEMENT_EXISTS
///
/// element not inserted into structure, because it already exists
///
/// Will be returned if the element was not inserted because it already exists.
////////////////////////////////////////////////////////////////////////////////
#define TRI_RESULT_ELEMENT_EXISTS (10001)
#define TRI_RESULT_ELEMENT_EXISTS (10000)
////////////////////////////////////////////////////////////////////////////////
/// @brief 10002: RESULT_KEY_NOT_FOUND
///
/// key not found in structure
///
/// Will be returned if the key was not found in the structure.
////////////////////////////////////////////////////////////////////////////////
#define TRI_RESULT_KEY_NOT_FOUND (10002)
////////////////////////////////////////////////////////////////////////////////
/// @brief 10003: RESULT_ELEMENT_NOT_FOUND
/// @brief 10001: RESULT_ELEMENT_NOT_FOUND
///
/// element not found in structure
///
/// Will be returned if the element was not found in the structure.
////////////////////////////////////////////////////////////////////////////////
#define TRI_RESULT_ELEMENT_NOT_FOUND (10003)
#define TRI_RESULT_ELEMENT_NOT_FOUND (10001)
////////////////////////////////////////////////////////////////////////////////
/// @brief 20000: ERROR_APP_ALREADY_EXISTS