
Merge branch 'devel' of github.com:triAGENS/AvocadoDB into devel

Heiko Kernbach 2012-05-09 11:17:26 +02:00
commit 59a9c98db1
20 changed files with 881 additions and 687 deletions


@ -37,26 +37,29 @@
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief hash a field name
/// @brief create a vector with an attribute access struct in it
////////////////////////////////////////////////////////////////////////////////
static uint64_t HashFieldAccess (TRI_associative_pointer_t* array,
void const* element) {
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) element;
static TRI_vector_pointer_t* Vectorize (TRI_aql_context_t* const context,
TRI_aql_field_access_t* fieldAccess) {
TRI_vector_pointer_t* vector;
return TRI_FnvHashString(fieldAccess->_fieldName);
}
assert(context);
if (!fieldAccess) {
return NULL;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief comparison function used to determine field name equality
////////////////////////////////////////////////////////////////////////////////
vector = (TRI_vector_pointer_t*) TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_vector_pointer_t), false);
if (!vector) {
// OOM
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return NULL;
}
static bool EqualFieldAccess (TRI_associative_pointer_t* array,
void const* key,
void const* element) {
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) element;
TRI_InitVectorPointer(vector, TRI_UNKNOWN_MEM_ZONE);
TRI_PushBackVectorPointer(vector, fieldAccess);
return TRI_EqualString(key, fieldAccess->_fieldName);
return vector;
}
////////////////////////////////////////////////////////////////////////////////
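A minimal usage sketch of the new Vectorize helper (hedged: CreateAccessForNode appears in a later hunk of this commit; context, field, operator and node are assumed to be in scope):

TRI_aql_field_access_t* fieldAccess = CreateAccessForNode(context, field, operator, node);
TRI_vector_pointer_t* vector = Vectorize(context, fieldAccess);

if (vector == NULL) {
  // either fieldAccess was NULL or the allocation failed;
  // on allocation failure the context already carries TRI_ERROR_OUT_OF_MEMORY
}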
@ -130,7 +133,8 @@ static void FreeAccess (TRI_aql_context_t* const context,
TRI_Free(TRI_UNKNOWN_MEM_ZONE, fieldAccess->_fieldName);
TRI_Free(TRI_UNKNOWN_MEM_ZONE, fieldAccess);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief merge two access structures using a logical AND
///
@ -1071,18 +1075,17 @@ static TRI_aql_field_access_t* MergeOrRangeDouble (TRI_aql_context_t* const cont
}
////////////////////////////////////////////////////////////////////////////////
/// @brief merge two access structures using either logical AND or OR
/// @brief merge two access structures using a logical AND
////////////////////////////////////////////////////////////////////////////////
static TRI_aql_field_access_t* MergeAccess (TRI_aql_context_t* const context,
const TRI_aql_logical_e logicalType,
TRI_aql_field_access_t* lhs,
TRI_aql_field_access_t* rhs) {
assert(context);
assert(lhs);
assert(rhs);
assert(logicalType == TRI_AQL_LOGICAL_AND || logicalType == TRI_AQL_LOGICAL_OR);
static TRI_aql_field_access_t* MergeAttributeAccessAnd (TRI_aql_context_t* const context,
TRI_aql_field_access_t* lhs,
TRI_aql_field_access_t* rhs) {
assert(context);
assert(lhs);
assert(rhs);
assert(lhs->_fieldName != NULL);
assert(rhs->_fieldName != NULL);
@ -1095,45 +1098,176 @@ static TRI_aql_field_access_t* MergeAccess (TRI_aql_context_t* const context,
assert(lhs->_type <= rhs->_type);
if (logicalType == TRI_AQL_LOGICAL_AND) {
// logical AND
switch (lhs->_type) {
case TRI_AQL_ACCESS_IMPOSSIBLE:
return MergeAndImpossible(context, lhs, rhs);
case TRI_AQL_ACCESS_ALL:
return MergeAndAll(context, lhs, rhs);
case TRI_AQL_ACCESS_EXACT:
return MergeAndExact(context, lhs, rhs);
case TRI_AQL_ACCESS_LIST:
return MergeAndList(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_SINGLE:
return MergeAndRangeSingle(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_DOUBLE:
return MergeAndRangeDouble(context, lhs, rhs);
}
}
else {
// logical OR
switch (lhs->_type) {
case TRI_AQL_ACCESS_IMPOSSIBLE:
return MergeOrImpossible(context, lhs, rhs);
case TRI_AQL_ACCESS_ALL:
return MergeOrAll(context, lhs, rhs);
case TRI_AQL_ACCESS_EXACT:
return MergeOrExact(context, lhs, rhs);
case TRI_AQL_ACCESS_LIST:
return MergeOrList(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_SINGLE:
return MergeOrRangeSingle(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_DOUBLE:
return MergeOrRangeDouble(context, lhs, rhs);
}
switch (lhs->_type) {
case TRI_AQL_ACCESS_IMPOSSIBLE:
return MergeAndImpossible(context, lhs, rhs);
case TRI_AQL_ACCESS_ALL:
return MergeAndAll(context, lhs, rhs);
case TRI_AQL_ACCESS_EXACT:
return MergeAndExact(context, lhs, rhs);
case TRI_AQL_ACCESS_LIST:
return MergeAndList(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_SINGLE:
return MergeAndRangeSingle(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_DOUBLE:
return MergeAndRangeDouble(context, lhs, rhs);
}
assert(false);
return NULL;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief merge two access structures using a logical OR
////////////////////////////////////////////////////////////////////////////////
static TRI_aql_field_access_t* MergeAttributeAccessOr (TRI_aql_context_t* const context,
TRI_aql_field_access_t* lhs,
TRI_aql_field_access_t* rhs) {
assert(context);
assert(lhs);
assert(rhs);
assert(lhs->_fieldName != NULL);
assert(rhs->_fieldName != NULL);
if (lhs->_type > rhs->_type) {
// swap operands so they are always sorted
TRI_aql_field_access_t* tmp = lhs;
lhs = rhs;
rhs = tmp;
}
assert(lhs->_type <= rhs->_type);
switch (lhs->_type) {
case TRI_AQL_ACCESS_IMPOSSIBLE:
return MergeOrImpossible(context, lhs, rhs);
case TRI_AQL_ACCESS_ALL:
return MergeOrAll(context, lhs, rhs);
case TRI_AQL_ACCESS_EXACT:
return MergeOrExact(context, lhs, rhs);
case TRI_AQL_ACCESS_LIST:
return MergeOrList(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_SINGLE:
return MergeOrRangeSingle(context, lhs, rhs);
case TRI_AQL_ACCESS_RANGE_DOUBLE:
return MergeOrRangeDouble(context, lhs, rhs);
}
assert(false);
return NULL;
}
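The former MergeAccess, which dispatched on a logicalType flag, is split into the two specialized functions above. A hedged equivalence sketch (MergeAccessCompat is a hypothetical name, not part of this commit):

static TRI_aql_field_access_t* MergeAccessCompat (TRI_aql_context_t* const context,
                                                  const TRI_aql_logical_e logicalType,
                                                  TRI_aql_field_access_t* lhs,
                                                  TRI_aql_field_access_t* rhs) {
  // dispatch to the specialized merge function, as the old MergeAccess did internally
  if (logicalType == TRI_AQL_LOGICAL_AND) {
    return MergeAttributeAccessAnd(context, lhs, rhs);
  }
  return MergeAttributeAccessOr(context, lhs, rhs);
}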
////////////////////////////////////////////////////////////////////////////////
/// @brief turn all accesses in a vector into an all items access (logical NOT)
////////////////////////////////////////////////////////////////////////////////
static TRI_vector_pointer_t* MakeAllVector (TRI_aql_context_t* const context,
TRI_vector_pointer_t* const fieldAccesses) {
size_t i, n;
if (!fieldAccesses) {
return NULL;
}
n = fieldAccesses->_length;
for (i = 0; i < n; ++i) {
// turn all field access values into an all items access
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) TRI_AtVectorPointer(fieldAccesses, i);
// modify the element in place
FreeAccessMembers(fieldAccess);
fieldAccess->_type = TRI_AQL_ACCESS_ALL;
}
return fieldAccesses;
}
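How MakeAllVector is used for a logical NOT (a hedged sketch; notOperand is a hypothetical variable, and the call pattern matches ProcessNode further below): a negated condition cannot be exploited for index lookups, so every access collected below the NOT is widened to an unrestricted one.

// for a condition like NOT (u.a == 1), the access collected for u.a
// is discarded and widened to TRI_AQL_ACCESS_ALL
TRI_vector_pointer_t* sub = ProcessNode(context, notOperand);
TRI_vector_pointer_t* widened = MakeAllVector(context, sub);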
////////////////////////////////////////////////////////////////////////////////
/// @brief merge two access vectors using either logical AND or OR
////////////////////////////////////////////////////////////////////////////////
static TRI_vector_pointer_t* MergeVectors (TRI_aql_context_t* const context,
const TRI_aql_logical_e logicalType,
TRI_vector_pointer_t* const lhs,
TRI_vector_pointer_t* const rhs) {
TRI_vector_pointer_t* result;
size_t i, n;
assert(context);
assert(logicalType == TRI_AQL_LOGICAL_AND || logicalType == TRI_AQL_LOGICAL_OR);
// if one of the vectors is empty, simply return the other one
if (!lhs) {
return rhs;
}
if (!rhs) {
return lhs;
}
// both vectors are non-empty
result = (TRI_vector_pointer_t*) TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_vector_pointer_t), false);
if (!result) {
// OOM
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return NULL;
}
TRI_InitVectorPointer(result, TRI_UNKNOWN_MEM_ZONE);
// copy elements from lhs into result vector
n = lhs->_length;
for (i = 0; i < n; ++i) {
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) TRI_AtVectorPointer(lhs, i);
TRI_PushBackVectorPointer(result, fieldAccess);
}
// can now free lhs vector
TRI_Free(TRI_UNKNOWN_MEM_ZONE, lhs);
// copy elements from rhs into result vector
n = rhs->_length;
for (i = 0; i < n; ++i) {
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) TRI_AtVectorPointer(rhs, i);
size_t j, len;
bool found = false;
// check if element is in result vector already
len = result->_length;
for (j = 0; j < len; ++j) {
TRI_aql_field_access_t* compareAccess = (TRI_aql_field_access_t*) TRI_AtVectorPointer(result, j);
if (TRI_EqualString(fieldAccess->_fieldName, compareAccess->_fieldName)) {
// found the element
if (logicalType == TRI_AQL_LOGICAL_AND) {
// store the merged access back at the matched slot
result->_buffer[j] = MergeAttributeAccessAnd(context, fieldAccess, compareAccess);
}
else {
result->_buffer[j] = MergeAttributeAccessOr(context, fieldAccess, compareAccess);
}
found = true;
break;
}
}
if (!found) {
TRI_PushBackVectorPointer(result, fieldAccess);
}
}
// can now free rhs vector
TRI_Free(TRI_UNKNOWN_MEM_ZONE, rhs);
return result;
}
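A worked example of the merge (hedged; lhsVector and rhsVector are hypothetical): accesses for the same attribute are merged according to the logical operator, while accesses for distinct attributes are simply collected.

// lhsVector holds { "u.a": == 1 }
// rhsVector holds { "u.a": < 10, "u.b": == 2 }
// under AND, the result holds the single merged access for "u.a"
// plus the untouched access for "u.b"
TRI_vector_pointer_t* merged = MergeVectors(context,
                                            TRI_AQL_LOGICAL_AND,
                                            lhsVector,
                                            rhsVector);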
////////////////////////////////////////////////////////////////////////////////
/// @brief create an access structure for the given node and operator
////////////////////////////////////////////////////////////////////////////////
@ -1175,6 +1309,10 @@ static TRI_aql_field_access_t* CreateAccessForNode (TRI_aql_context_t* const con
fieldAccess->_type = TRI_AQL_ACCESS_EXACT;
fieldAccess->_value._value = value;
}
else if (operator == AQL_NODE_OPERATOR_BINARY_NE) {
// create an all items access
fieldAccess->_type = TRI_AQL_ACCESS_ALL;
}
else if (operator == AQL_NODE_OPERATOR_BINARY_LT) {
// create a single range access
fieldAccess->_type = TRI_AQL_ACCESS_RANGE_SINGLE;
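The operator-to-access-type mapping implemented by these branches, condensed into a hedged sketch (the != case is new in this commit: excluding a single value cannot narrow an index lookup, so it degrades to an unrestricted access):

// summary of the branches in CreateAccessForNode
// (value/range payload handling elided)
switch (operator) {
  case AQL_NODE_OPERATOR_BINARY_EQ: fieldAccess->_type = TRI_AQL_ACCESS_EXACT;        break;
  case AQL_NODE_OPERATOR_BINARY_NE: fieldAccess->_type = TRI_AQL_ACCESS_ALL;          break;
  case AQL_NODE_OPERATOR_BINARY_LT: fieldAccess->_type = TRI_AQL_ACCESS_RANGE_SINGLE; break;
  default: /* LE, GT, GE and IN are handled analogously */                             break;
}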
@ -1223,54 +1361,32 @@ static TRI_aql_field_access_t* CreateAccessForNode (TRI_aql_context_t* const con
////////////////////////////////////////////////////////////////////////////////
/// @brief create an access structure for the given node and operator
////////////////////////////////////////////////////////////////////////////////
static void NoteAttributeAccess (TRI_aql_context_t* const context,
const TRI_aql_logical_e logicalType,
const TRI_aql_attribute_name_t* const field,
const TRI_aql_node_type_e operator,
const TRI_aql_node_t* const node) {
TRI_aql_field_access_t* previous;
static TRI_aql_field_access_t* GetAttributeAccess (TRI_aql_context_t* const context,
const TRI_aql_attribute_name_t* const field,
const TRI_aql_node_type_e operator,
const TRI_aql_node_t* const node) {
TRI_aql_field_access_t* fieldAccess;
assert(logicalType == TRI_AQL_LOGICAL_AND || logicalType == TRI_AQL_LOGICAL_OR);
assert(context);
assert(node);
if (!field || !field->_name._buffer) {
return;
// this is ok if the node type is not supported
return NULL;
}
fieldAccess = CreateAccessForNode(context, field, operator, node);
if (!fieldAccess) {
// OOM
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return;
return NULL;
}
// look up previous range first
previous = (TRI_aql_field_access_t*) TRI_LookupByKeyAssociativePointer(context->_ranges, fieldAccess->_fieldName);
if (previous) {
TRI_aql_field_access_t* merged;
// previous range exists, now merge new access type with previous one
// remove from hash first
TRI_RemoveKeyAssociativePointer(context->_ranges, fieldAccess->_fieldName);
// MergeAccess() will free previous and/or fieldAccess
merged = MergeAccess(context, logicalType, fieldAccess, previous);
if (!merged) {
// OOM
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return;
}
TRI_InsertKeyAssociativePointer(context->_ranges, merged->_fieldName, merged, true);
}
else {
// no previous access exists, no need to merge
TRI_InsertKeyAssociativePointer(context->_ranges, fieldAccess->_fieldName, fieldAccess, false);
}
return fieldAccess;
}
////////////////////////////////////////////////////////////////////////////////
@ -1316,67 +1432,85 @@ static TRI_aql_attribute_name_t* GetAttributeName (TRI_aql_context_t* const cont
}
////////////////////////////////////////////////////////////////////////////////
/// @}
/// @brief inspect a condition node
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- constructors / destructors
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup Ahuacatl
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief init the optimizer
////////////////////////////////////////////////////////////////////////////////
bool TRI_InitOptimizerAql (TRI_aql_context_t* const context) {
static TRI_vector_pointer_t* ProcessNode (TRI_aql_context_t* const context,
TRI_aql_node_t* node) {
assert(context);
assert(context->_ranges == NULL);
assert(node);
context->_ranges = (TRI_associative_pointer_t*) TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_associative_pointer_t), false);
if (!context->_ranges) {
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return false;
if (node->_type == AQL_NODE_OPERATOR_UNARY_NOT) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
assert(lhs);
return MakeAllVector(context, ProcessNode(context, lhs));
}
TRI_InitAssociativePointer(context->_ranges,
TRI_UNKNOWN_MEM_ZONE,
&TRI_HashStringKeyAssociativePointer,
&HashFieldAccess,
&EqualFieldAccess,
NULL);
if (node->_type == AQL_NODE_OPERATOR_BINARY_OR) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
return true;
}
assert(lhs);
assert(rhs);
////////////////////////////////////////////////////////////////////////////////
/// @brief shutdown the optimizer
////////////////////////////////////////////////////////////////////////////////
// recurse into next level
return MergeVectors(context,
TRI_AQL_LOGICAL_OR,
ProcessNode(context, lhs),
ProcessNode(context, rhs));
}
void TRI_FreeOptimizerAql (TRI_aql_context_t* const context) {
assert(context);
if (node->_type == AQL_NODE_OPERATOR_BINARY_AND) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
assert(lhs);
assert(rhs);
if (context->_ranges) {
size_t i, n;
// recurse into next level
return MergeVectors(context,
TRI_AQL_LOGICAL_AND,
ProcessNode(context, lhs),
ProcessNode(context, rhs));
}
// free all remaining access elements
n = context->_ranges->_nrAlloc;
for (i = 0; i < n; ++i) {
TRI_aql_field_access_t* fieldAccess = (TRI_aql_field_access_t*) context->_ranges->_table[i];
if (!fieldAccess) {
continue;
if (node->_type == AQL_NODE_OPERATOR_BINARY_EQ ||
node->_type == AQL_NODE_OPERATOR_BINARY_NE ||
node->_type == AQL_NODE_OPERATOR_BINARY_LT ||
node->_type == AQL_NODE_OPERATOR_BINARY_LE ||
node->_type == AQL_NODE_OPERATOR_BINARY_GT ||
node->_type == AQL_NODE_OPERATOR_BINARY_GE ||
node->_type == AQL_NODE_OPERATOR_BINARY_IN) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
if (lhs->_type == AQL_NODE_ATTRIBUTE_ACCESS) {
TRI_aql_attribute_name_t* field = GetAttributeName(context, lhs);
if (field) {
TRI_aql_field_access_t* attributeAccess = GetAttributeAccess(context, field, node->_type, rhs);
TRI_DestroyStringBuffer(&field->_name);
TRI_Free(TRI_UNKNOWN_MEM_ZONE, field);
return Vectorize(context, attributeAccess);
}
FreeAccess(context, fieldAccess);
}
else if (rhs->_type == AQL_NODE_ATTRIBUTE_ACCESS) {
TRI_aql_attribute_name_t* field = GetAttributeName(context, rhs);
// free hash array
TRI_FreeAssociativePointer(TRI_UNKNOWN_MEM_ZONE, context->_ranges);
context->_ranges = NULL;
if (field) {
TRI_aql_field_access_t* attributeAccess = GetAttributeAccess(context, field, node->_type, lhs);
TRI_DestroyStringBuffer(&field->_name);
TRI_Free(TRI_UNKNOWN_MEM_ZONE, field);
return Vectorize(context, attributeAccess);
}
}
}
return NULL;
}
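An end-to-end example of what ProcessNode produces (hedged; filterNode is a hypothetical variable): for a filter like u.a == 1 && (u.b < 5 || u.c == 3) it recurses into both operands, ORs the single-element vectors built for u.b and u.c, and ANDs the result with the exact access for u.a.

TRI_vector_pointer_t* ranges = ProcessNode(context, filterNode);
// ranges now holds one TRI_aql_field_access_t per attribute:
// "u.a" (exact), "u.b" (single range) and "u.c" (exact)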
////////////////////////////////////////////////////////////////////////////////
@ -1396,17 +1530,14 @@ void TRI_FreeOptimizerAql (TRI_aql_context_t* const context) {
/// @brief dump ranges found for debugging purposes
////////////////////////////////////////////////////////////////////////////////
void TRI_DumpRangesAql (TRI_aql_context_t* const context) {
size_t i;
void TRI_DumpRangesAql (const TRI_vector_pointer_t* const ranges) {
size_t i, n;
assert(context);
assert(ranges);
for (i = 0; i < context->_ranges->_nrAlloc; ++i) {
TRI_aql_field_access_t* fieldAccess = context->_ranges->_table[i];
if (!fieldAccess) {
continue;
}
n = ranges->_length;
for (i = 0; i < n; ++i) {
TRI_aql_field_access_t* fieldAccess = TRI_AtVectorPointer(ranges, i);
printf("\nFIELD ACCESS\n- FIELD: %s\n",fieldAccess->_fieldName);
printf("- TYPE: %s\n", AccessName(fieldAccess->_type));
@ -1438,67 +1569,17 @@ void TRI_DumpRangesAql (TRI_aql_context_t* const context) {
}
}
}
////////////////////////////////////////////////////////////////////////////////
/// @brief inspect a condition and note all accesses found for it
////////////////////////////////////////////////////////////////////////////////
void TRI_InspectConditionAql (TRI_aql_context_t* const context,
const TRI_aql_logical_e type,
TRI_aql_node_t* node) {
if (node->_type == AQL_NODE_OPERATOR_UNARY_NOT) {
return;
}
if (node->_type == AQL_NODE_OPERATOR_BINARY_OR) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
// recurse into next level
TRI_InspectConditionAql(context, TRI_AQL_LOGICAL_OR, lhs);
TRI_InspectConditionAql(context, TRI_AQL_LOGICAL_OR, rhs);
}
if (node->_type == AQL_NODE_OPERATOR_BINARY_AND) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
// recurse into next level
TRI_InspectConditionAql(context, TRI_AQL_LOGICAL_AND, lhs);
TRI_InspectConditionAql(context, TRI_AQL_LOGICAL_AND, rhs);
}
if (node->_type == AQL_NODE_OPERATOR_BINARY_EQ ||
// node->_type == AQL_NODE_OPERATOR_BINARY_NE ||
node->_type == AQL_NODE_OPERATOR_BINARY_LT ||
node->_type == AQL_NODE_OPERATOR_BINARY_LE ||
node->_type == AQL_NODE_OPERATOR_BINARY_GT ||
node->_type == AQL_NODE_OPERATOR_BINARY_GE ||
node->_type == AQL_NODE_OPERATOR_BINARY_IN) {
TRI_aql_node_t* lhs = TRI_AQL_NODE_MEMBER(node, 0);
TRI_aql_node_t* rhs = TRI_AQL_NODE_MEMBER(node, 1);
if (lhs->_type == AQL_NODE_ATTRIBUTE_ACCESS) {
TRI_aql_attribute_name_t* field = GetAttributeName(context, lhs);
if (field) {
NoteAttributeAccess(context, type, field, node->_type, rhs);
TRI_DestroyStringBuffer(&field->_name);
TRI_Free(TRI_UNKNOWN_MEM_ZONE, field);
}
}
else if (rhs->_type == AQL_NODE_ATTRIBUTE_ACCESS) {
TRI_aql_attribute_name_t* field = GetAttributeName(context, rhs);
if (field) {
NoteAttributeAccess(context, type, field, node->_type, lhs);
TRI_DestroyStringBuffer(&field->_name);
TRI_Free(TRI_UNKNOWN_MEM_ZONE, field);
}
}
}
TRI_vector_pointer_t* TRI_InspectConditionAql (TRI_aql_context_t* const context,
TRI_aql_node_t* node,
const TRI_vector_pointer_t* const parentRestrictions) {
return ProcessNode(context, node);
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////


@ -134,31 +134,6 @@ TRI_aql_attribute_name_t;
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- constructors / destructors
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup Ahuacatl
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief init the optimizer
////////////////////////////////////////////////////////////////////////////////
bool TRI_InitOptimizerAql (TRI_aql_context_t* const);
////////////////////////////////////////////////////////////////////////////////
/// @brief shutdown the optimizer
////////////////////////////////////////////////////////////////////////////////
void TRI_FreeOptimizerAql (TRI_aql_context_t* const);
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- public functions
// -----------------------------------------------------------------------------
@ -172,15 +147,15 @@ void TRI_FreeOptimizerAql (TRI_aql_context_t* const);
/// @brief dump ranges found for debugging purposes
////////////////////////////////////////////////////////////////////////////////
void TRI_DumpRangesAql (TRI_aql_context_t* const);
void TRI_DumpRangesAql (const TRI_vector_pointer_t* const);
////////////////////////////////////////////////////////////////////////////////
/// @brief inspect a condition and note all accesses found for it
////////////////////////////////////////////////////////////////////////////////
void TRI_InspectConditionAql (TRI_aql_context_t* const,
const TRI_aql_logical_e,
TRI_aql_node_t*);
TRI_vector_pointer_t* TRI_InspectConditionAql (TRI_aql_context_t* const,
TRI_aql_node_t*,
const TRI_vector_pointer_t* const);
////////////////////////////////////////////////////////////////////////////////
/// @}


@ -31,7 +31,7 @@
#include "V8/v8-execution.h"
#undef RANGE_OPTIMIZER
#undef RANGE_OPTIMIZER
// -----------------------------------------------------------------------------
// --SECTION-- private functions
@ -268,7 +268,12 @@ static TRI_aql_node_t* OptimiseFilter (TRI_aql_context_t* const context,
if (!TRI_IsConstantValueNodeAql(expression)) {
#ifdef RANGE_OPTIMIZER
TRI_InspectConditionAql(context, TRI_AQL_LOGICAL_AND, expression);
TRI_vector_pointer_t* ranges = TRI_InspectConditionAql(context, expression, NULL);
if (ranges) {
TRI_DumpRangesAql(ranges);
TRI_FreeVectorPointer(TRI_UNKNOWN_MEM_ZONE, ranges);
}
#endif
return node;
}
@ -663,13 +668,6 @@ TRI_aql_node_t* TRI_FoldConstantsAql (TRI_aql_context_t* const context,
TRI_aql_node_t* node) {
TRI_aql_modify_tree_walker_t* walker;
#ifdef RANGE_OPTIMIZER
if (!TRI_InitOptimizerAql(context)) {
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
return node;
}
#endif
walker = TRI_CreateModifyTreeWalkerAql((void*) context, &ModifyNode);
if (!walker) {
TRI_SetErrorContextAql(context, TRI_ERROR_OUT_OF_MEMORY, NULL);
@ -680,10 +678,6 @@ TRI_aql_node_t* TRI_FoldConstantsAql (TRI_aql_context_t* const context,
TRI_FreeModifyTreeWalkerAql(walker);
#ifdef RANGE_OPTIMIZER
TRI_DumpRangesAql(context);
#endif
return node;
}


@ -78,7 +78,6 @@ TRI_aql_context_t* TRI_CreateContextAql (TRI_vocbase_t* vocbase,
}
context->_vocbase = vocbase;
context->_ranges = NULL;
context->_variableIndex = 0;
@ -164,9 +163,6 @@ void TRI_FreeContextAql (TRI_aql_context_t* const context) {
}
TRI_DestroyVectorPointer(&context->_scopes);
// free range optimizer
TRI_FreeOptimizerAql(context);
// free all strings registered
i = context->_strings._length;
while (i--) {


@ -82,7 +82,6 @@ typedef struct TRI_aql_context_s {
TRI_associative_pointer_t _parameterValues;
TRI_associative_pointer_t _parameterNames;
TRI_associative_pointer_t _collectionNames;
TRI_associative_pointer_t* _ranges;
size_t _variableIndex;
void* _first;
char* _query;


@ -0,0 +1,18 @@
avocado> db.four.ensureUniqueConstraint("a", "b.c");
{ "id" : "164405/1147445", "unique" : true, "type" : "hash", "fields" : ["a", "b.c"], "isNewlyCreated" : true }
avocado> db.four.save({ a : 1, b : { c : 1 } });
{ "_id" : "164405/1868341", "_rev" : 1868341 }
avocado> db.four.save({ a : 1, b : { c : 1 } });
JavaScript exception in file '(avocado)' at 1,9: [AvocadoError 1210: cannot save document]
!db.four.save({ a : 1, b : { c : 1 } });
! ^
stacktrace: [AvocadoError 1210: cannot save document]
at (avocado):1:9
avocado> db.four.save({ a : 1, b : { c : null } });
{ "_id" : "164405/2196021", "_rev" : 2196021 }
avocado> db.four.save({ a : 1 });
{ "_id" : "164405/2196023", "_rev" : 2196023 }


@ -416,22 +416,26 @@ int main (int argc, char* argv[]) {
ParseProgramOptions(argc, argv);
// http://www.network-science.de/ascii/ Font: ogre
if (NoColors) {
printf(" _ _ \n");
printf(" __ ___ _____ ___(_)_ __| |__ \n");
printf(" / _` \\ \\ / / _ \\ / __| | '__| '_ \\ \n");
printf(" | (_| |\\ V / (_) | (__| | | | |_) | \n");
printf(" \\__,_| \\_/ \\___/ \\___|_|_| |_.__/ \n");
printf(" \n");
}
else {
printf("%s %s _ _ %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
printf("%s __ ___ _____ ___%s(_)_ __| |__ %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
printf("%s / _` \\ \\ / / _ \\ / __%s| | '__| '_ \\ %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
printf("%s | (_| |\\ V / (_) | (__%s| | | | |_) | %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
printf("%s \\__,_| \\_/ \\___/ \\___%s|_|_| |_.__/ %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
printf("%s %s %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
{
char const* g = DEF_GREEN;
char const* r = DEF_RED;
char const* z = DEF_RESET;
if (NoColors) {
g = "";
r = "";
z = "";
}
printf("%s %s _ _ %s\n", g, r, z);
printf("%s __ _ _ __ __ _ _ __ __ _ ___ %s(_)_ __| |__ %s\n", g, r, z);
printf("%s / _` | '__/ _` | '_ \\ / _` |/ _ \\%s| | '__| '_ \\ %s\n", g, r, z);
printf("%s| (_| | | | (_| | | | | (_| | (_) %s| | | | |_) |%s\n", g, r, z);
printf("%s \\__,_|_| \\__,_|_| |_|\\__, |\\___/%s|_|_| |_.__/ %s\n", g, r, z);
printf("%s |___/ %s %s\n", g, r, z);
}
printf("\n");
printf("Welcome to avocirb %s. Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
#ifdef TRI_V8_VERSION


@ -289,7 +289,6 @@ JAVASCRIPT_HEADER = \
js/common/bootstrap/js-errors.h \
js/common/bootstrap/js-modules.h \
js/common/bootstrap/js-print.h \
js/common/bootstrap/js-errors.h \
js/client/js-client.h \
js/server/js-server.h \
js/server/js-ahuacatl.h


@ -921,7 +921,6 @@ JAVASCRIPT_HEADER = \
js/common/bootstrap/js-errors.h \
js/common/bootstrap/js-modules.h \
js/common/bootstrap/js-print.h \
js/common/bootstrap/js-errors.h \
js/client/js-client.h \
js/server/js-server.h \
js/server/js-ahuacatl.h


@ -184,9 +184,12 @@ HttpHandler::status_e RestDocumentHandler::execute () {
/// @REST{POST /document?collection=@FA{collection-name}&createCollection=@FA{create}}
///
/// Instead of a @FA{collection-identifier}, a @FA{collection-name} can be
/// used. If @FA{createCollection} is true, then the collection is created if it does not
exist.
/// used. If @FA{createCollection} is true, then the collection is created if it
does not exist.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// Create a document given a collection identifier @LIT{161039} for the collection
@ -367,6 +370,9 @@ bool RestDocumentHandler::readDocument () {
/// given etag. Otherwise a @LIT{HTTP 412} is returned. As an alternative
/// you can supply the etag in an attribute @LIT{rev} in the URL.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// Use a document handle:
@ -474,6 +480,9 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
///
/// Instead of a @FA{collection-identifier}, a collection name can be given.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// @verbinclude rest_read-document-all
@ -577,6 +586,9 @@ bool RestDocumentHandler::readAllDocuments () {
/// can use this call to get the current revision of a document or check if
/// the document was deleted.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// @verbinclude rest_read-document-head
@ -634,6 +646,9 @@ bool RestDocumentHandler::checkDocument () {
/// header. You must never supply both the "ETag" header and the @LIT{rev}
/// parameter.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// Using document handle:
@ -785,6 +800,9 @@ bool RestDocumentHandler::updateDocument () {
/// "If-Match" header. You must never supply both the "If-Match" header and the
/// @LIT{rev} parameter.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// Using document handle:


@ -84,6 +84,9 @@ RestEdgeHandler::RestEdgeHandler (HttpRequest* request, TRI_vocbase_t* vocbase)
/// If you request such an edge, the returned document will also contain the
/// attributes @LIT{_from} and @LIT{_to}.
///
/// @note If you are implementing a client API, you should use the path
/// @LIT{/_api/document}.
///
/// @EXAMPLES
///
/// Create an edge:


@ -642,9 +642,13 @@ int AvocadoServer::startupServer () {
_applicationAdminServer->addBasicHandlers(factory);
factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);
factory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);
if (shareAdminPort) {
_applicationAdminServer->addHandlers(factory, "/_admin");
_applicationUserManager->addHandlers(factory, "/_admin");
@ -681,6 +685,10 @@ int AvocadoServer::startupServer () {
adminFactory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
adminFactory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
adminFactory->addPrefixHandler("/",
RestHandlerCreator<RestActionHandler>::createData< pair< TRI_vocbase_t*, set<string>* >* >,
(void*) &handlerDataAdmin);


@ -29,7 +29,16 @@
/// @page IndexHashTOC
///
/// <ol>
/// <li>MISSING</li>
/// <li>@ref IndexHashHttp
/// <ol>
/// <li>@ref IndexHashHttpEnsureUniqueConstraint "POST /_api/index"</li>
/// </ol>
/// </li>
/// <li>@ref IndexHashShell
/// <ol>
/// <li>@ref IndexHashShellEnsureUniqueConstraint "collection.ensureUniqueConstraint"</li>
/// </ol>
/// </li>
/// </ol>
////////////////////////////////////////////////////////////////////////////////
@ -41,6 +50,23 @@
/// <hr>
/// @copydoc IndexHashTOC
/// <hr>
///
/// It is possible to define a hash index on one or more attributes (or paths)
/// of a document. This hash is then used in queries to locate documents in
/// O(1) operations. If the hash is unique, then no two documents are allowed
/// to have the same set of attribute values.
///
/// @section IndexHashHttp Accessing Hash Indexes via HTTP
//////////////////////////////////////////////////////////
///
/// @anchor IndexHashHttpEnsureUniqueConstraint
/// @copydetails JSF_POST_api_index_hash
///
/// @section IndexHashShell Accessing Hash Indexes from the Shell
/////////////////////////////////////////////////////////////////
///
/// @anchor IndexHashShellEnsureUniqueConstraint
/// @copydetails JS_EnsureUniqueConstraintVocbaseCol
////////////////////////////////////////////////////////////////////////////////
// Local Variables:


@ -5,13 +5,58 @@ require './avocadodb.rb'
describe AvocadoDB do
api = "/_api/index"
prefix = "api-index-unique-constraint"
prefix = "api-index-hash"
################################################################################
## unique constraints during create
################################################################################
context "creating:" do
context "creating index:" do
context "dealing with unique constraints violation:" do
before do
@cn = "UnitTestsCollectionIndexes"
AvocadoDB.drop_collection(@cn)
@cid = AvocadoDB.create_collection(@cn)
end
after do
AvocadoDB.drop_collection(@cn)
end
it "does not create the index in case of violation" do
# create a document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
doc.code.should eq(201)
# create another document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
doc.code.should eq(201)
# try to create the index
cmd = "/_api/index?collection=#{@cid}"
body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
doc = AvocadoDB.log_post("#{prefix}-fail", cmd, :body => body)
doc.code.should eq(400)
doc.parsed_response['error'].should eq(true)
doc.parsed_response['code'].should eq(400)
doc.parsed_response['errorNum'].should eq(1203)
end
end
end
################################################################################
## unique constraints during create
################################################################################
context "creating documents:" do
context "dealing with unique constraints:" do
before do
@cn = "UnitTestsCollectionIndexes"
@ -24,9 +69,9 @@ describe AvocadoDB do
end
it "rolls back in case of violation" do
cmd = "/_api/index/#{@cid}"
cmd = "/_api/index?collection=#{@cid}"
body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
doc = AvocadoDB.log_post("#{prefix}-create1", cmd, :body => body)
doc.code.should eq(201)
doc.parsed_response['type'].should eq("hash")
@ -35,7 +80,7 @@ describe AvocadoDB do
# create a document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
doc.code.should eq(201)
@ -57,7 +102,7 @@ describe AvocadoDB do
# create a unique constraint violation
body = "{ \"a\" : 1, \"b\" : 2 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc = AvocadoDB.log_post("#{prefix}-create3", cmd1, :body => body)
doc.code.should eq(409)
@ -72,7 +117,7 @@ describe AvocadoDB do
# third try (make sure the rollback has not destroyed anything)
body = "{ \"a\" : 1, \"b\" : 3 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc = AvocadoDB.log_post("#{prefix}-create4", cmd1, :body => body)
doc.code.should eq(409)
@ -116,7 +161,7 @@ describe AvocadoDB do
## unique constraints during update
################################################################################
context "updating:" do
context "updating documents:" do
context "dealing with unique constraints:" do
before do
@cn = "UnitTestsCollectionIndexes"
@ -129,9 +174,9 @@ describe AvocadoDB do
end
it "rolls back in case of violation" do
cmd = "/_api/index/#{@cid}"
cmd = "/_api/index?collection=#{@cid}"
body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
doc = AvocadoDB.log_post("#{prefix}-update1", cmd, :body => body)
doc.code.should eq(201)
doc.parsed_response['type'].should eq("hash")
@ -140,7 +185,7 @@ describe AvocadoDB do
# create a document
cmd1 = "/document?collection=#{@cid}"
body = "{ \"a\" : 1, \"b\" : 1 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc = AvocadoDB.log_post("#{prefix}-update2", cmd1, :body => body)
doc.code.should eq(201)
@ -162,7 +207,7 @@ describe AvocadoDB do
# create a second document
body = "{ \"a\" : 2, \"b\" : 2 }"
doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
doc = AvocadoDB.log_post("#{prefix}-update3", cmd1, :body => body)
doc.code.should eq(201)


@ -9,5 +9,6 @@ rspec --format d \
rest-delete-document-spec.rb \
rest-edge-spec.rb \
api-index-spec.rb \
api-index-hash-spec.rb \
api-cursor-spec.rb \
api-simple-spec.rb


@ -4504,17 +4504,24 @@ static v8::Handle<v8::Value> JS_EnsureGeoConstraintVocbaseCol (v8::Arguments con
///
/// @FUN{ensureUniqueConstraint(@FA{field1}, @FA{field2}, ...,@FA{fieldn})}
///
/// Creates a hash index on all documents using attributes as paths to the
/// fields. At least one attribute must be given. The value of this attribute
/// must be a list. All documents, which do not have the attribute path or where
/// one or more values that are not suitable, are ignored.
/// Creates a unique hash index on all documents using @FA{field1}, @FA{field2},
/// ... as attribute paths. At least one attribute path must be given.
///
/// In case that the index was successfully created, the index identifier
/// is returned.
/// When a unique constraint is in effect for a collection, all documents
/// that contain the given attributes must differ in the attribute
/// values. Creating a new document or updating a document will fail if the
/// uniqueness is violated. If any attribute value is null for a document, this
/// document is ignored by the index.
///
/// In case that the index was successfully created, the index identifier is
/// returned.
///
/// Note that non-existing attribute paths in a document are treated as if the
/// value were @LIT{null}.
///
/// @EXAMPLES
///
/// @verbinclude admin5
/// @verbinclude shell-index-create-unique-constraint
////////////////////////////////////////////////////////////////////////////////
static v8::Handle<v8::Value> JS_EnsureUniqueConstraintVocbaseCol (v8::Arguments const& argv) {


@ -1227,20 +1227,26 @@ int main (int argc, char* argv[]) {
// http://www.network-science.de/ascii/ Font: ogre
if (noColors) {
printf(" " " _ \n");
printf(" __ ___ _____ ___ " "___| |__ \n");
printf(" / _` \\ \\ / / _ \\ / __" "/ __| '_ \\ \n");
printf(" | (_| |\\ V / (_) | (__" "\\__ \\ | | | \n");
printf(" \\__,_| \\_/ \\___/ \\___" "|___/_| |_| \n\n");
}
else {
printf( " " "\x1b[31m _ \x1b[0m\n");
printf("\x1b[32m __ ___ _____ ___ " "\x1b[31m___| |__ \x1b[0m\n");
printf("\x1b[32m / _` \\ \\ / / _ \\ / __" "\x1b[31m/ __| '_ \\ \x1b[0m\n");
printf("\x1b[32m | (_| |\\ V / (_) | (__" "\x1b[31m\\__ \\ | | | \x1b[0m\n");
printf("\x1b[32m \\__,_| \\_/ \\___/ \\___" "\x1b[31m|___/_| |_| \x1b[0m\n\n");
{
char const* g = DEF_GREEN;
char const* r = DEF_RED;
char const* z = DEF_RESET;
if (noColors) {
g = "";
r = "";
z = "";
}
printf("%s %s _ %s\n", g, r, z);
printf("%s __ _ _ __ __ _ _ __ __ _ ___ %s ___| |__ %s\n", g, r, z);
printf("%s / _` | '__/ _` | '_ \\ / _` |/ _ \\%s/ __| '_ \\ %s\n", g, r, z);
printf("%s| (_| | | | (_| | | | | (_| | (_) %s\\__ \\ | | |%s\n", g, r, z);
printf("%s \\__,_|_| \\__,_|_| |_|\\__, |\\___/%s|___/_| |_|%s\n", g, r, z);
printf("%s |___/ %s %s\n", g, r, z);
}
printf("\n");
printf("Welcome to avocsh %s. Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
#ifdef TRI_V8_VERSION


@ -1211,214 +1211,88 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t* idx,
/// @brief helper for hashing
////////////////////////////////////////////////////////////////////////////////
static int HashIndexHelper (const TRI_hash_index_t* hashIndex,
static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
HashIndexElement* hashElement,
const TRI_doc_mptr_t* document,
const TRI_shaped_json_t* shapedDoc) {
TRI_doc_mptr_t const* document,
TRI_shaped_json_t const* shapedDoc) {
union { void* p; void const* c; } cnv;
TRI_shaped_json_t shapedObject;
TRI_shape_access_t* acc;
TRI_shaped_json_t shapedObject;
TRI_shaper_t* shaper;
int res;
size_t j;
shaper = hashIndex->base._collection->_shaper;
// .............................................................................
// Attempting to locate a hash entry using TRI_shaped_json_t object. Use this
// when we wish to remove a hash entry and we only have the "keys" rather than
// having the document (from which the keys would follow).
// .............................................................................
if (shapedDoc != NULL) {
// ..........................................................................
// Attempting to locate a hash entry using TRI_shaped_json_t object. Use this
// when we wish to remove a hash entry and we only have the "keys" rather than
// having the document (from which the keys would follow).
// ..........................................................................
hashElement->data = NULL;
}
for (j = 0; j < hashIndex->_paths._length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j)));
// .............................................................................
// Assign the document to the HashIndexElement structure - so that it can
// later be retrieved.
// .............................................................................
else if (document != NULL) {
cnv.c = document;
hashElement->data = cnv.p;
shapedDoc = &document->_document;
}
else {
return TRI_ERROR_INTERNAL;
}
// .............................................................................
// Extract the attribute values
// .............................................................................
res = TRI_ERROR_NO_ERROR;
for (j = 0; j < hashIndex->_paths._length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths, j)));
// ..........................................................................
// Determine if document has that particular shape
// ..........................................................................
// determine if document has that particular shape
acc = TRI_ShapeAccessor(shaper, shapedDoc->_sid, shape);
acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, shapedDoc->_sid, shape);
if (acc == NULL || acc->_shape == NULL) {
if (acc != NULL) {
TRI_FreeShapeAccessor(acc);
}
if (acc == NULL || acc->_shape == NULL) {
if (acc != NULL) {
TRI_FreeShapeAccessor(acc);
}
shapedObject._sid = shaper->_sidNull;
shapedObject._data.length = 0;
shapedObject._data.data = NULL;
// TRI_Free(hashElement->fields); memory deallocated in the calling procedure
return TRI_WARNING_AVOCADO_INDEX_HASH_UPDATE_ATTRIBUTE_MISSING;
}
res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
}
else {
// ..........................................................................
// Extract the field
// ..........................................................................
// extract the field
if (! TRI_ExecuteShapeAccessor(acc, shapedDoc, &shapedObject)) {
TRI_FreeShapeAccessor(acc);
// TRI_Free(hashElement->fields); memory deallocated in the calling procedure
return TRI_ERROR_INTERNAL;
}
// ..........................................................................
// Store the json shaped Object -- this is what will be hashed
// ..........................................................................
hashElement->fields[j] = shapedObject;
TRI_FreeShapeAccessor(acc);
} // end of for loop
}
else if (document != NULL) {
// ..........................................................................
// Assign the document to the HashIndexElement structure - so that it can later
// be retrieved.
// ..........................................................................
cnv.c = document;
hashElement->data = cnv.p;
for (j = 0; j < hashIndex->_paths._length; ++j) {
TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j)));
// ..........................................................................
// Determine if document has that particular shape
// It is not an error if the document DOES NOT have the particular shape
// ..........................................................................
acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, document->_document._sid, shape);
if (acc == NULL || acc->_shape == NULL) {
if (acc != NULL) {
TRI_FreeShapeAccessor(acc);
}
// TRI_Free(hashElement->fields); memory deallocated in the calling procedure
return TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
}
// ..........................................................................
// Extract the field
// ..........................................................................
if (! TRI_ExecuteShapeAccessor(acc, &(document->_document), &shapedObject)) {
TRI_FreeShapeAccessor(acc);
// TRI_Free(hashElement->fields); memory deallocated in the calling procedure
return TRI_ERROR_INTERNAL;
if (shapedObject._sid == shaper->_sidNull) {
res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
}
// ..........................................................................
// Store the field
// ..........................................................................
hashElement->fields[j] = shapedObject;
TRI_FreeShapeAccessor(acc);
} // end of for loop
}
else {
return TRI_ERROR_INTERNAL;
}
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief hash indexes a document
////////////////////////////////////////////////////////////////////////////////
static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
// ............................................................................
// Obtain the hash index structure
// ............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in InsertHashIndex");
return TRI_set_errno(TRI_ERROR_INTERNAL);
}
// ............................................................................
// Allocate storage to shaped json objects stored as a simple list.
// These will be used for hashing.
// ............................................................................
hashElement.numFields = hashIndex->_paths._length;
hashElement.fields = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
if (hashElement.fields == NULL) {
LOG_WARNING("out-of-memory in InsertHashIndex");
return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
}
res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
// ............................................................................
// It is possible that this document does not have the necessary attributes
// (keys) to participate in this index.
// ............................................................................
// ............................................................................
// If an error occurred in the called procedure HashIndexHelper, we must
// now exit -- and deallocate memory assigned to hashElement.
// ............................................................................
if (res != TRI_ERROR_NO_ERROR) { // some sort of error occurred
// ..........................................................................
// Deallocated the memory already allocated to hashElement.fields
// ..........................................................................
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
// ..........................................................................
// It may happen that the document does not have the necessary attributes to
// be included within the hash index; in this case, do not report back an error.
// ..........................................................................
if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
return TRI_ERROR_NO_ERROR;
}
return res;
}
// ............................................................................
// Fill the json field list from the document for unique hash index
// ............................................................................
if (hashIndex->base._unique) {
res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
// ............................................................................
// Fill the json field list from the document for non-unique hash index
// ............................................................................
else {
res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
// ............................................................................
// Memory which has been allocated to hashElement.fields remains allocated
// contents of which are stored in the hash array.
// ............................................................................
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
// store the json shaped Object -- this is what will be hashed
hashElement->fields[j] = shapedObject;
}
return res;
}
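The restructured helper now serves both entry forms through a single loop over the indexed paths. A hedged call sketch (oldShaped is a hypothetical variable; the first form matches the calls visible in this commit):

// insert/remove: derive the index keys from the document itself
res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);

// update's removal step: only the old shaped JSON (the keys) is available
res = HashIndexHelper(hashIndex, &hashElement, NULL, oldShaped);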
@ -1510,71 +1384,52 @@ static void RemoveIndexHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
}
////////////////////////////////////////////////////////////////////////////////
/// @brief removes a document from a hash index
/// @brief hash indexes a document
////////////////////////////////////////////////////////////////////////////////
static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
// ............................................................................
// .............................................................................
// Obtain the hash index structure
// ............................................................................
// .............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in RemoveHashIndex");
LOG_WARNING("internal error in InsertHashIndex");
return TRI_set_errno(TRI_ERROR_INTERNAL);
}
// ............................................................................
// Allocate some memory for the HashIndexElement structure
// ............................................................................
}
// .............................................................................
// Allocate storage to shaped json objects stored as a simple list.
// These will be used for hashing.
// .............................................................................
hashElement.numFields = hashIndex->_paths._length;
hashElement.fields = TRI_Allocate( TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
if (hashElement.fields == NULL) {
LOG_WARNING("out-of-memory in InsertHashIndex");
return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
}
// ..........................................................................
// Fill the json field list from the document
// ..........................................................................
hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
// ..........................................................................
// It may happen that the document does not have attributes which match
// For now return internal error, there needs to be its own error number
// and the appropriate action needs to be taken by the calling function in
// such cases.
// ..........................................................................
// .............................................................................
// It is possible that this document does not have the necessary attributes
// (keys) to participate in this index.
//
// If an error occurred in the called procedure HashIndexHelper, we must
// now exit -- and deallocate memory assigned to hashElement.
// .............................................................................
if (res != TRI_ERROR_NO_ERROR) {
// ........................................................................
// Deallocate memory allocated to hashElement.fields above
// ........................................................................
// Deallocated the memory already allocated to hashElement.fields
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
// ........................................................................
// It may happen that the document does not have the necessary attributes
// to have participated in the hash index. In this case, we do not
// report an error to the calling procedure.
// ........................................................................
// ........................................................................
// -1 from the called procedure HashIndexHelper implies that we do not
// propagate the error to the parent function. However for removal
// we advise the parent function. TODO: return a proper error code.
// ........................................................................
// .............................................................................
// It may happen that the document does not have the necessary attributes to
// be included within the hash index; in this case, do not report back an error.
// .............................................................................
if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
return TRI_ERROR_NO_ERROR;
@ -1583,25 +1438,104 @@ static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
return res;
}
// ............................................................................
// Attempt the removal for unique hash indexes
// ............................................................................
// .............................................................................
// Fill the json field list from the document for unique or non-unique index
// .............................................................................
if (hashIndex->base._unique) {
res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
else {
res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
// .............................................................................
// Memory which has been allocated to hashElement.fields remains allocated
// contents of which are stored in the hash array.
// .............................................................................
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
return res;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief removes a document from a hash index
////////////////////////////////////////////////////////////////////////////////
static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
// .............................................................................
// Obtain the hash index structure
// .............................................................................
hashIndex = (TRI_hash_index_t*) idx;
if (idx == NULL) {
LOG_WARNING("internal error in RemoveHashIndex");
return TRI_set_errno(TRI_ERROR_INTERNAL);
}
// .............................................................................
// Allocate some memory for the HashIndexElement structure
// .............................................................................
hashElement.numFields = hashIndex->_paths._length;
hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
// .............................................................................
// Fill the json field list from the document
// .............................................................................
res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
// .............................................................................
// It may happen that the document does not have attributes which match
// For now return internal error, there needs to be its own error number
// and the appropriate action needs to be taken by the calling function in
// such cases.
// .............................................................................
if (res != TRI_ERROR_NO_ERROR) {
// Deallocate memory allocated to hashElement.fields above
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
// .............................................................................
// It may happen that the document does not have the necessary attributes to
// have participated within the hash index. In this case, we do not report an
// error to the calling procedure.
//
// If HashIndexHelper returns TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING,
// there was nothing to remove, so we swallow the warning and report success
// to the parent function. TODO: return a proper error code.
// .............................................................................
if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
return TRI_ERROR_NO_ERROR;
}
return res;
}
// .............................................................................
// Attempt the removal for unique or non-unique hash indexes
// .............................................................................
if (hashIndex->base._unique) {
res = HashIndex_remove(hashIndex->_hashIndex, &hashElement);
}
else {
res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
}
// .............................................................................
// Deallocate memory allocated to hashElement.fields above
// .............................................................................
TRI_Free(TRI_CORE_MEM_ZONE, hashElement.fields);
@ -1616,20 +1550,20 @@ static int UpdateHashIndex (TRI_index_t* idx,
const TRI_doc_mptr_t* newDoc,
const TRI_shaped_json_t* oldDoc) {
// .............................................................................
// Note: The oldDoc is represented by the TRI_shaped_json_t rather than by a
// TRI_doc_mptr_t object. However, for non-unique indexes we must pass the
// document shape to the hash remove function.
// .............................................................................
union { void* p; void const* c; } cnv;
HashIndexElement hashElement;
TRI_hash_index_t* hashIndex;
int res;
// .............................................................................
// Obtain the hash index structure
// .............................................................................
hashIndex = (TRI_hash_index_t*) idx;
@ -1638,179 +1572,98 @@ static int UpdateHashIndex (TRI_index_t* idx,
return TRI_ERROR_INTERNAL;
}
// .............................................................................
// Allocate some memory for the HashIndexElement structure
// .............................................................................
hashElement.numFields = hashIndex->_paths._length;
hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
// .............................................................................
// Fill in the fields with the values from oldDoc
// .............................................................................
assert(oldDoc != NULL);
res = HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc);
if (res == TRI_ERROR_NO_ERROR) {
// .............................................................................
// We must fill the hashElement with the value of the document shape -- this
// is necessary when we attempt to remove non-unique hash indexes.
// .............................................................................
cnv.c = newDoc; // we are assuming here that the doc ptr does not change
hashElement.data = cnv.p;
// .............................................................................
// Remove the old hash index entry
// .............................................................................
if (hashIndex->base._unique) {
res = HashIndex_remove(hashIndex->_hashIndex, &hashElement);
}
else {
res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
}
// .............................................................................
// This error is common when a document 'update' occurs but fails because a
// duplicate entry already exists. When the 'rollback' is applied, there is no
// document to remove -- so we get this error.
// .............................................................................
if (res != TRI_ERROR_NO_ERROR) {
LOG_DEBUG("could not remove existing document from hash index in UpdateHashIndex");
}
}
else if (res != TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
LOG_WARNING("existing document was not removed from hash index in UpdateHashIndex");
}
// ............................................................................
// Fill the json simple list from the document
// ............................................................................
res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL);
// ............................................................................
// Deal with any errors reported back.
// ............................................................................
if (res != TRI_ERROR_NO_ERROR) {
// ..........................................................................
// Deallocate memory given to hashElement.fields
// ..........................................................................
TRI_Free(TRI_CORE_MEM_ZONE, hashElement.fields);
if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
// ........................................................................
// probably fields do not match.
// ........................................................................
return TRI_ERROR_NO_ERROR;
}
return res;
}
// .............................................................................
// Attempt to add the hash entry from the new doc
// .............................................................................
if (hashIndex->base._unique) {
res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
else {
res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
}
// .............................................................................
// Deallocate memory given to hashElement.fields
// .............................................................................
TRI_Free(TRI_CORE_MEM_ZONE, hashElement.fields);
return res;
}
@ -1941,7 +1794,7 @@ void TRI_FreeHashIndex (TRI_index_t* idx) {
/// HashIndexElements* results
////////////////////////////////////////////////////////////////////////////////
HashIndexElements* TRI_LookupHashIndex (TRI_index_t* idx, TRI_json_t* parameterList) {
TRI_hash_index_t* hashIndex;
HashIndexElements* result;
HashIndexElement element;

View File

@ -337,6 +337,39 @@ function POST_api_index_geo (req, res, collection, body) {
////////////////////////////////////////////////////////////////////////////////
/// @brief creates a hash index
///
/// @REST{POST /_api/index?collection=@FA{collection-identifier}}
///
/// Creates a hash index for the collection @FA{collection-identifier}, if it
/// does not already exist. The call expects an object containing the index
/// details.
///
/// - @LIT{type}: must be equal to @LIT{"hash"}.
///
/// - @LIT{fields}: A list of attribute paths.
///
/// - @LIT{unique}: If @LIT{true}, then create a unique index.
///
/// If the index does not already exist and can be created, then a @LIT{HTTP
/// 201} is returned. If the index already exists, then a @LIT{HTTP 200} is
/// returned.
///
/// If the @FA{collection-identifier} is unknown, then a @LIT{HTTP 404} is
/// returned. It is possible to specify a name instead of an identifier.
///
/// If the collection already contains documents that would violate the
/// uniqueness of the new unique hash index, then a @LIT{HTTP 400} is returned.
///
/// @EXAMPLES
///
/// Creating a unique constraint:
///
/// @verbinclude api-index-create-new-unique-constraint
///
/// Creating a hash index:
///
/// @verbinclude api-index-create-new-hash-index
////////////////////////////////////////////////////////////////////////////////
function POST_api_index_hash (req, res, collection, body) {
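For illustration, a minimal sketch (not part of this commit) of a request body that matches the documented interface; the collection name "products" and the attribute paths are invented:

// Hypothetical request body for POST /_api/index?collection=products
// ("products" and the field names below are invented for illustration).
var body = {
  type : "hash",              // must be "hash"
  fields : [ "name", "sku" ], // list of attribute paths to index
  unique : true               // request a unique constraint
};
// Per the documentation above: HTTP 201 if the index was newly created,
// HTTP 200 if it already existed, HTTP 404 for an unknown collection,
// HTTP 400 if existing documents violate the uniqueness.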

View File

@ -37,10 +37,10 @@ var actions = require("actions");
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @fn JSF_GET_admin_status
/// @brief returns system status information for the server
///
/// @REST{GET /_admin/status}
///
/// The call returns an object with the following attributes:
///
@ -70,7 +70,7 @@ var actions = require("actions");
////////////////////////////////////////////////////////////////////////////////
actions.defineHttp({
url : "_system/status",
url : "_admin/status",
context : "admin",
callback : function (req, res) {
@ -86,6 +86,135 @@ actions.defineHttp({
}
});
////////////////////////////////////////////////////////////////////////////////
/// @fn JSF_GET_admin_config_description
/// @brief returns configuration description
///
/// @REST{GET /_admin/config/description}
///
/// The call returns an object describing the configuration.
////////////////////////////////////////////////////////////////////////////////
actions.defineHttp({
url : "_admin/config/description",
context : "admin",
callback : function (req, res) {
try {
var result = {
database : {
name : "Database",
type : "section",
path : {
name : "Path",
type : "string",
readonly : true
},
access : {
name : "Combined Access",
type : "string",
readonly : true
}
},
logging : {
name : "Logging",
type : "section",
level : {
name : "Log Level",
type : "pull-down",
values : [ "fatal", "error", "warning", "info", "debug", "trace" ]
},
syslog : {
name : "Use Syslog",
type : "boolean"
},
bufferSize : {
name : "Log Buffer Size",
type : "integer"
},
output : {
name : "Output",
type : "section",
file : {
name : "Log File",
type : "string",
readonly : true
}
}
}
};
actions.resultOk(req, res, 200, result);
}
catch (err) {
actions.resultError(req, res, err);
}
}
});
////////////////////////////////////////////////////////////////////////////////
/// @fn JSF_GET_admin_config_configuration
/// @brief returns the current configuration
///
/// @REST{GET /_admin/config/configuration}
///
/// The call returns an object containing the current configuration.
////////////////////////////////////////////////////////////////////////////////
actions.defineHttp({
url : "_admin/config/configuration",
context : "admin",
callback : function (req, res) {
try {
var result = {
database : {
path : {
value : "/tmp/emil/vocbase"
},
access : {
value : "localhost:8529"
}
},
logging : {
level : {
value : "info"
},
syslog : {
value : true
},
bufferSize : {
value : 100
},
output : {
file : {
value : "/var/log/message/arango.log"
}
}
}
};
actions.resultOk(req, res, 200, result);
}
catch (err) {
actions.resultError(req, res, err);
}
}
});
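As a usage sketch (not part of this commit), a client could combine the metadata returned by GET /_admin/config/description with the values returned by GET /_admin/config/configuration. Here `description` and `configuration` are assumed to be the parsed response bodies of the two calls above, and `print` is assumed to be available as in the server's JavaScript shell:

// Walk the description object and pair each setting with its value from
// the configuration object; sections (type "section") nest further settings.
function listSettings (description, configuration, prefix) {
  var key;

  for (key in description) {
    if (description.hasOwnProperty(key) && key !== "name" && key !== "type") {
      var desc = description[key];
      var conf = configuration[key];

      if (desc.type === "section") {
        // e.g. logging.output is a section nested inside logging
        listSettings(desc, conf || {}, prefix + key + ".");
      }
      else if (conf !== undefined) {
        print(prefix + key + " (" + desc.name + ") = " + conf.value);
      }
    }
  }
}

// e.g. listSettings(description, configuration, "") prints lines such as:
//   logging.level (Log Level) = info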
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////