mirror of https://gitee.com/bigwinds/arangodb

Merge branch 'feature/rocksdb-geoindex' of https://github.com/arangodb/arangodb into devel

# Conflicts:
#	arangod/RocksDBEngine/RocksDBKeyBounds.cpp

commit c320498540

@@ -0,0 +1,24 @@
1. Create new GeoIndex - partial
2. Attach to existing GeoIndex - partial
3. Drop GeoIndex
4. Disconnect from GeoIndex
5. PotRead - partial
6. PotWrite - partial
7. SlotRead - partial
8. SlotWrite - partial
9. CreatePot (take the next number)
10. CreateSlot (take the next number)
11. Also needs start and stop transaction routines, and code that uses them

These are the points that Richard had planned for the integration. He knows
practically nothing about the layer above.

Questions for Richard:

// - How do we communicate non-existent values in SlotRead / PotRead?
//   - Return value != 0?

- The GeoIx struct contains some fields, `potct` and `slotct`, which we would need to store in and recover from RocksDB.

- The functions `CreatePot` and `CreateSlot` are mentioned above, but they are not in the source code?
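One possible answer to the `potct`/`slotct` question, and essentially what the RocksDBGeoIndex constructor in this commit ends up doing, is to recover the counters from the key range itself instead of persisting them separately: seek backwards from the end of the pot (or slot) key range and decode the highest allocated number. A standalone sketch with plain RocksDB follows; the key layout here ("<prefix><big-endian uint32>") is hypothetical, while the real code uses RocksDBKeyBounds::GeoIndex and RocksDBKey::geoValues.

// Sketch: recover a monotonically increasing counter (e.g. potct/slotct) from
// the keys themselves. Keys are assumed to be "<prefix><big-endian uint32>",
// so the largest allocated number is simply the last key in the prefix range.
#include <rocksdb/db.h>
#include <cstdint>
#include <memory>
#include <string>

uint32_t lastAllocated(rocksdb::DB* db, std::string const& prefix) {
  std::string end = prefix;
  end.push_back('\xff');  // upper bound of the prefix range (hypothetical layout)
  std::unique_ptr<rocksdb::Iterator> it(db->NewIterator(rocksdb::ReadOptions()));
  it->SeekForPrev(end);  // position on the last key <= end
  if (it->Valid() && it->key().starts_with(prefix) &&
      it->key().size() >= prefix.size() + 4) {
    uint32_t n = 0;  // decode the big-endian number that follows the prefix
    for (size_t i = 0; i < 4; ++i) {
      n = (n << 8) | static_cast<unsigned char>(it->key()[prefix.size() + i]);
    }
    return n;
  }
  return 0;  // nothing allocated yet
}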
File diff suppressed because it is too large
@@ -198,6 +198,8 @@ void prepareTraversalsRule(Optimizer* opt, std::unique_ptr<ExecutionPlan> plan,

/// @brief moves simple subqueries one level higher
void inlineSubqueriesRule(Optimizer*, std::unique_ptr<ExecutionPlan>, OptimizerRule const*);

void geoIndexRule(aql::Optimizer* opt, std::unique_ptr<aql::ExecutionPlan> plan, aql::OptimizerRule const* rule);

}  // namespace aql
}  // namespace arangodb
@@ -221,6 +221,10 @@ void OptimizerRulesFeature::addRules() {
  // patch update statements
  registerRule("patch-update-statements", patchUpdateStatementsRule,
               OptimizerRule::patchUpdateStatementsRule_pass9, DoesNotCreateAdditionalPlans, CanBeDisabled);

  // try to replace FILTER and SORT on DISTANCE() with a geo index lookup
  OptimizerRulesFeature::registerRule("geo-index-optimizer", geoIndexRule,
                                      OptimizerRule::applyGeoIndexRule, false, true);

  if (arangodb::ServerState::instance()->isCoordinator()) {
    // distribute operations in cluster
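For orientation, this is the minimal shape of a rule function as this commit uses it (a sketch against ArangoDB's own types, not standalone; the rule name is made up): a rule receives the plan, may rewrite it, and must always hand it back via addPlan(), flagging whether anything changed.

// Sketch of a no-op optimizer rule (ArangoDB types assumed from this commit).
void exampleNoopRule(aql::Optimizer* opt,
                     std::unique_ptr<aql::ExecutionPlan> plan,
                     aql::OptimizerRule const* rule) {
  bool modified = false;
  // ... inspect and possibly rewrite the plan here ...
  opt->addPlan(std::move(plan), rule, modified);  // mandatory hand-back
}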
@@ -41,549 +41,12 @@ using namespace arangodb::aql;
using EN = arangodb::aql::ExecutionNode;

void MMFilesOptimizerRules::registerResources() {
  // try to replace FILTER and SORT on DISTANCE() with a geo index lookup
  OptimizerRulesFeature::registerRule("geo-index-optimizer", geoIndexRule,
                                      OptimizerRule::applyGeoIndexRule, false, true);

  // remove SORT RAND() if appropriate
  OptimizerRulesFeature::registerRule("remove-sort-rand", removeSortRandRule,
                                      OptimizerRule::removeSortRandRule_pass5, false, true);
}

struct MMFilesGeoIndexInfo {
  operator bool() const { return distanceNode && valid; }
  void invalidate() { valid = false; }
  MMFilesGeoIndexInfo()
    : collectionNode(nullptr)
    , executionNode(nullptr)
    , indexNode(nullptr)
    , setter(nullptr)
    , expressionParent(nullptr)
    , expressionNode(nullptr)
    , distanceNode(nullptr)
    , index(nullptr)
    , range(nullptr)
    , executionNodeType(EN::NORESULTS)
    , within(false)
    , lessgreaterequal(false)
    , valid(true)
    , constantPair{nullptr, nullptr}
  {}
  EnumerateCollectionNode* collectionNode;  // node that will be replaced by the (geo) IndexNode
  ExecutionNode* executionNode;             // start node that is a sort or filter
  IndexNode* indexNode;                     // the (geo) IndexNode that replaces the collection node
  CalculationNode* setter;                  // node that contains the condition for the filter or sort
  AstNode* expressionParent;                // AstNode that is the parent of the expression node
  AstNode* expressionNode;                  // AstNode that contains the sort/filter condition
  AstNode* distanceNode;                    // AstNode that contains the distance parameters
  std::shared_ptr<arangodb::Index> index;   // pointer to the geo index
  AstNode const* range;                     // range for WITHIN
  ExecutionNode::NodeType executionNodeType;  // type of the execution node: sort or filter
  bool within;            // is this a WITHIN lookup?
  bool lessgreaterequal;  // is this a check for <= / >= (true) or < / > (false)?
  bool valid;             // does this node contain a valid condition?
  std::vector<std::string> longitude;  // access path to the longitude attribute
  std::vector<std::string> latitude;   // access path to the latitude attribute
  std::pair<AstNode*, AstNode*> constantPair;
};

// candidate checking

AstNode* isValueOrRefNode(AstNode* node) {
  // TODO - implement me
  return node;
}

MMFilesGeoIndexInfo isDistanceFunction(AstNode* distanceNode, AstNode* expressionParent) {
  // the expression must exist and it must be a function call
  auto rv = MMFilesGeoIndexInfo{};
  if (distanceNode->type != NODE_TYPE_FCALL) {
    return rv;
  }

  // get the ast node of the expression
  auto func = static_cast<Function const*>(distanceNode->getData());

  // we're looking for "DISTANCE()", which is a function call
  // with a single member: its parameter array
  if (func->externalName != "DISTANCE" || distanceNode->numMembers() != 1) {
    return rv;
  }
  rv.distanceNode = distanceNode;
  rv.expressionNode = distanceNode;
  rv.expressionParent = expressionParent;
  return rv;
}

MMFilesGeoIndexInfo isGeoFilterExpression(AstNode* node, AstNode* expressionParent) {
  // a binary comparison must be at the top
  bool dist_first = true;
  bool lessEqual = true;
  auto rv = MMFilesGeoIndexInfo{};
  if (node->type != NODE_TYPE_OPERATOR_BINARY_GE &&
      node->type != NODE_TYPE_OPERATOR_BINARY_GT &&
      node->type != NODE_TYPE_OPERATOR_BINARY_LE &&
      node->type != NODE_TYPE_OPERATOR_BINARY_LT) {
    return rv;
  }
  if (node->type == NODE_TYPE_OPERATOR_BINARY_GE || node->type == NODE_TYPE_OPERATOR_BINARY_GT) {
    dist_first = false;
  }
  if (node->type == NODE_TYPE_OPERATOR_BINARY_GT || node->type == NODE_TYPE_OPERATOR_BINARY_LT) {
    lessEqual = false;
  }

  if (node->numMembers() != 2) {
    return rv;
  }

  AstNode* first = node->getMember(0);
  AstNode* second = node->getMember(1);

  auto eval_stuff = [](bool dist_first, bool lessEqual, MMFilesGeoIndexInfo&& dist_fun, AstNode* value_node) {
    if (dist_first && dist_fun && value_node) {
      dist_fun.within = true;
      dist_fun.range = value_node;
      dist_fun.lessgreaterequal = lessEqual;
    } else {
      dist_fun.invalidate();
    }
    return dist_fun;
  };

  rv = eval_stuff(dist_first, lessEqual, isDistanceFunction(first, expressionParent), isValueOrRefNode(second));
  if (!rv) {
    rv = eval_stuff(dist_first, lessEqual, isDistanceFunction(second, expressionParent), isValueOrRefNode(first));
  }

  if (rv) {
    // this must be set after checking whether the node contains a distance node
    rv.expressionNode = node;
  }

  return rv;
}
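The operator classification above is easier to see in isolation. Below is a standalone sketch (hypothetical names, not ArangoDB code) of the intended mapping: a comparison is only usable for a WITHIN lookup if it bounds DISTANCE() from above, and <= / >= make the radius check inclusive.

#include <cassert>

enum class CmpOp { LT, LE, GT, GE };

struct WithinSpec {
  bool usable;     // can a geo WITHIN lookup serve this comparison?
  bool inclusive;  // does a point exactly on the radius match?
};

// distFirst: DISTANCE(...) is the left-hand operand of the comparison.
WithinSpec classify(CmpOp op, bool distFirst) {
  bool inclusive = (op == CmpOp::LE || op == CmpOp::GE);
  bool lessThan = (op == CmpOp::LT || op == CmpOp::LE);
  // usable iff the distance is bounded from above:
  // DISTANCE(...) < r   or   r > DISTANCE(...)
  return {distFirst == lessThan, inclusive};
}

int main() {
  assert(classify(CmpOp::LE, true).usable);    // DISTANCE(...) <= r
  assert(classify(CmpOp::LE, true).inclusive);
  assert(classify(CmpOp::GT, false).usable);   // r > DISTANCE(...)
  assert(!classify(CmpOp::GE, true).usable);   // DISTANCE(...) >= r: no upper bound
  return 0;
}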

MMFilesGeoIndexInfo iterativePreorderWithCondition(EN::NodeType type, AstNode* root, MMFilesGeoIndexInfo (*condition)(AstNode*, AstNode*)) {
  // returns on the first hit
  if (!root) {
    return MMFilesGeoIndexInfo{};
  }
  std::vector<std::pair<AstNode*, AstNode*>> nodestack;
  nodestack.push_back({root, nullptr});

  while (nodestack.size()) {
    auto current = nodestack.back();
    nodestack.pop_back();
    MMFilesGeoIndexInfo rv = condition(current.first, current.second);
    if (rv) {
      return rv;
    }

    if (type == EN::FILTER) {
      if (current.first->type == NODE_TYPE_OPERATOR_BINARY_AND || current.first->type == NODE_TYPE_OPERATOR_NARY_AND) {
        for (std::size_t i = 0; i < current.first->numMembers(); ++i) {
          nodestack.push_back({current.first->getMember(i), current.first});
        }
      }
    } else if (type == EN::SORT) {
      // must be the only sort condition
    }
  }
  return MMFilesGeoIndexInfo{};
}

MMFilesGeoIndexInfo geoDistanceFunctionArgCheck(std::pair<AstNode const*, AstNode const*> const& pair,
                                                ExecutionPlan* plan, MMFilesGeoIndexInfo info) {
  std::pair<Variable const*, std::vector<arangodb::basics::AttributeName>> attributeAccess1;
  std::pair<Variable const*, std::vector<arangodb::basics::AttributeName>> attributeAccess2;

  // first and second should be based on the same document - we need to provide the document
  // in order to see which collection is bound to it and whether that collection supports a geo index
  if (!pair.first->isAttributeAccessForVariable(attributeAccess1) ||
      !pair.second->isAttributeAccessForVariable(attributeAccess2)) {
    info.invalidate();
    return info;
  }

  TRI_ASSERT(attributeAccess1.first != nullptr);
  TRI_ASSERT(attributeAccess2.first != nullptr);

  // expect accesses of the form doc.attribute
  auto setter1 = plan->getVarSetBy(attributeAccess1.first->id);
  auto setter2 = plan->getVarSetBy(attributeAccess2.first->id);

  if (setter1 != nullptr &&
      setter2 != nullptr &&
      setter1 == setter2 &&
      setter1->getType() == EN::ENUMERATE_COLLECTION) {
    auto collNode = reinterpret_cast<EnumerateCollectionNode*>(setter1);
    auto coll = collNode->collection();  // what kind of indexes does it have on which attributes
    auto lcoll = coll->getCollection();
    // TODO - check collection for suitable geo-indexes
    for (auto indexShardPtr : lcoll->getIndexes()) {
      // get the real index
      arangodb::Index& index = *indexShardPtr.get();

      // check if the current index is a geo-index
      if (index.type() != arangodb::Index::IndexType::TRI_IDX_TYPE_GEO1_INDEX &&
          index.type() != arangodb::Index::IndexType::TRI_IDX_TYPE_GEO2_INDEX) {
        continue;
      }

      TRI_ASSERT(index.fields().size() == 2);

      // check that the access paths of the attributes in the AST match those of the index
      if (index.fields()[0] == attributeAccess1.second &&
          index.fields()[1] == attributeAccess2.second) {
        info.collectionNode = collNode;
        info.index = indexShardPtr;
        TRI_AttributeNamesJoinNested(attributeAccess1.second, info.longitude, true);
        TRI_AttributeNamesJoinNested(attributeAccess2.second, info.latitude, true);
        return info;
      }
    }
  }

  info.invalidate();
  return info;
}

bool checkDistanceArguments(MMFilesGeoIndexInfo& info, ExecutionPlan* plan) {
  if (!info) {
    return false;
  }

  auto const& functionArguments = info.distanceNode->getMember(0);
  if (functionArguments->numMembers() < 4) {
    return false;
  }

  std::pair<AstNode*, AstNode*> argPair1 = {functionArguments->getMember(0), functionArguments->getMember(1)};
  std::pair<AstNode*, AstNode*> argPair2 = {functionArguments->getMember(2), functionArguments->getMember(3)};

  MMFilesGeoIndexInfo result1 = geoDistanceFunctionArgCheck(argPair1, plan, info /*copy*/);
  MMFilesGeoIndexInfo result2 = geoDistanceFunctionArgCheck(argPair2, plan, info /*copy*/);
  // info now contains the access path to the collection

  // XOR: exactly one of the argument pairs must match a geo index
  if ((!result1 && !result2) || (result1 && result2)) {
    info.invalidate();
    return false;
  }

  if (result1) {
    info = std::move(result1);
    info.constantPair = std::move(argPair2);
  } else {
    info = std::move(result2);
    info.constantPair = std::move(argPair1);
  }

  return true;
}

// checks a single sort or filter node
MMFilesGeoIndexInfo identifyGeoOptimizationCandidate(ExecutionNode::NodeType type, ExecutionPlan* plan, ExecutionNode* n) {
  ExecutionNode* setter = nullptr;
  auto rv = MMFilesGeoIndexInfo{};
  switch (type) {
    case EN::SORT: {
      auto node = static_cast<SortNode*>(n);
      auto& elements = node->getElements();

      // we're looking for "SORT DISTANCE(x,y,a,b) ASC", which has just one sort criterion
      if (!(elements.size() == 1 && elements[0].ascending)) {
        // the second test makes sure the SORT is ascending
        return rv;
      }

      // variable of the sort expression
      auto variable = elements[0].var;
      TRI_ASSERT(variable != nullptr);

      // find the expression that is bound to the variable:
      // get the expression node that holds the calculation
      setter = plan->getVarSetBy(variable->id);
    } break;

    case EN::FILTER: {
      auto node = static_cast<FilterNode*>(n);

      // filter nodes always have one input variable
      auto varsUsedHere = node->getVariablesUsedHere();
      TRI_ASSERT(varsUsedHere.size() == 1);

      // now check who introduced our variable
      auto variable = varsUsedHere[0];
      setter = plan->getVarSetBy(variable->id);
    } break;

    default:
      return rv;
  }

  // common part - extract the AstNode from the setter, which is a calculation node
  if (setter == nullptr || setter->getType() != EN::CALCULATION) {
    return rv;
  }

  auto expression = static_cast<CalculationNode*>(setter)->expression();

  // the expression must exist and it must have an astNode
  if (expression == nullptr || expression->node() == nullptr) {
    // not the right type of node
    return rv;
  }
  AstNode* node = expression->nodeForModification();

  // FIXME -- technical debt -- code duplication / not all cases covered
  switch (type) {
    case EN::SORT: {
      // check comma-separated parts of the condition: cond0, cond1, cond2
      rv = isDistanceFunction(node, nullptr);
    } break;

    case EN::FILTER: {
      rv = iterativePreorderWithCondition(type, node, &isGeoFilterExpression);
    } break;

    default:
      rv.invalidate();  // not required, but make sure the result is invalid
  }

  rv.executionNode = n;
  rv.executionNodeType = type;
  rv.setter = static_cast<CalculationNode*>(setter);

  checkDistanceArguments(rv, plan);

  return rv;
}

// modify the plan

// builds a condition that can be used with the index interface and
// contains all parameters required by the MMFilesGeoIndex
std::unique_ptr<Condition> buildGeoCondition(ExecutionPlan* plan, MMFilesGeoIndexInfo& info) {
  AstNode* lat = info.constantPair.first;
  AstNode* lon = info.constantPair.second;
  auto ast = plan->getAst();
  auto varAstNode = ast->createNodeReference(info.collectionNode->outVariable());

  auto args = ast->createNodeArray(info.within ? 4 : 3);
  args->addMember(varAstNode);  // collection
  args->addMember(lat);         // latitude
  args->addMember(lon);         // longitude

  AstNode* cond = nullptr;
  if (info.within) {
    // WITHIN
    args->addMember(info.range);
    auto lessValue = ast->createNodeValueBool(info.lessgreaterequal);
    args->addMember(lessValue);
    cond = ast->createNodeFunctionCall("WITHIN", args);
  } else {
    // NEAR
    cond = ast->createNodeFunctionCall("NEAR", args);
  }

  TRI_ASSERT(cond != nullptr);

  auto condition = std::make_unique<Condition>(ast);
  condition->andCombine(cond);
  condition->normalize(plan);
  return condition;
}

void replaceGeoCondition(ExecutionPlan* plan, MMFilesGeoIndexInfo& info) {
  if (info.expressionParent && info.executionNodeType == EN::FILTER) {
    auto ast = plan->getAst();
    CalculationNode* newNode = nullptr;
    Expression* expr = new Expression(ast, static_cast<CalculationNode*>(info.setter)->expression()->nodeForModification()->clone(ast));

    try {
      newNode = new CalculationNode(plan, plan->nextId(), expr, static_cast<CalculationNode*>(info.setter)->outVariable());
    } catch (...) {
      delete expr;
      throw;
    }

    plan->registerNode(newNode);
    plan->replaceNode(info.setter, newNode);

    bool done = false;
    ast->traverseAndModify(newNode->expression()->nodeForModification(), [&done](AstNode* node, void* data) {
      if (done) {
        return node;
      }
      if (node->type == NODE_TYPE_OPERATOR_BINARY_AND) {
        for (std::size_t i = 0; i < node->numMembers(); i++) {
          if (isGeoFilterExpression(node->getMemberUnchecked(i), node)) {
            done = true;
            return node->getMemberUnchecked(i ? 0 : 1);
          }
        }
      }
      return node;
    },
    nullptr);

    if (done) {
      return;
    }

    auto replaceInfo = iterativePreorderWithCondition(EN::FILTER, newNode->expression()->nodeForModification(), &isGeoFilterExpression);
    if (newNode->expression()->nodeForModification() == replaceInfo.expressionParent) {
      if (replaceInfo.expressionParent->type == NODE_TYPE_OPERATOR_BINARY_AND) {
        for (std::size_t i = 0; i < replaceInfo.expressionParent->numMembers(); ++i) {
          if (replaceInfo.expressionParent->getMember(i) != replaceInfo.expressionNode) {
            newNode->expression()->replaceNode(replaceInfo.expressionParent->getMember(i));
            return;
          }
        }
      }
    }

    // else {
    //   // COULD BE IMPROVED
    //   if (replaceInfo.expressionParent->type == NODE_TYPE_OPERATOR_BINARY_AND) {
    //     // deleting the AST node would require the parent of the expression parent;
    //     // we do not have it available here, so we just replace the node with true
    //     return;
    //   }
    // }

    // fallback
    auto replacement = ast->createNodeValueBool(true);
    for (std::size_t i = 0; i < replaceInfo.expressionParent->numMembers(); ++i) {
      if (replaceInfo.expressionParent->getMember(i) == replaceInfo.expressionNode) {
        replaceInfo.expressionParent->removeMemberUnchecked(i);
        replaceInfo.expressionParent->addMember(replacement);
      }
    }
  }
}

// applies the optimization to a candidate
bool applyGeoOptimization(bool near, ExecutionPlan* plan, MMFilesGeoIndexInfo& first, MMFilesGeoIndexInfo& second) {
  if (!first && !second) {
    return false;
  }

  if (!first) {
    first = std::move(second);
    second.invalidate();
  }

  // we are not allowed to be in an inner loop
  if (first.collectionNode->isInInnerLoop() && first.executionNodeType == EN::SORT) {
    return false;
  }

  std::unique_ptr<Condition> condition(buildGeoCondition(plan, first));

  auto inode = new IndexNode(
      plan, plan->nextId(), first.collectionNode->vocbase(),
      first.collectionNode->collection(), first.collectionNode->outVariable(),
      std::vector<transaction::Methods::IndexHandle>{transaction::Methods::IndexHandle{first.index}},
      condition.get(), false);
  plan->registerNode(inode);
  condition.release();

  plan->replaceNode(first.collectionNode, inode);

  replaceGeoCondition(plan, first);
  replaceGeoCondition(plan, second);

  // if the execution node is a SORT, or a FILTER without further sub-conditions,
  // the node can be unlinked
  auto unlinkNode = [&](MMFilesGeoIndexInfo& info) {
    if (info && !info.expressionParent) {
      if (!arangodb::ServerState::instance()->isCoordinator() || info.executionNodeType == EN::FILTER) {
        plan->unlinkNode(info.executionNode);
      } else if (info.executionNodeType == EN::SORT) {
        // make sure the sort is not reinserted in the cluster
        static_cast<SortNode*>(info.executionNode)->_reinsertInCluster = false;
      }
    }
  };

  unlinkNode(first);
  unlinkNode(second);

  // signal that the plan has been changed
  return true;
}
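To make the effect of applyGeoOptimization concrete, this is the kind of rewrite it performs, shown here only as illustrative AQL strings (the collection and bind-parameter names are made up; the strings are printed, not executed):

#include <iostream>
#include <string>

int main() {
  // Before: full collection scan, then post-filtering on DISTANCE().
  std::string before =
      "FOR d IN places "
      "FILTER DISTANCE(d.lat, d.lon, @lat, @lon) <= @radius RETURN d";

  // After: the EnumerateCollectionNode is replaced by an IndexNode whose
  // condition is the equivalent WITHIN() call (inclusive radius -> true).
  std::string after =
      "FOR d IN WITHIN(places, @lat, @lon, @radius, true) RETURN d";

  std::cout << before << "\n  =>\n" << after << "\n";
  return 0;
}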

void MMFilesOptimizerRules::geoIndexRule(Optimizer* opt,
                                         std::unique_ptr<ExecutionPlan> plan,
                                         OptimizerRule const* rule) {
  SmallVector<ExecutionNode*>::allocator_type::arena_type a;
  SmallVector<ExecutionNode*> nodes{a};
  bool modified = false;
  // inspect each end node and work upwards to the SingletonNode
  plan->findEndNodes(nodes, true);

  for (auto& node : nodes) {
    MMFilesGeoIndexInfo sortInfo{};
    MMFilesGeoIndexInfo filterInfo{};
    auto current = node;

    while (current) {
      switch (current->getType()) {
        case EN::SORT: {
          sortInfo = identifyGeoOptimizationCandidate(EN::SORT, plan.get(), current);
          break;
        }
        case EN::FILTER: {
          filterInfo = identifyGeoOptimizationCandidate(EN::FILTER, plan.get(), current);
          break;
        }
        case EN::ENUMERATE_COLLECTION: {
          EnumerateCollectionNode* collnode = static_cast<EnumerateCollectionNode*>(current);
          if ((sortInfo && sortInfo.collectionNode != collnode) ||
              (filterInfo && filterInfo.collectionNode != collnode)) {
            filterInfo.invalidate();
            sortInfo.invalidate();
            break;
          }
          if (applyGeoOptimization(true, plan.get(), filterInfo, sortInfo)) {
            modified = true;
            filterInfo.invalidate();
            sortInfo.invalidate();
          }
          break;
        }

        case EN::INDEX:
        case EN::COLLECT: {
          filterInfo.invalidate();
          sortInfo.invalidate();
          break;
        }

        default: {
          // skip - do nothing
          break;
        }
      }

      current = current->getFirstDependency();  // inspect the next node
    }
  }

  opt->addPlan(std::move(plan), rule, modified);
}

/// @brief remove SORT RAND() if appropriate
void MMFilesOptimizerRules::removeSortRandRule(Optimizer* opt, std::unique_ptr<ExecutionPlan> plan,
                                               OptimizerRule const* rule) {
@@ -35,8 +35,6 @@ struct OptimizerRule;

struct MMFilesOptimizerRules {
  static void registerResources();

  static void geoIndexRule(aql::Optimizer* opt, std::unique_ptr<aql::ExecutionPlan> plan, aql::OptimizerRule const* rule);

  static void removeSortRandRule(aql::Optimizer* opt, std::unique_ptr<aql::ExecutionPlan> plan, aql::OptimizerRule const* rule);
};
@@ -13,9 +13,11 @@ set(ROCKSDB_SOURCES
|
|||
RocksDBEngine/RocksDBEngine.cpp
|
||||
RocksDBEngine/RocksDBExportCursor.cpp
|
||||
RocksDBEngine/RocksDBFulltextIndex.cpp
|
||||
RocksDBEngine/RocksDBGeoIndex.cpp
|
||||
RocksDBEngine/RocksDBGeoIndexImpl.cpp
|
||||
RocksDBEngine/RocksDBHashIndex.cpp
|
||||
RocksDBEngine/RocksDBIndex.cpp
|
||||
RocksDBEngine/RocksDBIndexFactory.cpp
|
||||
RocksDBEngine/RocksDBHashIndex.cpp
|
||||
RocksDBEngine/RocksDBKey.cpp
|
||||
RocksDBEngine/RocksDBKeyBounds.cpp
|
||||
RocksDBEngine/RocksDBLogValue.cpp
|
||||
|
@@ -33,8 +35,8 @@ set(ROCKSDB_SOURCES
|
|||
RocksDBEngine/RocksDBTransactionState.cpp
|
||||
RocksDBEngine/RocksDBTypes.cpp
|
||||
RocksDBEngine/RocksDBV8Functions.cpp
|
||||
RocksDBEngine/RocksDBVPackIndex.cpp
|
||||
RocksDBEngine/RocksDBValue.cpp
|
||||
RocksDBEngine/RocksDBView.cpp
|
||||
RocksDBEngine/RocksDBVPackIndex.cpp
|
||||
)
|
||||
set(ROCKSDB_SOURCES ${ROCKSDB_SOURCES} PARENT_SCOPE)
|
||||
|
|
|
@@ -25,9 +25,12 @@
|
|||
#include "Aql/AqlFunctionFeature.h"
|
||||
#include "Aql/Function.h"
|
||||
#include "RocksDBEngine/RocksDBFulltextIndex.h"
|
||||
#include "RocksDBEngine/RocksDBGeoIndex.h"
|
||||
#include "RocksDBEngine/RocksDBToken.h"
|
||||
#include "StorageEngine/DocumentIdentifierToken.h"
|
||||
#include "Transaction/Helpers.h"
|
||||
#include "Transaction/Methods.h"
|
||||
#include "StorageEngine/TransactionState.h"
|
||||
#include "Utils/CollectionNameResolver.h"
|
||||
#include "VocBase/LogicalCollection.h"
|
||||
#include "VocBase/ManagedDocumentResult.h"
|
||||
|
@@ -134,7 +137,7 @@ AqlValue RocksDBAqlFunctions::Fulltext(
|
|||
}
|
||||
// do we need this in rocksdb?
|
||||
trx->pinData(cid);
|
||||
|
||||
|
||||
transaction::BuilderLeaser builder(trx);
|
||||
FulltextQuery parsedQuery;
|
||||
Result res = fulltextIndex->parseQueryString(queryString, parsedQuery);
|
||||
|
@@ -149,20 +152,242 @@ AqlValue RocksDBAqlFunctions::Fulltext(
|
|||
return AqlValue(builder.get());
|
||||
}
|
||||
|
||||
/// @brief load the geo index for the given collection name
|
||||
static arangodb::RocksDBGeoIndex* getGeoIndex(
|
||||
transaction::Methods* trx, TRI_voc_cid_t const& cid,
|
||||
std::string const& collectionName) {
|
||||
// NOTE:
// Due to the trx lock, the shared index stays valid
// as long as the trx stays valid,
// so it is safe to return the raw pointer;
// it may only be used until the trx is finished.
|
||||
trx->addCollectionAtRuntime(cid, collectionName);
|
||||
Result res = trx->state()->ensureCollections();
|
||||
if (!res.ok()) {
|
||||
THROW_ARANGO_EXCEPTION_MESSAGE(res.errorNumber(),
|
||||
res.errorMessage());
|
||||
}
|
||||
|
||||
auto document = trx->documentCollection(cid);
|
||||
if (document == nullptr) {
|
||||
THROW_ARANGO_EXCEPTION_FORMAT(TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND, "'%s'",
|
||||
collectionName.c_str());
|
||||
}
|
||||
|
||||
arangodb::RocksDBGeoIndex* index = nullptr;
|
||||
for (auto const& idx : document->getIndexes()) {
|
||||
if (idx->type() == arangodb::Index::TRI_IDX_TYPE_GEO1_INDEX ||
|
||||
idx->type() == arangodb::Index::TRI_IDX_TYPE_GEO2_INDEX) {
|
||||
index = static_cast<arangodb::RocksDBGeoIndex*>(idx.get());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (index == nullptr) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_GEO_INDEX_MISSING,
|
||||
collectionName.c_str());
|
||||
}
|
||||
|
||||
trx->pinData(cid);
|
||||
return index;
|
||||
}
|
||||
|
||||
static AqlValue buildGeoResult(transaction::Methods* trx,
|
||||
LogicalCollection* collection,
|
||||
arangodb::aql::Query* query,
|
||||
GeoCoordinates* cors, TRI_voc_cid_t const& cid,
|
||||
std::string const& attributeName) {
|
||||
if (cors == nullptr) {
|
||||
return AqlValue(arangodb::basics::VelocyPackHelper::EmptyArrayValue());
|
||||
}
|
||||
|
||||
size_t const nCoords = cors->length;
|
||||
if (nCoords == 0) {
|
||||
GeoIndex_CoordinatesFree(cors);
|
||||
return AqlValue(arangodb::basics::VelocyPackHelper::EmptyArrayValue());
|
||||
}
|
||||
|
||||
struct geo_coordinate_distance_t {
|
||||
geo_coordinate_distance_t(double distance, RocksDBToken token)
|
||||
: _distance(distance), _token(token) {}
|
||||
double _distance;
|
||||
RocksDBToken _token;
|
||||
};
|
||||
|
||||
std::vector<geo_coordinate_distance_t> distances;
|
||||
|
||||
try {
|
||||
distances.reserve(nCoords);
|
||||
|
||||
for (size_t i = 0; i < nCoords; ++i) {
|
||||
distances.emplace_back(geo_coordinate_distance_t(
|
||||
cors->distances[i], RocksDBToken(cors->coordinates[i].data)));
|
||||
}
|
||||
} catch (...) {
|
||||
GeoIndex_CoordinatesFree(cors);
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
|
||||
GeoIndex_CoordinatesFree(cors);
|
||||
|
||||
// sort result by distance
|
||||
std::sort(distances.begin(), distances.end(),
|
||||
[](geo_coordinate_distance_t const& left,
|
||||
geo_coordinate_distance_t const& right) {
|
||||
return left._distance < right._distance;
|
||||
});
|
||||
|
||||
try {
|
||||
ManagedDocumentResult mmdr;
|
||||
transaction::BuilderLeaser builder(trx);
|
||||
builder->openArray();
|
||||
if (!attributeName.empty()) {
|
||||
// We have to copy the entire document
|
||||
for (auto& it : distances) {
|
||||
VPackObjectBuilder docGuard(builder.get());
|
||||
builder->add(attributeName, VPackValue(it._distance));
|
||||
if (collection->readDocument(trx, it._token, mmdr)) {
|
||||
VPackSlice doc(mmdr.vpack());
|
||||
for (auto const& entry : VPackObjectIterator(doc)) {
|
||||
std::string key = entry.key.copyString();
|
||||
if (key != attributeName) {
|
||||
builder->add(key, entry.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
for (auto& it : distances) {
|
||||
if (collection->readDocument(trx, it._token, mmdr)) {
|
||||
mmdr.addToBuilder(*builder.get(), true);
|
||||
}
|
||||
}
|
||||
}
|
||||
builder->close();
|
||||
return AqlValue(builder.get());
|
||||
} catch (...) {
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
}
|
||||
|
||||
/// @brief function NEAR
|
||||
AqlValue RocksDBAqlFunctions::Near(arangodb::aql::Query* query,
|
||||
transaction::Methods* trx,
|
||||
VPackFunctionParameters const& parameters) {
|
||||
// TODO (obi): geo functions are not implemented for the RocksDB engine yet;
// bail out early. The code below is unreachable until this throw is removed.
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_GEO_INDEX_MISSING, "NEAR");
|
||||
ValidateParameters(parameters, "NEAR", 3, 5);
|
||||
|
||||
AqlValue collectionValue = ExtractFunctionParameterValue(trx, parameters, 0);
|
||||
if (!collectionValue.isString()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "NEAR");
|
||||
}
|
||||
|
||||
std::string const collectionName(collectionValue.slice().copyString());
|
||||
|
||||
AqlValue latitude = ExtractFunctionParameterValue(trx, parameters, 1);
|
||||
AqlValue longitude = ExtractFunctionParameterValue(trx, parameters, 2);
|
||||
|
||||
if (!latitude.isNumber() || !longitude.isNumber()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "NEAR");
|
||||
}
|
||||
|
||||
// extract limit
|
||||
int64_t limitValue = 100;
|
||||
|
||||
if (parameters.size() > 3) {
|
||||
AqlValue limit = ExtractFunctionParameterValue(trx, parameters, 3);
|
||||
|
||||
if (limit.isNumber()) {
|
||||
limitValue = limit.toInt64(trx);
|
||||
} else if (!limit.isNull(true)) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "NEAR");
|
||||
}
|
||||
}
|
||||
|
||||
std::string attributeName;
|
||||
if (parameters.size() > 4) {
|
||||
// have a distance attribute
|
||||
AqlValue distanceValue = ExtractFunctionParameterValue(trx, parameters, 4);
|
||||
|
||||
if (!distanceValue.isNull(true) && !distanceValue.isString()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "NEAR");
|
||||
}
|
||||
|
||||
if (distanceValue.isString()) {
|
||||
attributeName = distanceValue.slice().copyString();
|
||||
}
|
||||
}
|
||||
|
||||
TRI_voc_cid_t cid = trx->resolver()->getCollectionIdLocal(collectionName);
|
||||
arangodb::RocksDBGeoIndex* index = getGeoIndex(trx, cid, collectionName);
|
||||
|
||||
TRI_ASSERT(index != nullptr);
|
||||
TRI_ASSERT(trx->isPinned(cid));
|
||||
|
||||
GeoCoordinates* cors =
|
||||
index->nearQuery(trx, latitude.toDouble(trx), longitude.toDouble(trx),
|
||||
static_cast<size_t>(limitValue));
|
||||
|
||||
return buildGeoResult(trx, index->collection(), query, cors, cid,
|
||||
attributeName);
|
||||
}
|
||||
|
||||
/// @brief function WITHIN
|
||||
AqlValue RocksDBAqlFunctions::Within(
|
||||
arangodb::aql::Query* query, transaction::Methods* trx,
|
||||
VPackFunctionParameters const& parameters) {
|
||||
// TODO (obi): geo functions are not implemented for the RocksDB engine yet;
// bail out early. The code below is unreachable until this throw is removed.
THROW_ARANGO_EXCEPTION_PARAMS(TRI_ERROR_QUERY_GEO_INDEX_MISSING, "WITHIN");
|
||||
ValidateParameters(parameters, "WITHIN", 4, 5);
|
||||
|
||||
AqlValue collectionValue = ExtractFunctionParameterValue(trx, parameters, 0);
|
||||
|
||||
if (!collectionValue.isString()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "WITHIN");
|
||||
}
|
||||
|
||||
std::string const collectionName(collectionValue.slice().copyString());
|
||||
|
||||
AqlValue latitudeValue = ExtractFunctionParameterValue(trx, parameters, 1);
|
||||
AqlValue longitudeValue = ExtractFunctionParameterValue(trx, parameters, 2);
|
||||
AqlValue radiusValue = ExtractFunctionParameterValue(trx, parameters, 3);
|
||||
|
||||
if (!latitudeValue.isNumber() || !longitudeValue.isNumber() ||
|
||||
!radiusValue.isNumber()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "WITHIN");
|
||||
}
|
||||
|
||||
std::string attributeName;
|
||||
if (parameters.size() > 4) {
|
||||
// have a distance attribute
|
||||
AqlValue distanceValue = ExtractFunctionParameterValue(trx, parameters, 4);
|
||||
|
||||
if (!distanceValue.isNull(true) && !distanceValue.isString()) {
|
||||
THROW_ARANGO_EXCEPTION_PARAMS(
|
||||
TRI_ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "WITHIN");
|
||||
}
|
||||
|
||||
if (distanceValue.isString()) {
|
||||
attributeName = distanceValue.slice().copyString();
|
||||
}
|
||||
}
|
||||
|
||||
TRI_voc_cid_t cid = trx->resolver()->getCollectionIdLocal(collectionName);
|
||||
arangodb::RocksDBGeoIndex* index = getGeoIndex(trx, cid, collectionName);
|
||||
|
||||
TRI_ASSERT(index != nullptr);
|
||||
TRI_ASSERT(trx->isPinned(cid));
|
||||
|
||||
GeoCoordinates* cors = index->withinQuery(trx, latitudeValue.toDouble(trx),
|
||||
longitudeValue.toDouble(trx),
|
||||
radiusValue.toDouble(trx));
|
||||
|
||||
return buildGeoResult(trx, index->collection(), query, cors, cid,
|
||||
attributeName);
|
||||
}
|
||||
|
||||
void RocksDBAqlFunctions::registerResources() {
|
||||
|
|
|
@@ -183,11 +183,14 @@ void RocksDBCollection::open(bool ignoreErrors) {
|
|||
<< " number of documents: " << counterValue.added();
|
||||
_numberDocuments = counterValue.added() - counterValue.removed();
|
||||
_revisionId = counterValue.revisionId();
|
||||
//_numberDocuments = countKeyRange(db, readOptions,
|
||||
// RocksDBKeyBounds::CollectionDocuments(_objectId));
|
||||
|
||||
for (auto it : getIndexes()) {
|
||||
for (std::shared_ptr<Index> it : getIndexes()) {
|
||||
static_cast<RocksDBIndex*>(it.get())->load();
|
||||
|
||||
if (it->type() == Index::TRI_IDX_TYPE_GEO1_INDEX ||
|
||||
it->type() == Index::TRI_IDX_TYPE_GEO2_INDEX) {
|
||||
_hasGeoIndex = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1263,8 +1266,11 @@ void RocksDBCollection::addIndex(std::shared_ptr<arangodb::Index> idx) {
|
|||
}
|
||||
|
||||
TRI_UpdateTickServer(static_cast<TRI_voc_tick_t>(id));
|
||||
|
||||
_indexes.emplace_back(idx);
|
||||
if (idx->type() == Index::TRI_IDX_TYPE_GEO1_INDEX ||
|
||||
idx->type() == Index::TRI_IDX_TYPE_GEO2_INDEX) {
|
||||
_hasGeoIndex = true;
|
||||
}
|
||||
}
|
||||
|
||||
void RocksDBCollection::addIndexCoordinator(
|
||||
|
|
|
@@ -39,7 +39,7 @@ class Result;
|
|||
class RocksDBPrimaryIndex;
|
||||
class RocksDBVPackIndex;
|
||||
struct RocksDBToken;
|
||||
|
||||
|
||||
class RocksDBCollection final : public PhysicalCollection {
|
||||
friend class RocksDBEngine;
|
||||
friend class RocksDBVPackIndex;
|
||||
|
@@ -48,7 +48,6 @@ class RocksDBCollection final : public PhysicalCollection {
|
|||
constexpr static double defaultLockTimeout = 10.0 * 60.0;
|
||||
|
||||
public:
|
||||
|
||||
public:
|
||||
explicit RocksDBCollection(LogicalCollection*, VPackSlice const& info);
|
||||
explicit RocksDBCollection(LogicalCollection*,
|
||||
|
@@ -121,7 +120,7 @@ class RocksDBCollection final : public PhysicalCollection {
|
|||
/// non-transactional truncate; it continuously commits the deletes
/// and does not fully roll back on failure. Uses trx snapshots to
/// isolate against newer PUTs
|
||||
//void truncateNoTrx(transaction::Methods* trx);
|
||||
// void truncateNoTrx(transaction::Methods* trx);
|
||||
|
||||
DocumentIdentifierToken lookupKey(
|
||||
transaction::Methods* trx,
|
||||
|
@@ -185,10 +184,12 @@ class RocksDBCollection final : public PhysicalCollection {
|
|||
|
||||
/// recalculate counts for the collection in case of failure
|
||||
uint64_t recalculateCounts();
|
||||
|
||||
|
||||
/// trigger rocksdb compaction for documentDB and indexes
|
||||
void compact();
|
||||
void estimateSize(velocypack::Builder &builder);
|
||||
void estimateSize(velocypack::Builder& builder);
|
||||
|
||||
bool hasGeoIndex() { return _hasGeoIndex; }
|
||||
|
||||
private:
|
||||
/// @brief return engine-specific figures
|
||||
|
@@ -231,6 +232,8 @@ class RocksDBCollection final : public PhysicalCollection {
|
|||
std::atomic<uint64_t> _numberDocuments;
|
||||
std::atomic<TRI_voc_rid_t> _revisionId;
|
||||
|
||||
/// upgrade write locks to exclusive locks if this flag is set
|
||||
bool _hasGeoIndex;
|
||||
basics::ReadWriteLock _exclusiveLock;
|
||||
};
|
||||
|
||||
|
@@ -246,6 +249,6 @@ inline RocksDBCollection* toRocksDBCollection(LogicalCollection* logical) {
|
|||
return toRocksDBCollection(phys);
|
||||
}
|
||||
|
||||
}
|
||||
} // namespace arangodb
|
||||
|
||||
#endif
|
||||
|
|
|
@@ -214,9 +214,9 @@ void RocksDBEngine::start() {
|
|||
_options.compaction_readahead_size =
|
||||
static_cast<size_t>(opts->_compactionReadaheadSize);
|
||||
|
||||
_options.env->SetBackgroundThreads(opts->_numThreadsHigh,
|
||||
_options.env->SetBackgroundThreads((int)opts->_numThreadsHigh,
|
||||
rocksdb::Env::Priority::HIGH);
|
||||
_options.env->SetBackgroundThreads(opts->_numThreadsLow,
|
||||
_options.env->SetBackgroundThreads((int)opts->_numThreadsLow,
|
||||
rocksdb::Env::Priority::LOW);
|
||||
|
||||
_options.info_log_level = rocksdb::InfoLogLevel::ERROR_LEVEL;
|
||||
|
|
|
@@ -0,0 +1,589 @@
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// DISCLAIMER
|
||||
///
|
||||
/// Copyright 2014-2017 ArangoDB GmbH, Cologne, Germany
|
||||
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
/// you may not use this file except in compliance with the License.
|
||||
/// You may obtain a copy of the License at
|
||||
///
|
||||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||||
///
|
||||
/// Unless required by applicable law or agreed to in writing, software
|
||||
/// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
/// See the License for the specific language governing permissions and
|
||||
/// limitations under the License.
|
||||
///
|
||||
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
|
||||
///
|
||||
/// @author Jan Christoph Uhde
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#include "RocksDBGeoIndex.h"
|
||||
|
||||
#include "Aql/Ast.h"
|
||||
#include "Aql/AstNode.h"
|
||||
#include "Aql/SortCondition.h"
|
||||
#include "Basics/StringRef.h"
|
||||
#include "Basics/VelocyPackHelper.h"
|
||||
#include "Logger/Logger.h"
|
||||
#include "RocksDBEngine/RocksDBCommon.h"
|
||||
#include "RocksDBEngine/RocksDBToken.h"
|
||||
#include "StorageEngine/TransactionState.h"
|
||||
#include <rocksdb/utilities/transaction_db.h>
|
||||
|
||||
using namespace arangodb;
|
||||
|
||||
RocksDBGeoIndexIterator::RocksDBGeoIndexIterator(
|
||||
LogicalCollection* collection, transaction::Methods* trx,
|
||||
ManagedDocumentResult* mmdr, RocksDBGeoIndex const* index,
|
||||
arangodb::aql::AstNode const* cond, arangodb::aql::Variable const* var)
|
||||
: IndexIterator(collection, trx, mmdr, index),
|
||||
_index(index),
|
||||
_cursor(nullptr),
|
||||
_coor(),
|
||||
_condition(cond),
|
||||
_lat(0.0),
|
||||
_lon(0.0),
|
||||
_near(true),
|
||||
_inclusive(false),
|
||||
_done(false),
|
||||
_radius(0.0) {
|
||||
evaluateCondition();
|
||||
}
|
||||
|
||||
void RocksDBGeoIndexIterator::evaluateCondition() {
|
||||
if (_condition) {
|
||||
auto numMembers = _condition->numMembers();
|
||||
|
||||
TRI_ASSERT(numMembers == 1); // should only be an FCALL
|
||||
auto fcall = _condition->getMember(0);
|
||||
TRI_ASSERT(fcall->type == arangodb::aql::NODE_TYPE_FCALL);
|
||||
TRI_ASSERT(fcall->numMembers() == 1);
|
||||
auto args = fcall->getMember(0);
|
||||
|
||||
numMembers = args->numMembers();
|
||||
TRI_ASSERT(numMembers >= 3);
|
||||
|
||||
_lat = args->getMember(1)->getDoubleValue();
|
||||
_lon = args->getMember(2)->getDoubleValue();
|
||||
|
||||
if (numMembers == 3) {
|
||||
// NEAR
|
||||
_near = true;
|
||||
} else {
|
||||
// WITHIN
|
||||
TRI_ASSERT(numMembers == 5);
|
||||
_near = false;
|
||||
_radius = args->getMember(3)->getDoubleValue();
|
||||
_inclusive = args->getMember(4)->getBoolValue();
|
||||
}
|
||||
} else {
|
||||
LOG_TOPIC(ERR, arangodb::Logger::FIXME)
|
||||
<< "No condition passed to RocksDBGeoIndexIterator constructor";
|
||||
}
|
||||
}
|
||||
|
||||
size_t RocksDBGeoIndexIterator::findLastIndex(GeoCoordinates* coords) const {
|
||||
TRI_ASSERT(coords != nullptr);
|
||||
|
||||
// determine which documents to return...
|
||||
size_t numDocs = coords->length;
|
||||
|
||||
if (!_near) {
|
||||
// WITHIN
|
||||
// only return those documents that are within the specified radius
|
||||
TRI_ASSERT(numDocs > 0);
|
||||
|
||||
// linear scan for the first document outside the specified radius
|
||||
// scan backwards because documents with higher distances are more
|
||||
// interesting
|
||||
int iterations = 0;
|
||||
while ((_inclusive && coords->distances[numDocs - 1] > _radius) ||
|
||||
(!_inclusive && coords->distances[numDocs - 1] >= _radius)) {
|
||||
// document is outside the specified radius!
|
||||
--numDocs;
|
||||
|
||||
if (numDocs == 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (++iterations == 8 && numDocs >= 10) {
|
||||
// switch to a binary search for documents inside/outside the specified
|
||||
// radius
|
||||
size_t l = 0;
|
||||
size_t r = numDocs - 1;
|
||||
|
||||
while (true) {
|
||||
// determine midpoint
|
||||
size_t m = l + ((r - l) / 2);
|
||||
if ((_inclusive && coords->distances[m] > _radius) ||
|
||||
(!_inclusive && coords->distances[m] >= _radius)) {
|
||||
// document is outside the specified radius!
|
||||
if (m == 0) {
|
||||
numDocs = 0;
|
||||
break;
|
||||
}
|
||||
r = m - 1;
|
||||
} else {
|
||||
// still inside the radius
|
||||
numDocs = m + 1;
|
||||
l = m + 1;
|
||||
}
|
||||
|
||||
if (r < l) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return numDocs;
|
||||
}
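Since the distances array is sorted ascending, the boundary that findLastIndex computes with its manual scan and bisection is the same value std::upper_bound / std::lower_bound deliver. A standalone equivalent (sketch; the helper name is hypothetical):

// Count the documents inside the radius, given ascending sorted distances.
#include <algorithm>
#include <cstddef>
#include <vector>

std::size_t numDocsWithin(std::vector<double> const& sortedDistances,
                          double radius, bool inclusive) {
  // inclusive keeps distances <= radius (upper_bound),
  // exclusive keeps distances <  radius (lower_bound)
  auto end = inclusive
      ? std::upper_bound(sortedDistances.begin(), sortedDistances.end(), radius)
      : std::lower_bound(sortedDistances.begin(), sortedDistances.end(), radius);
  return static_cast<std::size_t>(end - sortedDistances.begin());
}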
|
||||
|
||||
bool RocksDBGeoIndexIterator::next(TokenCallback const& cb, size_t limit) {
|
||||
if (!_cursor) {
|
||||
createCursor(_lat, _lon);
|
||||
|
||||
if (!_cursor) {
|
||||
// actually validate that we got a valid cursor
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
}
|
||||
|
||||
TRI_ASSERT(_cursor != nullptr);
|
||||
|
||||
if (_done) {
|
||||
// we already know that no further results will be returned by the index
|
||||
return false;
|
||||
}
|
||||
|
||||
TRI_ASSERT(limit > 0);
|
||||
if (limit > 0) {
|
||||
// only need to calculate distances for WITHIN queries, but not for NEAR
|
||||
// queries
|
||||
bool withDistances;
|
||||
double maxDistance;
|
||||
if (_near) {
|
||||
withDistances = false;
|
||||
maxDistance = -1.0;
|
||||
} else {
|
||||
withDistances = true;
|
||||
maxDistance = _radius;
|
||||
}
|
||||
auto coords = std::unique_ptr<GeoCoordinates>(::GeoIndex_ReadCursor(
|
||||
_cursor, static_cast<int>(limit), withDistances, maxDistance));
|
||||
|
||||
size_t const length = coords ? coords->length : 0;
|
||||
|
||||
if (length == 0) {
|
||||
// Nothing Found
|
||||
// TODO validate
|
||||
_done = true;
|
||||
return false;
|
||||
}
|
||||
|
||||
size_t numDocs = findLastIndex(coords.get());
|
||||
if (numDocs == 0) {
|
||||
// we are done
|
||||
_done = true;
|
||||
return false;
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < numDocs; ++i) {
|
||||
cb(RocksDBToken(coords->coordinates[i].data));
|
||||
}
|
||||
// If we return fewer than `limit` documents, we are done.
|
||||
_done = numDocs < limit;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void RocksDBGeoIndexIterator::replaceCursor(::GeoCursor* c) {
|
||||
if (_cursor) {
|
||||
::GeoIndex_CursorFree(_cursor);
|
||||
}
|
||||
_cursor = c;
|
||||
_done = false;
|
||||
}
|
||||
|
||||
void RocksDBGeoIndexIterator::createCursor(double lat, double lon) {
|
||||
_coor = GeoCoordinate{lat, lon, 0};
|
||||
replaceCursor(::GeoIndex_NewCursor(_index->_geoIndex, &_coor));
|
||||
}
|
||||
|
||||
/// @brief creates an IndexIterator for the given Condition
|
||||
IndexIterator* RocksDBGeoIndex::iteratorForCondition(
|
||||
transaction::Methods* trx, ManagedDocumentResult* mmdr,
|
||||
arangodb::aql::AstNode const* node,
|
||||
arangodb::aql::Variable const* reference, bool) {
|
||||
TRI_IF_FAILURE("GeoIndex::noIterator") {
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
|
||||
}
|
||||
return new RocksDBGeoIndexIterator(_collection, trx, mmdr, this, node,
|
||||
reference);
|
||||
}
|
||||
|
||||
void RocksDBGeoIndexIterator::reset() { replaceCursor(nullptr); }
|
||||
|
||||
RocksDBGeoIndex::RocksDBGeoIndex(TRI_idx_iid_t iid,
|
||||
arangodb::LogicalCollection* collection,
|
||||
VPackSlice const& info)
|
||||
: RocksDBIndex(iid, collection, info),
|
||||
_variant(INDEX_GEO_INDIVIDUAL_LAT_LON),
|
||||
_geoJson(false),
|
||||
_geoIndex(nullptr) {
|
||||
TRI_ASSERT(iid != 0);
|
||||
_unique = false;
|
||||
_sparse = true;
|
||||
|
||||
if (_fields.size() == 1) {
|
||||
_geoJson = arangodb::basics::VelocyPackHelper::getBooleanValue(
|
||||
info, "geoJson", false);
|
||||
auto& loc = _fields[0];
|
||||
_location.reserve(loc.size());
|
||||
for (auto const& it : loc) {
|
||||
_location.emplace_back(it.name);
|
||||
}
|
||||
_variant =
|
||||
_geoJson ? INDEX_GEO_COMBINED_LAT_LON : INDEX_GEO_COMBINED_LON_LAT;
|
||||
} else if (_fields.size() == 2) {
|
||||
_variant = INDEX_GEO_INDIVIDUAL_LAT_LON;
|
||||
auto& lat = _fields[0];
|
||||
_latitude.reserve(lat.size());
|
||||
for (auto const& it : lat) {
|
||||
_latitude.emplace_back(it.name);
|
||||
}
|
||||
auto& lon = _fields[1];
|
||||
_longitude.reserve(lon.size());
|
||||
for (auto const& it : lon) {
|
||||
_longitude.emplace_back(it.name);
|
||||
}
|
||||
} else {
|
||||
THROW_ARANGO_EXCEPTION_MESSAGE(
|
||||
TRI_ERROR_BAD_PARAMETER,
|
||||
"RocksDBGeoIndex can only be created with one or two fields.");
|
||||
}
|
||||
|
||||
|
||||
// cheap trick to get the last inserted pot and slot number
|
||||
rocksdb::TransactionDB *db = rocksutils::globalRocksDB();
|
||||
rocksdb::ReadOptions opts;
|
||||
std::unique_ptr<rocksdb::Iterator> iter(db->NewIterator(opts));
|
||||
|
||||
int numPots = 0;
|
||||
RocksDBKeyBounds b1 = RocksDBKeyBounds::GeoIndex(_objectId, false);
|
||||
iter->SeekForPrev(b1.end());
|
||||
if (iter->Valid()
|
||||
&& _cmp->Compare(b1.start(), iter->key()) < 0
|
||||
&& _cmp->Compare(iter->key(), b1.end()) < 0) {
|
||||
// found a key smaller than bounds end
|
||||
std::pair<bool, int32_t> pair = RocksDBKey::geoValues(iter->key());
|
||||
TRI_ASSERT(pair.first == false);
|
||||
numPots = pair.second;
|
||||
}
|
||||
|
||||
int numSlots = 0;
|
||||
RocksDBKeyBounds b2 = RocksDBKeyBounds::GeoIndex(_objectId, true);
|
||||
iter->SeekForPrev(b2.end());
|
||||
if (iter->Valid()
|
||||
&& _cmp->Compare(b2.start(), iter->key()) < 0
|
||||
&& _cmp->Compare(iter->key(), b2.end()) < 0) {
|
||||
// found a key smaller than bounds end
|
||||
std::pair<bool, int32_t> pair = RocksDBKey::geoValues(iter->key());
|
||||
TRI_ASSERT(pair.first);
|
||||
numSlots = pair.second;
|
||||
}
|
||||
|
||||
_geoIndex = GeoIndex_new(_objectId, numPots, numSlots);
|
||||
if (_geoIndex == nullptr) {
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
}
|
||||
|
||||
RocksDBGeoIndex::~RocksDBGeoIndex() {
|
||||
if (_geoIndex != nullptr) {
|
||||
GeoIndex_free(_geoIndex);
|
||||
}
|
||||
}
|
||||
|
||||
size_t RocksDBGeoIndex::memory() const {
|
||||
rocksdb::TransactionDB* db = rocksutils::globalRocksDB();
|
||||
RocksDBKeyBounds bounds = RocksDBKeyBounds::GeoIndex(_objectId);
|
||||
rocksdb::Range r(bounds.start(), bounds.end());
|
||||
uint64_t out;
|
||||
db->GetApproximateSizes(&r, 1, &out, true);
|
||||
return (size_t)out;
|
||||
}
|
||||
|
||||
/// @brief return a JSON representation of the index
|
||||
void RocksDBGeoIndex::toVelocyPack(VPackBuilder& builder, bool withFigures,
|
||||
bool forPersistence) const {
|
||||
builder.openObject();
|
||||
// Basic index
|
||||
RocksDBIndex::toVelocyPack(builder, withFigures, forPersistence);
|
||||
|
||||
if (_variant == INDEX_GEO_COMBINED_LAT_LON ||
|
||||
_variant == INDEX_GEO_COMBINED_LON_LAT) {
|
||||
builder.add("geoJson", VPackValue(_geoJson));
|
||||
}
|
||||
|
||||
// geo indexes are always non-unique
|
||||
// geo indexes are always sparse.
|
||||
// "ignoreNull" has the same meaning as "sparse" and is only returned for
|
||||
// backwards compatibility
|
||||
// the "constraint" attribute has no meaning since ArangoDB 2.5 and is only
|
||||
// returned for backwards compatibility
|
||||
builder.add("constraint", VPackValue(false));
|
||||
builder.add("unique", VPackValue(false));
|
||||
builder.add("ignoreNull", VPackValue(true));
|
||||
builder.add("sparse", VPackValue(true));
|
||||
builder.close();
|
||||
}
|
||||
|
||||
/// @brief Test if this index matches the definition
|
||||
bool RocksDBGeoIndex::matchesDefinition(VPackSlice const& info) const {
|
||||
TRI_ASSERT(info.isObject());
|
||||
#ifdef ARANGODB_ENABLE_MAINTAINER_MODE
|
||||
VPackSlice typeSlice = info.get("type");
|
||||
TRI_ASSERT(typeSlice.isString());
|
||||
StringRef typeStr(typeSlice);
|
||||
TRI_ASSERT(typeStr == oldtypeName());
|
||||
#endif
|
||||
auto value = info.get("id");
|
||||
if (!value.isNone()) {
|
||||
// We already have an id.
|
||||
if (!value.isString()) {
|
||||
// Invalid ID
|
||||
return false;
|
||||
}
|
||||
// Short circuit. If id is correct the index is identical.
|
||||
StringRef idRef(value);
|
||||
return idRef == std::to_string(_iid);
|
||||
}
|
||||
value = info.get("fields");
|
||||
if (!value.isArray()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
size_t const n = static_cast<size_t>(value.length());
|
||||
if (n != _fields.size()) {
|
||||
return false;
|
||||
}
|
||||
if (_unique != arangodb::basics::VelocyPackHelper::getBooleanValue(
|
||||
info, "unique", false)) {
|
||||
return false;
|
||||
}
|
||||
if (_sparse != arangodb::basics::VelocyPackHelper::getBooleanValue(
|
||||
info, "sparse", true)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (n == 1) {
|
||||
if (_geoJson != arangodb::basics::VelocyPackHelper::getBooleanValue(
|
||||
info, "geoJson", false)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// This check takes ordering of attributes into account.
|
||||
std::vector<arangodb::basics::AttributeName> translate;
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
translate.clear();
|
||||
VPackSlice f = value.at(i);
|
||||
if (!f.isString()) {
|
||||
// Invalid field definition!
|
||||
return false;
|
||||
}
|
||||
arangodb::StringRef in(f);
|
||||
TRI_ParseAttributeString(in, translate, true);
|
||||
if (!arangodb::basics::AttributeName::isIdentical(_fields[i], translate,
|
||||
false)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
int RocksDBGeoIndex::insert(transaction::Methods*, TRI_voc_rid_t revisionId,
|
||||
VPackSlice const& doc, bool isRollback) {
|
||||
double latitude;
|
||||
double longitude;
|
||||
|
||||
if (_variant == INDEX_GEO_INDIVIDUAL_LAT_LON) {
|
||||
VPackSlice lat = doc.get(_latitude);
|
||||
if (!lat.isNumber()) {
|
||||
// Invalid, no insert. Index is sparse
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
VPackSlice lon = doc.get(_longitude);
|
||||
if (!lon.isNumber()) {
|
||||
// Invalid, no insert. Index is sparse
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
latitude = lat.getNumericValue<double>();
|
||||
longitude = lon.getNumericValue<double>();
|
||||
} else {
|
||||
VPackSlice loc = doc.get(_location);
|
||||
if (!loc.isArray() || loc.length() < 2) {
|
||||
// Invalid, no insert. Index is sparse
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
VPackSlice first = loc.at(0);
|
||||
if (!first.isNumber()) {
|
||||
// Invalid, no insert. Index is sparse
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
VPackSlice second = loc.at(1);
|
||||
if (!second.isNumber()) {
|
||||
// Invalid, no insert. Index is sparse
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
if (_geoJson) {
|
||||
longitude = first.getNumericValue<double>();
|
||||
latitude = second.getNumericValue<double>();
|
||||
} else {
|
||||
latitude = first.getNumericValue<double>();
|
||||
longitude = second.getNumericValue<double>();
|
||||
}
|
||||
}
|
||||
|
||||
// and insert into index
|
||||
GeoCoordinate gc;
|
||||
gc.latitude = latitude;
|
||||
gc.longitude = longitude;
|
||||
gc.data = static_cast<uint64_t>(revisionId);
|
||||
|
||||
int res = GeoIndex_insert(_geoIndex, &gc);
|
||||
|
||||
if (res == -1) {
|
||||
LOG_TOPIC(WARN, arangodb::Logger::FIXME)
|
||||
<< "found duplicate entry in geo-index, should not happen";
|
||||
return TRI_set_errno(TRI_ERROR_INTERNAL);
|
||||
} else if (res == -2) {
|
||||
return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
|
||||
} else if (res == -3) {
|
||||
LOG_TOPIC(DEBUG, arangodb::Logger::FIXME)
|
||||
<< "illegal geo-coordinates, ignoring entry";
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
} else if (res < 0) {
|
||||
return TRI_set_errno(TRI_ERROR_INTERNAL);
|
||||
}
|
||||
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
int RocksDBGeoIndex::insertRaw(rocksdb::WriteBatchWithIndex* batch,
|
||||
TRI_voc_rid_t revisionId,
|
||||
arangodb::velocypack::Slice const& doc) {
|
||||
return this->insert(nullptr, revisionId, doc, false);
|
||||
}
|
||||
|
||||
int RocksDBGeoIndex::remove(transaction::Methods*, TRI_voc_rid_t revisionId,
|
||||
VPackSlice const& doc, bool isRollback) {
|
||||
double latitude = 0.0;
|
||||
double longitude = 0.0;
|
||||
bool ok = true;
|
||||
|
||||
if (_variant == INDEX_GEO_INDIVIDUAL_LAT_LON) {
|
||||
VPackSlice lat = doc.get(_latitude);
|
||||
VPackSlice lon = doc.get(_longitude);
|
||||
if (!lat.isNumber()) {
|
||||
ok = false;
|
||||
} else {
|
||||
latitude = lat.getNumericValue<double>();
|
||||
}
|
||||
if (!lon.isNumber()) {
|
||||
ok = false;
|
||||
} else {
|
||||
longitude = lon.getNumericValue<double>();
|
||||
}
|
||||
} else {
|
||||
VPackSlice loc = doc.get(_location);
|
||||
if (!loc.isArray() || loc.length() < 2) {
|
||||
ok = false;
|
||||
} else {
|
||||
VPackSlice first = loc.at(0);
|
||||
if (!first.isNumber()) {
|
||||
ok = false;
|
||||
}
|
||||
VPackSlice second = loc.at(1);
|
||||
if (!second.isNumber()) {
|
||||
ok = false;
|
||||
}
|
||||
if (ok) {
|
||||
if (_geoJson) {
|
||||
longitude = first.getNumericValue<double>();
|
||||
latitude = second.getNumericValue<double>();
|
||||
} else {
|
||||
latitude = first.getNumericValue<double>();
|
||||
longitude = second.getNumericValue<double>();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!ok) {
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
GeoCoordinate gc;
|
||||
gc.latitude = latitude;
|
||||
gc.longitude = longitude;
|
||||
gc.data = static_cast<uint64_t>(revisionId);
|
||||
|
||||
// ignore non-existing elements in geo-index
|
||||
GeoIndex_remove(_geoIndex, &gc);
|
||||
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
int RocksDBGeoIndex::removeRaw(rocksdb::WriteBatch*, TRI_voc_rid_t revisionId,
|
||||
arangodb::velocypack::Slice const& doc) {
|
||||
return this->remove(nullptr, revisionId, doc, false);
|
||||
}
|
||||
|
||||
int RocksDBGeoIndex::unload() {
|
||||
// create a new, empty index
|
||||
/*auto empty = GeoIndex_new(_objectId, 0, 0);
|
||||
|
||||
if (empty == nullptr) {
|
||||
THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
|
||||
// free the old one
|
||||
if (_geoIndex != nullptr) {
|
||||
GeoIndex_free(_geoIndex);
|
||||
}
|
||||
|
||||
// and assign it
|
||||
_geoIndex = empty;*/
|
||||
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
/// @brief looks up all points within a given radius
|
||||
GeoCoordinates* RocksDBGeoIndex::withinQuery(transaction::Methods* trx,
|
||||
double lat, double lon,
|
||||
double radius) const {
|
||||
GeoCoordinate gc;
|
||||
gc.latitude = lat;
|
||||
gc.longitude = lon;
|
||||
|
||||
return GeoIndex_PointsWithinRadius(_geoIndex, &gc, radius);
|
||||
}
|
||||
|
||||
/// @brief looks up the nearest points
|
||||
GeoCoordinates* RocksDBGeoIndex::nearQuery(transaction::Methods* trx,
|
||||
double lat, double lon,
|
||||
size_t count) const {
|
||||
GeoCoordinate gc;
|
||||
gc.latitude = lat;
|
||||
gc.longitude = lon;
|
||||
|
||||
return GeoIndex_NearestCountPoints(_geoIndex, &gc, static_cast<int>(count));
|
||||
}
|
|
@ -0,0 +1,200 @@
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2017 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Jan Christoph Uhde
////////////////////////////////////////////////////////////////////////////////

#ifndef ARANGOD_ROCKSDB_GEO_INDEX_H
#define ARANGOD_ROCKSDB_GEO_INDEX_H 1

#include "Basics/Common.h"
#include "Indexes/IndexIterator.h"
#include "RocksDBEngine/RocksDBGeoIndexImpl.h"
#include "RocksDBEngine/RocksDBIndex.h"
#include "VocBase/voc-types.h"
#include "VocBase/vocbase.h"

#include <velocypack/Builder.h>
#include <velocypack/velocypack-aliases.h>

using namespace ::arangodb::rocksdbengine;

// GeoCoordinate.data must be capable of storing revision ids
static_assert(sizeof(GeoCoordinate::data) >= sizeof(TRI_voc_rid_t),
              "invalid size of GeoCoordinate.data");

namespace arangodb {
class RocksDBGeoIndex;

class RocksDBGeoIndexIterator final : public IndexIterator {
 public:
  /// @brief Construct a RocksDBGeoIndexIterator based on Ast Conditions
  RocksDBGeoIndexIterator(LogicalCollection* collection,
                          transaction::Methods* trx,
                          ManagedDocumentResult* mmdr,
                          RocksDBGeoIndex const* index,
                          arangodb::aql::AstNode const*,
                          arangodb::aql::Variable const*);

  ~RocksDBGeoIndexIterator() { replaceCursor(nullptr); }

  char const* typeName() const override { return "geo-index-iterator"; }

  bool next(TokenCallback const& cb, size_t limit) override;

  void reset() override;

 private:
  size_t findLastIndex(GeoCoordinates* coords) const;
  void replaceCursor(::GeoCursor* c);
  void createCursor(double lat, double lon);
  void evaluateCondition();  // called in constructor

  RocksDBGeoIndex const* _index;
  ::GeoCursor* _cursor;
  ::GeoCoordinate _coor;
  arangodb::aql::AstNode const* _condition;
  double _lat;
  double _lon;
  bool _near;
  bool _inclusive;
  bool _done;
  double _radius;
};

class RocksDBGeoIndex final : public RocksDBIndex {
  friend class RocksDBGeoIndexIterator;

 public:
  RocksDBGeoIndex() = delete;

  RocksDBGeoIndex(TRI_idx_iid_t, LogicalCollection*,
                  arangodb::velocypack::Slice const&);

  ~RocksDBGeoIndex();

 public:
  /// @brief geo index variants
  enum IndexVariant {
    INDEX_GEO_NONE = 0,
    INDEX_GEO_INDIVIDUAL_LAT_LON,
    INDEX_GEO_COMBINED_LAT_LON,
    INDEX_GEO_COMBINED_LON_LAT
  };

 public:
  IndexType type() const override {
    if (_variant == INDEX_GEO_COMBINED_LAT_LON ||
        _variant == INDEX_GEO_COMBINED_LON_LAT) {
      return TRI_IDX_TYPE_GEO1_INDEX;
    }

    return TRI_IDX_TYPE_GEO2_INDEX;
  }

  char const* typeName() const override {
    if (_variant == INDEX_GEO_COMBINED_LAT_LON ||
        _variant == INDEX_GEO_COMBINED_LON_LAT) {
      return "geo1";
    }
    return "geo2";
  }

  IndexIterator* iteratorForCondition(transaction::Methods*,
                                      ManagedDocumentResult*,
                                      arangodb::aql::AstNode const*,
                                      arangodb::aql::Variable const*,
                                      bool) override;

  bool allowExpansion() const override { return false; }

  bool canBeDropped() const override { return true; }

  bool isSorted() const override { return true; }

  bool hasSelectivityEstimate() const override { return false; }

  size_t memory() const override;

  void toVelocyPack(VPackBuilder&, bool, bool) const override;
  // Uses default toVelocyPackFigures

  bool matchesDefinition(VPackSlice const& info) const override;

  int insert(transaction::Methods*, TRI_voc_rid_t,
             arangodb::velocypack::Slice const&, bool isRollback) override;
  int insertRaw(rocksdb::WriteBatchWithIndex*, TRI_voc_rid_t,
                arangodb::velocypack::Slice const&) override;
  int remove(transaction::Methods*, TRI_voc_rid_t,
             arangodb::velocypack::Slice const&, bool isRollback) override;
  int removeRaw(rocksdb::WriteBatch*, TRI_voc_rid_t,
                arangodb::velocypack::Slice const&) override;

  int unload() override;

  /// @brief looks up all points within a given radius
  GeoCoordinates* withinQuery(transaction::Methods*, double, double,
                              double) const;

  /// @brief looks up the nearest points
  GeoCoordinates* nearQuery(transaction::Methods*, double, double,
                            size_t) const;

  bool isSame(std::vector<std::string> const& location, bool geoJson) const {
    return (!_location.empty() && _location == location && _geoJson == geoJson);
  }

  bool isSame(std::vector<std::string> const& latitude,
              std::vector<std::string> const& longitude) const {
    return (!_latitude.empty() && !_longitude.empty() &&
            _latitude == latitude && _longitude == longitude);
  }

 private:
  /// @brief attribute paths
  std::vector<std::string> _location;
  std::vector<std::string> _latitude;
  std::vector<std::string> _longitude;

  /// @brief the geo index variant (geo1 or geo2)
  IndexVariant _variant;

  /// @brief whether the index is a geoJson index (latitude / longitude
  /// reversed)
  bool _geoJson;

  /// @brief the actual geo index
  GeoIdx* _geoIndex;
};
}  // namespace arangodb

namespace std {
template <>
class default_delete<GeoCoordinates> {
 public:
  void operator()(GeoCoordinates* result) {
    if (result != nullptr) {
      GeoIndex_CoordinatesFree(result);
    }
  }
};
}  // namespace std

#endif
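A brief editorial aside, not part of the patch: the std::default_delete<GeoCoordinates> specialization above is what lets query results be held in a std::unique_ptr and released through GeoIndex_CoordinatesFree automatically. A minimal sketch, assuming the declarations from this header; printNearest is a hypothetical helper name, not from the sources:

#include <memory>

// Hedged sketch: nearQuery returns a heap-allocated GeoCoordinates list;
// the default_delete specialization frees it via GeoIndex_CoordinatesFree.
void printNearest(arangodb::RocksDBGeoIndex const& index,
                  arangodb::transaction::Methods* trx) {
  std::unique_ptr<GeoCoordinates> coords(index.nearQuery(trx, 40.0, 40.0, 5));
  if (coords != nullptr && coords->length > 0) {
    // coords->coordinates[0] is the point nearest to (40, 40)
  }
}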
File diff suppressed because it is too large
@ -0,0 +1,114 @@
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author R. A. Parker
////////////////////////////////////////////////////////////////////////////////

/* GeoIdx.h - header file for GeoIdx algorithms */
/* Version 2.2  25.11.2015  R. A. Parker */

#ifndef ARANGOD_ROCKSDB_GEO_INDEX_IMPL_H
#define ARANGOD_ROCKSDB_GEO_INDEX_IMPL_H 1

#include "Basics/Common.h"
#include <cstdint>

namespace arangodb { namespace rocksdbengine {

/* first the things that a user might want to change */

/* a GeoString - an unsigned type of at least 64 bits */
typedef std::uint_fast64_t GeoString;

/* percentage growth of slot or slotslot tables */
#define GeoIndexGROW 50

/* maximum number of points in a pot */
/* *** note - must be even! */
/* smaller takes more space but is a little faster */
#define GeoIndexPOTSIZE 6

/* chooses the set of fixed points */
#define GeoIndexFIXEDSET 6
/* 1 is just the N pole (doesn't really work) */
/* 2 is N and S pole - slow but OK */
/* 3 is equilateral triangle on 0/180 long */
/* 4 is four corners of a tetrahedron */
/* 5 is trigonal bipyramid */
/* 6 is the corners of octahedron (default) */
/* 8 is eight corners of a cube */

/* size of max-dist integer. */
/* 2 is 16-bit - smaller but slow when lots of points */
/*   within a few hundred meters of target */
/* 4 is 32-bit - larger and fast even when points are */
/*   only centimeters apart. Default */
#define GEOFIXLEN 4
#if GEOFIXLEN == 2
typedef std::uint16_t GeoFix;
#endif
#if GEOFIXLEN == 4
typedef std::uint32_t GeoFix;
#endif

/* If this #define is there, then the INDEXDUMP and */
/* INDEXVALID functions are also available. These  */
/* are not needed for normal production versions.  */
/* The INDEXDUMP function also prints the data,    */
/* assumed to be a character string, if DEBUG is   */
/* set to 2.                                       */
//#define TRI_GEO_DEBUG 1

typedef struct {
  double latitude;
  double longitude;
  uint64_t data;
} GeoCoordinate;

typedef struct {
  size_t length;
  GeoCoordinate* coordinates;
  double* distances;
} GeoCoordinates;

typedef void GeoIdx;    /* to keep the structure private */
typedef void GeoCursor; /* to keep the structure private */

GeoIdx* GeoIndex_new(uint64_t objectId, int slo, int);
void GeoIndex_free(GeoIdx* gi);
double GeoIndex_distance(GeoCoordinate* c1, GeoCoordinate* c2);
int GeoIndex_insert(GeoIdx* gi, GeoCoordinate* c);
int GeoIndex_remove(GeoIdx* gi, GeoCoordinate* c);
int GeoIndex_hint(GeoIdx* gi, int hint);
GeoCoordinates* GeoIndex_PointsWithinRadius(GeoIdx* gi, GeoCoordinate* c,
                                            double d);
GeoCoordinates* GeoIndex_NearestCountPoints(GeoIdx* gi, GeoCoordinate* c,
                                            int count);
GeoCursor* GeoIndex_NewCursor(GeoIdx* gi, GeoCoordinate* c);
GeoCoordinates* GeoIndex_ReadCursor(GeoCursor* gc, int count,
                                    bool returnDistances = true,
                                    double maxDistance = -1.0);
void GeoIndex_CursorFree(GeoCursor* gc);
void GeoIndex_CoordinatesFree(GeoCoordinates* clist);
#ifdef TRI_GEO_DEBUG
void GeoIndex_INDEXDUMP(GeoIdx* gi, FILE* f);
int GeoIndex_INDEXVALID(GeoIdx* gi);
#endif
}}
#endif
/* end of GeoIdx.h */
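For orientation, an editor's sketch of how the C-style API declared above is used; the objectId and coordinate values are arbitrary examples, and the meaning of the two trailing int parameters of GeoIndex_new (assumed here to be pot/slot counts) is inferred from context, not stated in the header:

using namespace arangodb::rocksdbengine;

// Create an index, insert one point, query around it, and clean up.
int exampleRoundTrip() {
  GeoIdx* gi = GeoIndex_new(/*objectId*/ 1, /*pots?*/ 0, /*slots?*/ 0);
  if (gi == nullptr) return -1;

  GeoCoordinate c = {/*latitude*/ 40.0, /*longitude*/ 40.0, /*data*/ 12345};
  int res = GeoIndex_insert(gi, &c);

  GeoCoordinates* hits = GeoIndex_PointsWithinRadius(gi, &c, /*meters*/ 1000.0);
  if (hits != nullptr) {
    // hits->length points; hits->distances[i] gives the distance per point
    GeoIndex_CoordinatesFree(hits);
  }
  GeoIndex_free(gi);
  return res;
}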
@ -0,0 +1,117 @@
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2017 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Jan Christoph Uhde
////////////////////////////////////////////////////////////////////////////////

// MUST BE ONLY INCLUDED IN RocksDBGeoIndexImpl.cpp after struct definitions!
// IT CANNOT BE USED IN OTHER FILES.
// This file has only been added to keep Richard's code clean, so it is easier
// for him to spot relevant changes.

#ifndef ARANGOD_ROCKSDB_GEO_INDEX_IMPL_HELPER_H
#define ARANGOD_ROCKSDB_GEO_INDEX_IMPL_HELPER_H 1

#include <RocksDBEngine/RocksDBGeoIndexImpl.h>

#include <RocksDBEngine/RocksDBCommon.h>
#include <RocksDBEngine/RocksDBEngine.h>
#include <RocksDBEngine/RocksDBKey.h>

#include <velocypack/Builder.h>
#include <velocypack/Iterator.h>
#include <velocypack/velocypack-aliases.h>

namespace arangodb {
namespace rocksdbengine {

VPackBuilder CoordToVpack(GeoCoordinate* coord) {
  VPackBuilder rv{};
  rv.openArray();
  rv.add(VPackValue(coord->latitude));   // double
  rv.add(VPackValue(coord->longitude));  // double
  rv.add(VPackValue(coord->data));       // uint64_t
  rv.close();
  return rv;
}

void VpackToCoord(VPackSlice const& slice, GeoCoordinate* gc) {
  TRI_ASSERT(slice.isArray() && slice.length() == 3);
  gc->latitude = slice.at(0).getDouble();
  gc->longitude = slice.at(1).getDouble();
  gc->data = slice.at(2).getUInt();
}

VPackBuilder PotToVpack(GeoPot* pot) {
  VPackBuilder rv{};
  rv.openArray();                      // open
  rv.add(VPackValue(pot->LorLeaf));    // int
  rv.add(VPackValue(pot->RorPoints));  // int
  rv.add(VPackValue(pot->middle));     // GeoString
  {
    rv.openArray();  // array of GeoFix (uint 16/32)
    for (std::size_t i = 0; i < GeoIndexFIXEDPOINTS; i++) {
      rv.add(VPackValue(pot->maxdist[i]));  // uint 16/32
    }
    rv.close();  // close array
  }
  rv.add(VPackValue(pot->start));  // GeoString
  rv.add(VPackValue(pot->end));    // GeoString
  rv.add(VPackValue(pot->level));  // int
  {
    rv.openArray();  // array of int
    for (std::size_t i = 0; i < GeoIndexPOTSIZE; i++) {
      rv.add(VPackValue(pot->points[i]));  // int
    }
    rv.close();  // close array
  }
  rv.close();  // close
  return rv;
}

void VpackToPot(VPackSlice const& slice, GeoPot* rv) {
  TRI_ASSERT(slice.isArray());
  rv->LorLeaf = (int)slice.at(0).getInt();    // int
  rv->RorPoints = (int)slice.at(1).getInt();  // int
  rv->middle = slice.at(2).getUInt();         // GeoString
  {
    auto maxdistSlice = slice.at(3);
    TRI_ASSERT(maxdistSlice.isArray());
    TRI_ASSERT(maxdistSlice.length() == GeoIndexFIXEDPOINTS);
    for (std::size_t i = 0; i < GeoIndexFIXEDPOINTS; i++) {
      rv->maxdist[i] = (GeoFix)maxdistSlice.at(i).getUInt();  // uint 16/32
    }
  }
  rv->start = slice.at(4).getUInt();      // GeoString
  rv->end = slice.at(5).getUInt();        // GeoString
  rv->level = (int)slice.at(6).getInt();  // int
  {
    auto pointsSlice = slice.at(7);
    TRI_ASSERT(pointsSlice.isArray());
    TRI_ASSERT(pointsSlice.length() == GeoIndexPOTSIZE);
    for (std::size_t i = 0; i < GeoIndexPOTSIZE; i++) {
      rv->points[i] = (int)pointsSlice.at(i).getInt();  // int
    }
  }
}

}  // namespace rocksdbengine
}  // namespace arangodb
#endif
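As a sanity check on the scheme above, an illustrative sketch that is not code from the patch: each pot is serialized as a flat VelocyPack array with fixed positions (LorLeaf, RorPoints, middle, maxdist[], start, end, level, points[]), so reading back is purely position-based. A toy round-trip over two of the fields:

#include <velocypack/Builder.h>
#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>
#include <cassert>

int main() {
  VPackBuilder b;
  b.openArray();
  b.add(VPackValue(7));   // position 0, e.g. LorLeaf
  b.add(VPackValue(13));  // position 1, e.g. RorPoints
  b.close();

  VPackSlice s = b.slice();
  assert(s.isArray() && s.length() == 2);
  assert(s.at(0).getInt() == 7);   // read back by position
  assert(s.at(1).getInt() == 13);
  return 0;
}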
@ -29,6 +29,7 @@
#include "RocksDBEngine/RocksDBEdgeIndex.h"
#include "RocksDBEngine/RocksDBEngine.h"
#include "RocksDBEngine/RocksDBFulltextIndex.h"
#include "RocksDBEngine/RocksDBGeoIndex.h"
#include "RocksDBEngine/RocksDBHashIndex.h"
#include "RocksDBEngine/RocksDBPersistentIndex.h"
#include "RocksDBEngine/RocksDBPrimaryIndex.h"

@ -428,6 +429,11 @@ std::shared_ptr<Index> RocksDBIndexFactory::prepareIndexFromSlice(
      newIdx.reset(new arangodb::RocksDBPersistentIndex(iid, col, info));
      break;
    }
    case arangodb::Index::TRI_IDX_TYPE_GEO1_INDEX:
    case arangodb::Index::TRI_IDX_TYPE_GEO2_INDEX: {
      newIdx.reset(new arangodb::RocksDBGeoIndex(iid, col, info));
      break;
    }
    case arangodb::Index::TRI_IDX_TYPE_FULLTEXT_INDEX: {
      newIdx.reset(new arangodb::RocksDBFulltextIndex(iid, col, info));
      break;
@ -77,6 +77,24 @@ RocksDBKey RocksDBKey::UniqueIndexValue(uint64_t indexId,
  return RocksDBKey(RocksDBEntryType::UniqueIndexValue, indexId, indexValues);
}

RocksDBKey RocksDBKey::FulltextIndexValue(uint64_t indexId,
                                          arangodb::StringRef const& word,
                                          arangodb::StringRef const& primaryKey) {
  return RocksDBKey(RocksDBEntryType::FulltextIndexValue, indexId, word, primaryKey);
}

RocksDBKey RocksDBKey::GeoIndexValue(uint64_t indexId, int32_t offset, bool isSlot) {
  RocksDBKey key(RocksDBEntryType::GeoIndexValue);
  size_t length = sizeof(char) + 2 * sizeof(uint64_t);
  key._buffer.reserve(length);
  uint64ToPersistent(key._buffer, indexId);

  uint64_t norm = uint64_t(offset) << 32;
  norm |= isSlot ? 0xFFU : 0;  // encode slot|pot in the lowest byte
  uint64ToPersistent(key._buffer, norm);
  return key;
}

RocksDBKey RocksDBKey::View(TRI_voc_tick_t databaseId, TRI_voc_cid_t viewId) {
  return RocksDBKey(RocksDBEntryType::View, databaseId, viewId);
}
@ -93,12 +111,6 @@ RocksDBKey RocksDBKey::ReplicationApplierConfig(TRI_voc_tick_t databaseId) {
  return RocksDBKey(RocksDBEntryType::ReplicationApplierConfig, databaseId);
}

RocksDBKey RocksDBKey::FulltextIndexValue(uint64_t indexId,
                                          arangodb::StringRef const& word,
                                          arangodb::StringRef const& primaryKey) {
  return RocksDBKey(RocksDBEntryType::FulltextIndexValue, indexId, word, primaryKey);
}

// ========================= Member methods ===========================

RocksDBEntryType RocksDBKey::type(RocksDBKey const& key) {

@ -177,10 +189,20 @@ VPackSlice RocksDBKey::indexedVPack(rocksdb::Slice const& slice) {
  return indexedVPack(slice.data(), slice.size());
}

std::pair<bool, int32_t> RocksDBKey::geoValues(rocksdb::Slice const& slice) {
  TRI_ASSERT(slice.size() >= sizeof(char) + sizeof(uint64_t) * 2);
  RocksDBEntryType type = static_cast<RocksDBEntryType>(*slice.data());
  TRI_ASSERT(type == RocksDBEntryType::GeoIndexValue);
  uint64_t val = uint64FromPersistent(slice.data() + sizeof(char) + sizeof(uint64_t));
  bool isSlot = (val & 0xFFU) != 0;  // lowest byte is 0xFF for slots
  return std::pair<bool, int32_t>(isSlot, static_cast<int32_t>(val >> 32));
}

std::string const& RocksDBKey::string() const { return _buffer; }

RocksDBKey::RocksDBKey(RocksDBEntryType type) : _type(type), _buffer() {
  switch (_type) {
    case RocksDBEntryType::GeoIndexValue:
    case RocksDBEntryType::SettingsValue: {
      _buffer.push_back(static_cast<char>(_type));
      break;

@ -327,6 +349,8 @@ RocksDBKey::RocksDBKey(RocksDBEntryType type, uint64_t first,
  }
}

// ====================== Private Methods ==========================

RocksDBEntryType RocksDBKey::type(char const* data, size_t size) {
  TRI_ASSERT(data != nullptr);
  TRI_ASSERT(size >= sizeof(char));

@ -375,7 +399,9 @@ TRI_voc_cid_t RocksDBKey::objectId(char const* data, size_t size) {
    case RocksDBEntryType::PrimaryIndexValue:
    case RocksDBEntryType::EdgeIndexValue:
    case RocksDBEntryType::IndexValue:
    case RocksDBEntryType::UniqueIndexValue: {
    case RocksDBEntryType::UniqueIndexValue:
    case RocksDBEntryType::GeoIndexValue:
    {
      TRI_ASSERT(size >= (sizeof(char) + (2 * sizeof(uint64_t))));
      return uint64FromPersistent(data + sizeof(char));
    }
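To make the bit layout explicit, an editor's sketch (encodeGeoValue and decodeGeoValue are hypothetical names, not part of the sources): GeoIndexValue packs the 32-bit pot/slot offset into the upper half of a 64-bit word and marks slots with 0xFF in the lowest byte, which is exactly what geoValues unpacks.

#include <cassert>
#include <cstdint>
#include <utility>

static uint64_t encodeGeoValue(int32_t offset, bool isSlot) {
  uint64_t norm = uint64_t(static_cast<uint32_t>(offset)) << 32;
  norm |= isSlot ? 0xFFU : 0;  // lowest byte flags slot vs. pot
  return norm;
}

static std::pair<bool, int32_t> decodeGeoValue(uint64_t val) {
  bool isSlot = (val & 0xFFU) != 0;
  return {isSlot, static_cast<int32_t>(val >> 32)};
}

int main() {
  auto decoded = decodeGeoValue(encodeGeoValue(42, true));
  assert(decoded.first && decoded.second == 42);  // round-trips
  return 0;
}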
@ -103,6 +103,18 @@ class RocksDBKey {
  static RocksDBKey UniqueIndexValue(uint64_t indexId,
                                     VPackSlice const& indexValues);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Create a fully-specified key for the fulltext index
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKey FulltextIndexValue(uint64_t indexId,
                                       arangodb::StringRef const& word,
                                       arangodb::StringRef const& primaryKey);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Create a fully-specified key for a geoIndexValue
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKey GeoIndexValue(uint64_t indexId, int32_t offset, bool isSlot);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Create a fully-specified key for a view
  //////////////////////////////////////////////////////////////////////////////

@ -122,13 +134,6 @@ class RocksDBKey {
  /// @brief Create a fully-specified key for a replication applier config
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKey ReplicationApplierConfig(TRI_voc_tick_t databaseId);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Create a fully-specified key for the fulltext index
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKey FulltextIndexValue(uint64_t indexId,
                                       arangodb::StringRef const& word,
                                       arangodb::StringRef const& primaryKey);

 public:
  //////////////////////////////////////////////////////////////////////////////

@ -167,8 +172,8 @@ class RocksDBKey {
  //////////////////////////////////////////////////////////////////////////////
  /// @brief Extracts the objectId from a key
  ///
  /// May be called only on the following key types: Document.
  /// Other types will throw.
  /// May be called only on the following key types: Document,
  /// all kinds of index entries. Other types will throw.
  //////////////////////////////////////////////////////////////////////////////
  static uint64_t objectId(RocksDBKey const&);
  static uint64_t objectId(rocksdb::Slice const&);

@ -216,6 +221,13 @@ class RocksDBKey {
  static VPackSlice indexedVPack(RocksDBKey const&);
  static VPackSlice indexedVPack(rocksdb::Slice const&);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Extracts the geo pot or slot offset
  ///
  /// May be called only on GeoIndexValues
  //////////////////////////////////////////////////////////////////////////////
  static std::pair<bool, int32_t> geoValues(rocksdb::Slice const& slice);

 public:
  //////////////////////////////////////////////////////////////////////////////
  /// @brief Returns a reference to the full, constructed key

@ -231,6 +243,8 @@ class RocksDBKey {
                arangodb::StringRef const& docKey, VPackSlice const& indexData);
  RocksDBKey(RocksDBEntryType type, uint64_t first,
             arangodb::StringRef const& second);
  RocksDBKey(RocksDBEntryType type, uint64_t first, std::string const& second,
             std::string const& third);
  RocksDBKey(RocksDBEntryType type, uint64_t first, arangodb::StringRef const& second,
             arangodb::StringRef const& third);
@ -73,6 +73,32 @@ RocksDBKeyBounds RocksDBKeyBounds::UniqueIndex(uint64_t indexId) {
  return RocksDBKeyBounds(RocksDBEntryType::UniqueIndexValue, indexId);
}

RocksDBKeyBounds RocksDBKeyBounds::FulltextIndex(uint64_t indexId) {
  return RocksDBKeyBounds(RocksDBEntryType::FulltextIndexValue, indexId);
}

RocksDBKeyBounds RocksDBKeyBounds::GeoIndex(uint64_t indexId) {
  return RocksDBKeyBounds(RocksDBEntryType::GeoIndexValue, indexId);
}

RocksDBKeyBounds RocksDBKeyBounds::GeoIndex(uint64_t indexId, bool isSlot) {
  RocksDBKeyBounds b;
  size_t length = sizeof(char) + sizeof(uint64_t) * 2;
  b._startBuffer.reserve(length);
  b._startBuffer.push_back(static_cast<char>(RocksDBEntryType::GeoIndexValue));
  uint64ToPersistent(b._startBuffer, indexId);

  b._endBuffer.clear();
  b._endBuffer.append(b._startBuffer);  // append common prefix

  uint64_t norm = isSlot ? 0xFFU : 0;  // encode slot|pot in the lowest byte
  uint64ToPersistent(b._startBuffer, norm);  // little endian
  norm = norm | (0xFFFFFFFFULL << 32);
  uint64ToPersistent(b._endBuffer, norm);
  return b;
}

RocksDBKeyBounds RocksDBKeyBounds::IndexRange(uint64_t indexId,
                                              VPackSlice const& left,
                                              VPackSlice const& right) {

@ -94,11 +120,6 @@ RocksDBKeyBounds RocksDBKeyBounds::CounterValues() {
  return RocksDBKeyBounds(RocksDBEntryType::CounterValue);
}

RocksDBKeyBounds RocksDBKeyBounds::FulltextIndex(uint64_t indexId) {
  return RocksDBKeyBounds(RocksDBEntryType::FulltextIndexValue, indexId);
}

RocksDBKeyBounds RocksDBKeyBounds::FulltextIndexPrefix(uint64_t indexId,
                                                       arangodb::StringRef const& word) {
  // I did not want to pass a bool to the constructor for this

@ -110,8 +131,9 @@ RocksDBKeyBounds RocksDBKeyBounds::FulltextIndexPrefix(uint64_t indexId,
  uint64ToPersistent(bounds._startBuffer, indexId);
  bounds._startBuffer.append(word.data(), word.length());

  bounds._endBuffer.clear();
  bounds._endBuffer.append(bounds._startBuffer);
  bounds._endBuffer.push_back((const unsigned char)0xFF);  // invalid UTF-8 byte, sorts higher under memcmp
  bounds._endBuffer.push_back(0xFFU);  // invalid UTF-8 byte, sorts higher under memcmp
  return bounds;
}
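A sketch of how such bounds are typically consumed (assumed usage, not from the patch; whether the end bound is treated as inclusive is up to the caller): seek to the start key and iterate while the key stays within the end key, visiting every pot or slot of one index.

#include <rocksdb/db.h>
#include <rocksdb/iterator.h>
#include <memory>

void scanGeoEntries(rocksdb::DB* db, rocksdb::Slice start, rocksdb::Slice end) {
  rocksdb::ReadOptions ro;
  std::unique_ptr<rocksdb::Iterator> it(db->NewIterator(ro));
  for (it->Seek(start); it->Valid() && it->key().compare(end) <= 0; it->Next()) {
    // it->value() holds one serialized pot or slot of the geo index
  }
}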
@ -209,7 +231,8 @@ RocksDBKeyBounds::RocksDBKeyBounds(RocksDBEntryType type, uint64_t first)
    }

    case RocksDBEntryType::Collection:
    case RocksDBEntryType::Document: {
    case RocksDBEntryType::Document:
    case RocksDBEntryType::GeoIndexValue: {
      // Collections are stored as follows:
      // Key: 1 + 8-byte ArangoDB database ID + 8-byte ArangoDB collection ID
      //

@ -232,8 +255,8 @@ RocksDBKeyBounds::RocksDBKeyBounds(RocksDBEntryType type, uint64_t first)

    case RocksDBEntryType::PrimaryIndexValue:
    case RocksDBEntryType::EdgeIndexValue:
    case RocksDBEntryType::View:
    case RocksDBEntryType::FulltextIndexValue: {
    case RocksDBEntryType::FulltextIndexValue:
    case RocksDBEntryType::View: {
      size_t length = sizeof(char) + sizeof(uint64_t);
      _startBuffer.reserve(length);
      _startBuffer.push_back(static_cast<char>(_type));
@ -85,6 +85,17 @@ class RocksDBKeyBounds {
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKeyBounds UniqueIndex(uint64_t indexId);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Bounds for all entries of a fulltext index
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKeyBounds FulltextIndex(uint64_t indexId);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Bounds for all entries belonging to a specified geo index
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKeyBounds GeoIndex(uint64_t indexId);
  static RocksDBKeyBounds GeoIndex(uint64_t indexId, bool isSlot);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Bounds for all index-entries within a value range belonging to a
  /// specified non-unique index

@ -109,12 +120,7 @@ class RocksDBKeyBounds {
  /// @brief Bounds for all counter values
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKeyBounds CounterValues();

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Bounds for all entries of a fulltext index
  //////////////////////////////////////////////////////////////////////////////
  static RocksDBKeyBounds FulltextIndex(uint64_t indexId);

  //////////////////////////////////////////////////////////////////////////////
  /// @brief Bounds for all entries of a fulltext index, matching prefixes
  //////////////////////////////////////////////////////////////////////////////
@ -163,6 +163,12 @@ int RocksDBTransactionCollection::use(int nestingLevel) {
      _collection = _transaction->vocbase()->useCollection(_cid, status);
      if (_collection != nullptr) {
        _usageLocked = true;

        // geo index needs exclusive write access
        RocksDBCollection* rc = static_cast<RocksDBCollection*>(_collection->getPhysical());
        if (AccessMode::isWrite(_accessType) && rc->hasGeoIndex()) {
          _accessType = AccessMode::Type::EXCLUSIVE;
        }
      }
    } else {
      // use without usage-lock (lock already set externally)

@ -187,11 +193,9 @@ int RocksDBTransactionCollection::use(int nestingLevel) {
      return TRI_ERROR_ARANGO_READ_ONLY;
    }

    _initialNumberDocuments =
        static_cast<RocksDBCollection*>(_collection->getPhysical())
            ->numberDocuments();
    _revision =
        static_cast<RocksDBCollection*>(_collection->getPhysical())->revision();
    RocksDBCollection* rc = static_cast<RocksDBCollection*>(_collection->getPhysical());
    _initialNumberDocuments = rc->numberDocuments();
    _revision = rc->revision();
  }

  if (AccessMode::isWriteOrExclusive(_accessType) && !isLocked()) {
@ -72,6 +72,20 @@ static rocksdb::Slice UniqueIndexValue(
    reinterpret_cast<std::underlying_type<RocksDBEntryType>::type*>(
        &uniqueIndexValue),
    1);

static RocksDBEntryType fulltextIndexValue =
    RocksDBEntryType::FulltextIndexValue;
static rocksdb::Slice FulltextIndexValue(
    reinterpret_cast<std::underlying_type<RocksDBEntryType>::type*>(
        &fulltextIndexValue),
    1);

static RocksDBEntryType geoIndexValue =
    RocksDBEntryType::GeoIndexValue;
static rocksdb::Slice GeoIndexValue(
    reinterpret_cast<std::underlying_type<RocksDBEntryType>::type*>(
        &geoIndexValue),
    1);

static RocksDBEntryType view = RocksDBEntryType::View;
static rocksdb::Slice View(

@ -89,13 +103,6 @@ static rocksdb::Slice ReplicationApplierConfig(
    reinterpret_cast<std::underlying_type<RocksDBEntryType>::type*>(
        &replicationApplierConfig),
    1);

static RocksDBEntryType fulltextIndexValue =
    RocksDBEntryType::FulltextIndexValue;
static rocksdb::Slice FulltextIndexValue(
    reinterpret_cast<std::underlying_type<RocksDBEntryType>::type*>(
        &fulltextIndexValue),
    1);
}

rocksdb::Slice const& arangodb::rocksDBSlice(RocksDBEntryType const& type) {

@ -116,14 +123,16 @@ rocksdb::Slice const& arangodb::rocksDBSlice(RocksDBEntryType const& type) {
      return IndexValue;
    case RocksDBEntryType::UniqueIndexValue:
      return UniqueIndexValue;
    case RocksDBEntryType::FulltextIndexValue:
      return FulltextIndexValue;
    case RocksDBEntryType::GeoIndexValue:
      return GeoIndexValue;
    case RocksDBEntryType::View:
      return View;
    case RocksDBEntryType::SettingsValue:
      return SettingsValue;
    case RocksDBEntryType::ReplicationApplierConfig:
      return ReplicationApplierConfig;
    case RocksDBEntryType::FulltextIndexValue:
      return FulltextIndexValue;
  }

  return Document;  // avoids warning - error slice instead?!
@ -47,7 +47,8 @@ enum class RocksDBEntryType : char {
  View = '8',
  SettingsValue = '9',
  ReplicationApplierConfig = ':',
  FulltextIndexValue = ';'
  FulltextIndexValue = ';',
  GeoIndexValue = '<'
};

enum class RocksDBLogType : char {
@ -725,9 +725,6 @@ function ReplicationLoggerSuite () {
////////////////////////////////////////////////////////////////////////////////

    testLoggerCreateIndexGeo1 : function () {
      if (db._engine().name === "rocksdb") {
        return;
      }
      var c = db._create(cn);

      var tick = getLastLogTick();

@ -750,9 +747,6 @@ function ReplicationLoggerSuite () {
////////////////////////////////////////////////////////////////////////////////

    testLoggerCreateIndexGeo2 : function () {
      if (db._engine().name === "rocksdb") {
        return;
      }
      var c = db._create(cn);

      var tick = getLastLogTick();

@ -775,9 +769,6 @@ function ReplicationLoggerSuite () {
////////////////////////////////////////////////////////////////////////////////

    testLoggerCreateIndexGeo3 : function () {
      if (db._engine().name === "rocksdb") {
        return;
      }
      var c = db._create(cn);

      var tick = getLastLogTick();

@ -802,9 +793,6 @@ function ReplicationLoggerSuite () {
////////////////////////////////////////////////////////////////////////////////

    testLoggerCreateIndexGeo4 : function () {
      if (db._engine().name === "rocksdb") {
        return;
      }
      var c = db._create(cn);

      var tick = getLastLogTick();

@ -829,9 +817,6 @@ function ReplicationLoggerSuite () {
////////////////////////////////////////////////////////////////////////////////

    testLoggerCreateIndexGeo5 : function () {
      if (db._engine().name === "rocksdb") {
        return;
      }
      var c = db._create(cn);

      var tick = getLastLogTick();
@ -222,11 +222,9 @@ function dumpTestSuite () {
      assertEqual("fulltext", c.getIndexes()[7].type);
      assertEqual([ "a_f" ], c.getIndexes()[7].fields);

      if (db._engine().name !== "rocksdb") {
        assertEqual("geo2", c.getIndexes()[8].type);
        assertEqual([ "a_la", "a_lo" ], c.getIndexes()[8].fields);
        assertFalse(c.getIndexes()[8].unique);
      }
      assertEqual("geo2", c.getIndexes()[8].type);
      assertEqual([ "a_la", "a_lo" ], c.getIndexes()[8].fields);
      assertFalse(c.getIndexes()[8].unique);

      assertEqual(0, c.count());
    },
@ -1,129 +0,0 @@
/*jshint globalstrict:false, strict:false */
/*global fail, assertFalse, assertTrue, assertEqual, assertUndefined */

////////////////////////////////////////////////////////////////////////////////
/// @brief test the shaped json behavior
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

var jsunity = require("jsunity");

var arangodb = require("@arangodb");
var db = arangodb.db;
var internal = require("internal");

////////////////////////////////////////////////////////////////////////////////
/// @brief test suite
////////////////////////////////////////////////////////////////////////////////

function GeoShapedJsonSuite () {
  'use strict';
  var cn = "UnitTestsCollectionShaped";
  var c;

  return {

////////////////////////////////////////////////////////////////////////////////
/// @brief set up
////////////////////////////////////////////////////////////////////////////////

    setUp : function () {
      db._drop(cn);
      c = db._create(cn);
      c.ensureGeoIndex("lat", "lon");

      for (var i = -3; i < 3; ++i) {
        for (var j = -3; j < 3; ++j) {
          c.save({ distance: 0, lat: 40 + 0.01 * i, lon: 40 + 0.01 * j, something: "test" });
        }
      }

      // wait until the documents are actually shaped json
      internal.wal.flush(true, true);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief tear down
////////////////////////////////////////////////////////////////////////////////

    tearDown : function () {
      db._drop(cn);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief call within function with "distance" attribute
////////////////////////////////////////////////////////////////////////////////

    testDistance : function () {
      var result = db._query(
        "FOR u IN WITHIN(" + cn + ", 40.0, 40.0, 5000000, 'distance') " +
        "SORT u.distance " +
        "RETURN { lat: u.lat, lon: u.lon, distance: u.distance }"
      ).toArray();

      // skip first result (which has a distance of 0)
      for (var i = 1; i < result.length; ++i) {
        var doc = result[i];

        assertTrue(doc.hasOwnProperty("lat"));
        assertTrue(doc.hasOwnProperty("lon"));
        assertTrue(doc.hasOwnProperty("distance"));
        assertTrue(doc.distance > 0);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief call near function with "distance" attribute
////////////////////////////////////////////////////////////////////////////////

    testNear : function () {
      var result = db._query(
        "FOR u IN NEAR(" + cn + ", 40.0, 40.0, 5, 'something') SORT u.something " +
        "RETURN { lat: u.lat, lon: u.lon, distance: u.something }")
        .toArray();

      // skip first result (which has a distance of 0)
      for (var i = 1; i < result.length; ++i) {
        var doc = result[i];

        assertTrue(doc.hasOwnProperty("lat"));
        assertTrue(doc.hasOwnProperty("lon"));
        assertTrue(doc.hasOwnProperty("distance"));
        assertTrue(doc.distance >= 0);
      }
    }

  };
}

////////////////////////////////////////////////////////////////////////////////
/// @brief executes the test suite
////////////////////////////////////////////////////////////////////////////////

jsunity.run(GeoShapedJsonSuite);

return jsunity.done();
@ -38,7 +38,7 @@ var internal = require("internal");
|
|||
/// @brief test suite
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function DocumentShapedJsonSuite () {
|
||||
function GeoShapedJsonSuite () {
|
||||
'use strict';
|
||||
var cn = "UnitTestsCollectionShaped";
|
||||
var c;
|
||||
|
@ -52,15 +52,15 @@ function DocumentShapedJsonSuite () {
|
|||
setUp : function () {
|
||||
db._drop(cn);
|
||||
c = db._create(cn);
|
||||
c.ensureGeoIndex("lat", "lon");
|
||||
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
c.save({ _key: "test" + i,
|
||||
value: i,
|
||||
text: "Test" + i,
|
||||
values: [ i ],
|
||||
one: { two: { three: [ 1 ] } } });
|
||||
for (var i = -3; i < 3; ++i) {
|
||||
for (var j = -3; j < 3; ++j) {
|
||||
c.save({ distance: 0, lat: 40 + 0.01 * i, lon: 40 + 0.01 * j, something: "test" });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// wait until the documents are actually shaped json
|
||||
internal.wal.flush(true, true);
|
||||
},
|
||||
|
@ -74,915 +74,45 @@ function DocumentShapedJsonSuite () {
|
|||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief save a Buffer object
|
||||
/// @brief call within function with "distance" attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testBuffer : function () {
|
||||
var b = new Buffer('abcdefg', 'binary');
|
||||
c.save({ _key: "buffer", value: b });
|
||||
var doc = c.document("buffer");
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertEqual(b.toJSON(), doc.value);
|
||||
assertEqual([ 97, 98, 99, 100, 101, 102, 103 ], doc.value);
|
||||
},
|
||||
testDistance : function () {
|
||||
var result = db._query(
|
||||
"FOR u IN WITHIN(" + cn + ", 40.0, 40.0, 5000000, 'distance') " +
|
||||
"SORT u.distance "+
|
||||
"RETURN { lat: u.lat, lon: u.lon, distance: u.distance }"
|
||||
).toArray();
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief save a date object
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// skip first result (which has a distance of 0)
|
||||
for (var i = 1; i < result.length; ++i) {
|
||||
var doc = result[i];
|
||||
|
||||
testDate : function () {
|
||||
var dt = new Date();
|
||||
c.save({ _key: "date", value: dt });
|
||||
var doc = c.document("date");
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertEqual(dt.toJSON(), doc.value);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief save a regexp object
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRegexp : function () {
|
||||
try {
|
||||
c.save({ _key: "date", regexp : /foobar/ });
|
||||
fail();
|
||||
}
|
||||
catch (err) {
|
||||
assertEqual(internal.errors.ERROR_BAD_PARAMETER.code, err.errorNum);
|
||||
assertTrue(doc.hasOwnProperty("lat"));
|
||||
assertTrue(doc.hasOwnProperty("lon"));
|
||||
assertTrue(doc.hasOwnProperty("distance"));
|
||||
assertTrue(doc.distance > 0);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief save a function object
|
||||
/// @brief call near function with "distance" attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testFunction : function () {
|
||||
try {
|
||||
c.save({ _key: "date", func : function () { } });
|
||||
fail();
|
||||
}
|
||||
catch (err) {
|
||||
assertEqual(internal.errors.ERROR_BAD_PARAMETER.code, err.errorNum);
|
||||
}
|
||||
},
|
||||
testNear : function () {
|
||||
var result = db._query(
|
||||
"FOR u IN NEAR(" + cn + ", 40.0, 40.0, 5, 'something') SORT u.something " +
|
||||
"RETURN { lat: u.lat, lon: u.lon, distance: u.something }")
|
||||
.toArray();
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check getting keys
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testGet : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertTrue(doc.hasOwnProperty("_id"));
|
||||
assertTrue(doc.hasOwnProperty("_key"));
|
||||
assertTrue(doc.hasOwnProperty("_rev"));
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertTrue(doc.hasOwnProperty("text"));
|
||||
assertTrue(doc.hasOwnProperty("values"));
|
||||
assertTrue(doc.hasOwnProperty("one"));
|
||||
|
||||
assertEqual(cn + "/test" + i, doc._id);
|
||||
assertEqual("test" + i, doc._key);
|
||||
assertEqual(i, doc.value);
|
||||
assertEqual("Test" + i, doc.text);
|
||||
assertEqual([ i ], doc.values);
|
||||
assertEqual({ two: { three: [ 1 ] } }, doc.one);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check getting keys
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testGetKeys : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
var keys = Object.keys(doc).sort();
|
||||
assertEqual([ "_id", "_key", "_rev", "one", "text", "value", "values" ], keys);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdatePseudo : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual(cn + "/test" + i, doc._id);
|
||||
assertEqual("test" + i, doc._key);
|
||||
assertEqual(i, doc.value);
|
||||
assertEqual("Test" + i, doc.text);
|
||||
assertEqual([ i ], doc.values);
|
||||
|
||||
doc._id = "foobarbaz";
|
||||
doc._key = "meow";
|
||||
doc._rev = null;
|
||||
|
||||
assertEqual("foobarbaz", doc._id);
|
||||
assertEqual("meow", doc._key);
|
||||
assertEqual(null, doc._rev);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShaped1 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc.value = "Tester" + i;
|
||||
doc.text = 42 + i;
|
||||
doc.values.push(i);
|
||||
|
||||
assertEqual(cn + "/test" + i, doc._id);
|
||||
assertEqual("test" + i, doc._key);
|
||||
assertEqual("Tester" + i, doc.value);
|
||||
assertEqual(42 + i, doc.text);
|
||||
assertEqual([ i, i ], doc.values);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShaped2 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual(i, doc.value);
|
||||
|
||||
doc.value = 99;
|
||||
assertEqual(99, doc.value);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShaped3 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual([ i ], doc.values);
|
||||
|
||||
doc.someValue = 1; // need to do this to trigger copying
|
||||
doc.values.push(42);
|
||||
assertEqual([ i, 42 ], doc.values);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShapedNested1 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual({ two: { three: [ 1 ] } }, doc.one);
|
||||
|
||||
doc.one = "removing the nested structure";
|
||||
assertTrue(doc.hasOwnProperty("one"));
|
||||
assertEqual("removing the nested structure", doc.one);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShapedNested2 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual({ two: { three: [ 1 ] } }, doc.one);
|
||||
|
||||
doc.someValue = 1; // need to do this to trigger copying
|
||||
doc.one.two.three = "removing the nested structure";
|
||||
assertTrue(doc.hasOwnProperty("one"));
|
||||
assertTrue(doc.one.hasOwnProperty("two"));
|
||||
assertTrue(doc.one.two.hasOwnProperty("three"));
|
||||
assertEqual("removing the nested structure", doc.one.two.three);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check updating of keys in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateShapedNested3 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
assertEqual({ two: { three: [ 1 ] } }, doc.one);
|
||||
doc.someValue = 1; // need to do this to trigger copying
|
||||
|
||||
doc.one.two.four = 42;
|
||||
assertTrue(doc.hasOwnProperty("one"));
|
||||
assertTrue(doc.one.hasOwnProperty("two"));
|
||||
assertTrue(doc.one.two.hasOwnProperty("three"));
|
||||
assertTrue(doc.one.two.hasOwnProperty("four"));
|
||||
assertEqual([ 1 ], doc.one.two.three);
|
||||
assertEqual(42, doc.one.two.four);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check adding attributes in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAddAttributes1 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc.thisIsAnAttribute = 99;
|
||||
|
||||
assertTrue(doc.hasOwnProperty("thisIsAnAttribute"));
|
||||
assertEqual(99, doc.thisIsAnAttribute);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check adding attributes in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAddAttributes2 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc["some attribute set now"] = "aha";
|
||||
|
||||
assertTrue(doc.hasOwnProperty("some attribute set now"));
|
||||
assertEqual("aha", doc["some attribute set now"]);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check adding attributes in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAddAttributesIndexed : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc[1] = "aha";
|
||||
|
||||
assertTrue(doc.hasOwnProperty(1));
|
||||
assertTrue(doc.hasOwnProperty("1"));
|
||||
assertEqual("aha", doc[1]);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check adding attributes in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAddAttributesNested1 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc.someValue = 1; // need to do this to trigger copying
|
||||
doc.one.test = { foo: "bar" };
|
||||
assertTrue(doc.hasOwnProperty("one"));
|
||||
assertTrue(doc.one.hasOwnProperty("two"));
|
||||
assertTrue(doc.one.two.hasOwnProperty("three"));
|
||||
assertTrue(doc.one.hasOwnProperty("test"));
|
||||
assertEqual({ foo: "bar" }, doc.one.test);
|
||||
assertEqual({ three: [ 1 ] }, doc.one.two);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check adding attributes in shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testAddAttributesNested2 : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
doc.something = { foo: "bar" };
|
||||
assertTrue(doc.hasOwnProperty("something"));
|
||||
assertTrue(doc.something.hasOwnProperty("foo"));
|
||||
assertEqual("bar", doc.something.foo);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check deletion of keys from shaped json
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testDeletionPseudoFirst : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
// initial state
|
||||
assertTrue(doc.hasOwnProperty("_key"));
|
||||
assertTrue(doc.hasOwnProperty("_rev"));
|
||||
assertTrue(doc.hasOwnProperty("_id"));
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertTrue(doc.hasOwnProperty("text"));
|
||||
assertTrue(doc.hasOwnProperty("values"));
|
||||
|
||||
// delete pseudo-attributes first
|
||||
delete doc._key;
|
||||
assertFalse(doc.hasOwnProperty("_key"));
|
||||
|
||||
delete doc._rev;
|
||||
assertFalse(doc.hasOwnProperty("_rev"));
|
||||
|
||||
delete doc._id;
|
||||
assertFalse(doc.hasOwnProperty("_id"));
|
||||
|
||||
delete doc.value;
|
||||
assertFalse(doc.hasOwnProperty("value"));
|
||||
|
||||
delete doc.text;
|
||||
assertFalse(doc.hasOwnProperty("text"));
|
||||
|
||||
delete doc.values;
|
||||
assertFalse(doc.hasOwnProperty("values"));
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief check deletion of special attribute _id
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testDeletionShapedKeyId : function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c.document("test" + i);
|
||||
|
||||
// initial state
|
||||
assertTrue(doc.hasOwnProperty("_key"));
|
||||
assertTrue(doc.hasOwnProperty("_rev"));
|
||||
assertTrue(doc.hasOwnProperty("_id"));
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertTrue(doc.hasOwnProperty("text"));
|
||||
assertTrue(doc.hasOwnProperty("values"));
|
||||
|
||||
// delete special attribute _id
|
||||
delete doc._id;
|
||||
assertFalse(doc.hasOwnProperty("_id"));
|
||||
assertUndefined(doc._id);
|
||||
assertTrue(doc.hasOwnProperty("_key"));
|
||||
assertTrue(doc.hasOwnProperty("_rev"));
|
||||
assertTrue(doc.hasOwnProperty("value"));
|
||||
assertTrue(doc.hasOwnProperty("text"));
|
||||
assertTrue(doc.hasOwnProperty("values"));
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of special attributes from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionShapedKeyRev : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        // delete special attribute _key
        delete doc._key;
        assertFalse(doc.hasOwnProperty("_key"));
        assertUndefined(doc._key);
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        // delete special attribute _rev
        delete doc._rev;
        assertFalse(doc.hasOwnProperty("_rev"));
        assertFalse(doc.hasOwnProperty("_key"));
        assertUndefined(doc._rev);
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionShapedFirst : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        // delete shaped attributes first
        delete doc.value;
        assertFalse(doc.hasOwnProperty("value"));
        assertUndefined(doc.value);

        delete doc.text;
        assertFalse(doc.hasOwnProperty("text"));
        assertUndefined(doc.text);

        delete doc.values;
        assertFalse(doc.hasOwnProperty("values"));
        assertUndefined(doc.values);

        delete doc._key;
        assertFalse(doc.hasOwnProperty("_key"));
        assertUndefined(doc._key);

        delete doc._rev;
        assertFalse(doc.hasOwnProperty("_rev"));
        assertUndefined(doc._rev);

        delete doc._id;
        assertFalse(doc.hasOwnProperty("_id"));
        assertUndefined(doc._id);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion after deletion
////////////////////////////////////////////////////////////////////////////////

    testDeletionDeletion : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("one"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("values"));

        assertEqual([ "_id", "_key", "_rev", "one", "text", "value", "values" ], Object.keys(doc).sort());

        // delete _key
        delete doc._key;
        assertEqual([ "_id", "_rev", "one", "text", "value", "values" ], Object.keys(doc).sort());

        // delete text
        delete doc.text;
        assertEqual([ "_id", "_rev", "one", "value", "values" ], Object.keys(doc).sort());

        // delete _id
        delete doc._id;
        assertEqual([ "_rev", "one", "value", "values" ], Object.keys(doc).sort());

        // delete value
        delete doc.value;
        assertEqual([ "_rev", "one", "values" ], Object.keys(doc).sort());

        // delete _rev
        delete doc._rev;
        assertEqual([ "one", "values" ], Object.keys(doc).sort());

        // delete values
        delete doc.values;
        assertEqual([ "one" ], Object.keys(doc).sort());

        // delete one
        delete doc.one;
        assertEqual([ ], Object.keys(doc).sort());
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionAfterUpdate : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        doc._key = "foobar";
        assertEqual("foobar", doc._key);
        doc._rev = 12345;
        assertEqual(12345, doc._rev);
        doc._id = "foo";
        assertEqual("foo", doc._id);

        delete doc._key;
        delete doc._rev;

        assertFalse(doc.hasOwnProperty("_rev"));
        assertFalse(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertEqual("foo", doc._id);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionSomeAttributes : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        delete doc._key;
        delete doc.value;

        assertFalse(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertFalse(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionIndexed : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        delete doc._key;
        doc[9] = "42!";

        assertFalse(doc.hasOwnProperty("_key"));
        assertEqual("42!", doc[9]);

        delete doc[9];
        assertFalse(doc.hasOwnProperty(9));
        assertFalse(doc.hasOwnProperty("9"));
        assertUndefined(doc[9]);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionNested : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        delete doc.one.two.three;

        assertTrue(doc.hasOwnProperty("one"));
        assertTrue(doc.one.hasOwnProperty("two"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check access after deletion of documents
////////////////////////////////////////////////////////////////////////////////

    testAccessAfterDeletion : function () {
      var docs = [ ];
      for (var i = 0; i < 100; ++i) {
        docs[i] = c.document("test" + i);
      }

      c.truncate();
      if (c.rotate) {
        c.rotate();
        internal.wait(5);
      }

      for (i = 0; i < 100; ++i) {
        assertEqual(cn + "/test" + i, docs[i]._id);
        assertEqual("test" + i, docs[i]._key);
        assertEqual("Test" + i, docs[i].text);
        assertEqual([ i ], docs[i].values);
        assertEqual({ two: { three: [ 1 ] } }, docs[i].one);
      }
    },
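
    // NOTE: the point of the test above is that documents returned by
    // c.document() must remain fully usable after truncate() and a
    // datafile rotation have removed the underlying storage, i.e. they
    // must not reference freed memory.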
////////////////////////////////////////////////////////////////////////////////
/// @brief check access after dropping collection
////////////////////////////////////////////////////////////////////////////////

    testAccessAfterDropping : function () {
      var docs = [ ];
      for (var i = 0; i < 100; ++i) {
        docs[i] = c.document("test" + i);
      }

      c.drop();

      internal.wait(5);

      for (i = 0; i < 100; ++i) {
        assertEqual(cn + "/test" + i, docs[i]._id);
        assertEqual("test" + i, docs[i]._key);
        assertEqual("Test" + i, docs[i].text);
        assertEqual([ i ], docs[i].values);
        assertEqual({ two: { three: [ 1 ] } }, docs[i].one);
      }
    }

  };
}

////////////////////////////////////////////////////////////////////////////////
/// @brief test suite
////////////////////////////////////////////////////////////////////////////////

function EdgeShapedJsonSuite () {
  'use strict';
  var cn = "UnitTestsCollectionShaped";
  var c;

  return {

////////////////////////////////////////////////////////////////////////////////
/// @brief set up
////////////////////////////////////////////////////////////////////////////////

    setUp : function () {
      db._drop(cn);
      c = db._createEdgeCollection(cn);

      for (var i = 0; i < 100; ++i) {
        c.save(cn + "/from" + i,
               cn + "/to" + i,
               { _key: "test" + i,
                 value: i,
                 text: "Test" + i,
                 values: [ i ],
                 one: { two: { three: [ 1 ] } } });
      }

      // wait until the documents are actually shaped json
      internal.wal.flush(true, true);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief tear down
////////////////////////////////////////////////////////////////////////////////

    tearDown : function () {
      db._drop(cn);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check updating of keys in shaped json
////////////////////////////////////////////////////////////////////////////////

    testUpdatePseudo : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        assertEqual(cn + "/from" + i, doc._from);
        assertEqual(cn + "/to" + i, doc._to);

        doc._from = "foobarbaz";
        doc._to = "meow";

        assertEqual("foobarbaz", doc._from);
        assertEqual("meow", doc._to);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check adding attributes in shaped json
////////////////////////////////////////////////////////////////////////////////

    testAddAttribute : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        doc["some attribute set now"] = "aha";

        assertTrue(doc.hasOwnProperty("some attribute set now"));
        assertEqual("aha", doc["some attribute set now"]);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionPseudoFirst : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));

        // delete pseudo-attributes
        delete doc._from;
        assertFalse(doc.hasOwnProperty("_from"));

        delete doc._to;
        assertFalse(doc.hasOwnProperty("_to"));

        delete doc._key;
        assertFalse(doc.hasOwnProperty("_key"));

        delete doc._rev;
        assertFalse(doc.hasOwnProperty("_rev"));

        delete doc._id;
        assertFalse(doc.hasOwnProperty("_id"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionShapedFirst : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));
        assertTrue(doc.hasOwnProperty("value"));

        // delete shaped attributes first
        delete doc.value;
        assertFalse(doc.hasOwnProperty("value"));
        assertUndefined(doc.value);

        delete doc._from;
        assertFalse(doc.hasOwnProperty("_from"));
        assertUndefined(doc._from);

        delete doc._to;
        assertFalse(doc.hasOwnProperty("_to"));
        assertUndefined(doc._to);
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of special attributes from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionShapedKeyRev : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        // delete special attribute _key
        delete doc._key;
        assertFalse(doc.hasOwnProperty("_key"));
        assertUndefined(doc._key);
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));

        // delete special attribute _rev
        delete doc._rev;
        assertFalse(doc.hasOwnProperty("_rev"));
        assertFalse(doc.hasOwnProperty("_key"));
        assertUndefined(doc._rev);
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("values"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion of keys from shaped json
////////////////////////////////////////////////////////////////////////////////

    testDeletionAfterUpdate : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));

        doc._from = "foobar";
        assertEqual("foobar", doc._from);
        doc._from = 12345;
        assertEqual(12345, doc._from);
        doc._to = "foo";
        assertEqual("foo", doc._to);

        delete doc._from;
        delete doc._to;

        assertFalse(doc.hasOwnProperty("_from"));
        assertFalse(doc.hasOwnProperty("_to"));
      }
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief check deletion after deletion
////////////////////////////////////////////////////////////////////////////////

    testDeletionDeletion : function () {
      for (var i = 0; i < 100; ++i) {
        var doc = c.document("test" + i);

        // initial state
        assertTrue(doc.hasOwnProperty("_from"));
        assertTrue(doc.hasOwnProperty("_to"));
        assertTrue(doc.hasOwnProperty("_key"));
        assertTrue(doc.hasOwnProperty("_rev"));
        assertTrue(doc.hasOwnProperty("_id"));
        assertTrue(doc.hasOwnProperty("one"));
        assertTrue(doc.hasOwnProperty("text"));
        assertTrue(doc.hasOwnProperty("value"));
        assertTrue(doc.hasOwnProperty("values"));

        var keys = Object.keys(doc).sort();
        assertEqual([ "_from", "_id", "_key", "_rev", "_to", "one", "text", "value", "values" ], keys);

        // delete _from
        delete doc._from;
        assertEqual([ "_id", "_key", "_rev", "_to", "one", "text", "value", "values" ], Object.keys(doc).sort());

        // delete _to
        delete doc._to;
        assertEqual([ "_id", "_key", "_rev", "one", "text", "value", "values" ], Object.keys(doc).sort());

        // delete _key
        delete doc._key;
        assertEqual([ "_id", "_rev", "one", "text", "value", "values" ], Object.keys(doc).sort());

        // delete text
        delete doc.text;
        assertEqual([ "_id", "_rev", "one", "value", "values" ], Object.keys(doc).sort());

        // delete _id
        delete doc._id;
        assertEqual([ "_rev", "one", "value", "values" ], Object.keys(doc).sort());

        // delete value
        delete doc.value;
        assertEqual([ "_rev", "one", "values" ], Object.keys(doc).sort());

        // delete _rev
        delete doc._rev;
        assertEqual([ "one", "values" ], Object.keys(doc).sort());

        // delete values
        delete doc.values;
        assertEqual([ "one" ], Object.keys(doc).sort());

        // delete one
        delete doc.one;
        assertEqual([ ], Object.keys(doc).sort());
      }
    },

      // skip first result (which has a distance of 0)
      for (var i = 1; i < result.length; ++i) {
        var doc = result[i];

        assertTrue(doc.hasOwnProperty("lat"));
        assertTrue(doc.hasOwnProperty("lon"));
        assertTrue(doc.hasOwnProperty("distance"));
        assertTrue(doc.distance >= 0);
      }
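
    // NOTE: the loop above consumes a `result` array that is produced
    // outside of this hunk. A minimal sketch of how such a
    // distance-annotated result is typically obtained via the simple-query
    // API (the geo index on `lat`/`lon`, the reference point (0, 0) and the
    // limit are assumptions, not taken from this diff):
    //
    //   c.ensureGeoIndex("lat", "lon");
    //   var result = c.near(0, 0).distance().limit(100).toArray();
    //
    // result[0] is then the closest match, and every entry carries a
    // `distance` attribute, which is what the assertions check.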
@ -993,8 +123,7 @@ function EdgeShapedJsonSuite () {

////////////////////////////////////////////////////////////////////////////////
/// @brief executes the test suite
////////////////////////////////////////////////////////////////////////////////

jsunity.run(DocumentShapedJsonSuite);
jsunity.run(EdgeShapedJsonSuite);
jsunity.run(GeoShapedJsonSuite);

return jsunity.done();

@ -29,7 +29,9 @@

#include "catch.hpp"

#include "RocksDBEngine/RocksDBComparator.h"
#include "RocksDBEngine/RocksDBKey.h"
#include "RocksDBEngine/RocksDBKeyBounds.h"
#include "RocksDBEngine/RocksDBTypes.h"
#include "Basics/Exceptions.h"

@ -39,7 +41,7 @@ using namespace arangodb;

// --SECTION--                                                      test suite
// -----------------------------------------------------------------------------

/// @brief setup
/// @brief test RocksDBKey class
TEST_CASE("RocksDBKeyTest", "[rocksdbkeytest]") {

/// @brief test database

@ -227,6 +229,26 @@ SECTION("test_edge_index") {
|
|||
CHECK(s1 == std::string("5\0\0\0\0\0\0\0\0a/1\0foobar\x06", 20));
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
/// @brief test RocksDBKeyBounds class
|
||||
TEST_CASE("RocksDBKeyBoundsTest", "[rocksdbkeybounds]") {
|
||||
|
||||
/// @brief test geo index key and bounds consistency
|
||||
SECTION("test_geo_index") {
|
||||
|
||||
RocksDBComparator cmp;
|
||||
|
||||
RocksDBKey k1 = RocksDBKey::GeoIndexValue(256, 128, false);
|
||||
RocksDBKeyBounds bb1 = RocksDBKeyBounds::GeoIndex(256, false);
|
||||
|
||||
CHECK(cmp.Compare(k1.string(), bb1.start()) > 0);
|
||||
CHECK(cmp.Compare(k1.string(), bb1.end()) < 0);
|
||||
|
||||
RocksDBKey k2 = RocksDBKey::GeoIndexValue(256, 128, true);
|
||||
RocksDBKeyBounds bb2 = RocksDBKeyBounds::GeoIndex(256, true);
|
||||
CHECK(cmp.Compare(k2.string(), bb2.start()) > 0);
|
||||
CHECK(cmp.Compare(k2.string(), bb2.end()) < 0);
|
||||
}
|
||||
|
||||
}
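
// NOTE: the geo section above pins down an ordering contract: a
// GeoIndexValue key must compare strictly greater than the start and
// strictly less than the end of the GeoIndex bounds built for the same
// object id and boolean flag, since range scans over the RocksDB geo
// index rely on the bounds enclosing every key they are meant to visit.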