1
0
Fork 0

Merge branch 'devel' of https://github.com/arangodb/arangodb into devel

This commit is contained in:
Jan Steemann 2016-04-13 16:29:26 +02:00
commit 576fcab12b
10 changed files with 266 additions and 49 deletions

View File

@ -61,14 +61,24 @@ list(APPEND V8_GYP_ARGS
## ICU EXPORTS
################################################################################
set(ICU_VERSION
"54.1"
set(ICU_COMPACT_VERSION
"54"
CACHE INTERNAL
"ICU: Version"
"ICU: compact Version"
)
set(ICU_VERSION
"${ICU_COMPACT_VERSION}.1"
CACHE INTERNAL
"ICU: Version"
)
set(ICU_BASEDIR ${V8_DIR}/third_party/icu/source)
set(ICU_DT "${ICU_BASEDIR}/data/in/icudtl.dat")
set(ICU_DT_DEST "icudt${ICU_COMPACT_VERSION}l.dat")
set(ICU_INCLUDE_DIR
${V8_DIR}/third_party/icu/source/common;${V8_DIR}/third_party/icu/source/i18n;${V8_DIR}/third_party/icu/source/io
${ICU_BASEDIR}/common;${ICU_BASEDIR}/i18n;${ICU_BASEDIR}/io
CACHE INTERNAL
"ICU: include path"
FORCE
@ -449,3 +459,6 @@ else ()
set(LINK_DIRECTORIES "${LINK_DIRECTORIES}" PARENT_SCOPE)
endif()
install(FILES ${ICU_DT}
DESTINATION "share/arangodb/"
RENAME ${ICU_DT_DEST})

View File

@ -192,7 +192,7 @@ NodeType Node::type() const {
// lh-value at path vector
Node& Node::operator ()(std::vector<std::string> const& pv) {
if (pv.size()) {
std::string const key = pv.at(0);
std::string const& key = pv.at(0);
if (_children.find(key) == _children.end()) {
_children[key] = std::make_shared<Node>(key, this);
}
@ -207,7 +207,7 @@ Node& Node::operator ()(std::vector<std::string> const& pv) {
// rh-value at path vector
Node const& Node::operator ()(std::vector<std::string> const& pv) const {
if (pv.size()) {
std::string const key = pv.at(0);
std::string const& key = pv.at(0);
if (_children.find(key) == _children.end()) {
throw StoreException(
std::string("Node ") + key + std::string(" not found"));
@ -866,14 +866,16 @@ void Store::dumpToBuilder (Builder& builder) const {
}
}
{
VPackObjectBuilder guard(&builder);
VPackArrayBuilder garray(&builder);
for (auto const& i : _observer_table) {
VPackObjectBuilder guard(&builder);
builder.add(i.first, VPackValue(i.second));
}
}
{
VPackObjectBuilder guard(&builder);
VPackArrayBuilder garray(&builder);
for (auto const& i : _observed_table) {
VPackObjectBuilder guard(&builder);
builder.add(i.first, VPackValue(i.second));
}
}

View File

@ -383,16 +383,18 @@ struct AstNode {
/// @brief whether or not a value node is of type attribute access that
/// refers to a variable reference
AstNode const* getAttributeAccessForVariable() const {
if (type != NODE_TYPE_ATTRIBUTE_ACCESS && type != NODE_TYPE_EXPANSION) {
AstNode const* getAttributeAccessForVariable(bool allowIndexedAccess) const {
if (type != NODE_TYPE_ATTRIBUTE_ACCESS && type != NODE_TYPE_EXPANSION
&& !(allowIndexedAccess && type == NODE_TYPE_INDEXED_ACCESS)) {
return nullptr;
}
auto node = this;
while (node->type == NODE_TYPE_ATTRIBUTE_ACCESS ||
(allowIndexedAccess && node->type == NODE_TYPE_INDEXED_ACCESS) ||
node->type == NODE_TYPE_EXPANSION) {
if (node->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
if (node->type == NODE_TYPE_ATTRIBUTE_ACCESS || node->type == NODE_TYPE_INDEXED_ACCESS) {
node = node->getMember(0);
} else {
// expansion, i.e. [*]
@ -400,7 +402,7 @@ struct AstNode {
TRI_ASSERT(node->numMembers() >= 2);
if (node->getMember(1)->type != NODE_TYPE_REFERENCE) {
if (node->getMember(1)->getAttributeAccessForVariable() == nullptr) {
if (node->getMember(1)->getAttributeAccessForVariable(allowIndexedAccess) == nullptr) {
return nullptr;
}
}
@ -421,13 +423,14 @@ struct AstNode {
/// @brief whether or not a value node is of type attribute access that
/// refers to any variable reference
bool isAttributeAccessForVariable() const {
return (getAttributeAccessForVariable() != nullptr);
return (getAttributeAccessForVariable(false) != nullptr);
}
/// @brief whether or not a value node is of type attribute access that
/// refers to the specified variable reference
bool isAttributeAccessForVariable(Variable const* variable) const {
auto node = getAttributeAccessForVariable();
bool isAttributeAccessForVariable(Variable const* variable, bool allowIndexedAccess) const {
auto node = getAttributeAccessForVariable(allowIndexedAccess);
if (node == nullptr) {
return false;

View File

@ -179,7 +179,7 @@ int IndexBlock::initialize() {
auto lhs = leaf->getMember(0);
auto rhs = leaf->getMember(1);
if (lhs->isAttributeAccessForVariable(outVariable)) {
if (lhs->isAttributeAccessForVariable(outVariable, false)) {
// Index is responsible for the left side, check if right side has to be
// evaluated
if (!rhs->isConstant()) {

View File

@ -32,13 +32,10 @@ using EN = arangodb::aql::ExecutionNode;
static bool checkPathVariableAccessFeasible(CalculationNode const* cn,
TraversalNode* tn,
Variable const* var,
bool& conditionIsImpossible,
Ast* ast) {
bool& conditionIsImpossible) {
auto node = cn->expression()->node();
if (node->containsNodeType(NODE_TYPE_OPERATOR_BINARY_OR) ||
node->containsNodeType(NODE_TYPE_OPERATOR_BINARY_IN) ||
node->containsNodeType(NODE_TYPE_OPERATOR_BINARY_NIN)) {
if (node->containsNodeType(NODE_TYPE_OPERATOR_BINARY_OR)) {
return false;
}
@ -58,6 +55,12 @@ static bool checkPathVariableAccessFeasible(CalculationNode const* cn,
// traversal (-> TraverserExpression::recursiveCheck
return false;
}
if (node->type == NODE_TYPE_OPERATOR_BINARY_IN ||
node->type == NODE_TYPE_OPERATOR_BINARY_NIN) {
if (!node->getMember(0)->isAttributeAccessForVariable(var, true)) {
return false;
}
}
}
if (onePath[len - 2]->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
@ -136,11 +139,10 @@ static bool extractSimplePathAccesses(AstNode const* node, TraversalNode* tn,
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_LT) ||
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_LE) ||
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_GT) ||
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_GE)
// || As long as we need to twist the access, this is impossible:
// (oneNode->type == NODE_TYPE_OPERATOR_BINARY_IN ) ||
// (oneNode->type == NODE_TYPE_OPERATOR_BINARY_NIN))
) {
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_GE) ||
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_IN ) ||
(oneNode->type == NODE_TYPE_OPERATOR_BINARY_NIN))
{
compareNode = oneNode;
}
}
@ -334,18 +336,21 @@ bool TraversalConditionFinder::before(ExecutionNode* en) {
if (variableType >= 0) {
if ((variableType == 2) &&
checkPathVariableAccessFeasible(cn, node, conditionVar,
conditionIsImpossible,
_plan->getAst())) {
conditionIsImpossible)) {
condition->andCombine(
it.second->expression()->node()->clone(_plan->getAst()));
foundCondition = true;
}
if (conditionIsImpossible) break;
if (conditionIsImpossible) {
break;
}
}
}
}
}
if (conditionIsImpossible) break;
if (conditionIsImpossible) {
break;
}
}
if (!conditionIsImpossible) {

View File

@ -51,7 +51,7 @@ DispatcherThread::DispatcherThread(DispatcherQueue* queue)
void DispatcherThread::run() {
double worked = 0;
double grace = 0.2;
double const grace = 0.2;
// iterate until we are shutting down
while (!_queue->_stopping.load(std::memory_order_relaxed)) {
@ -72,7 +72,7 @@ void DispatcherThread::run() {
// we need to check again if more work has arrived after we have
// aquired the lock. The lockfree queue and _nrWaiting are accessed
// using "memory_order_seq_cst", this guaranties that we do not
// using "memory_order_seq_cst", this guarantees that we do not
// miss a signal.
if (worked + grace < now) {
@ -98,8 +98,12 @@ void DispatcherThread::run() {
break;
}
} else if (worked < now) {
// we worked earlier, but not now
uintptr_t n = (uintptr_t) this;
usleep(1 + ((n >> 3) % 19));
usleep(20 + ((n >> 3) % 19));
} else {
// we worked just now. sleep a little while to avoid completely busy waiting
usleep(20);
}
}
}

View File

@ -1662,6 +1662,11 @@ int ArangoServer::startupServer() {
FATAL_ERROR_EXIT();
}
}
// Loading the agency's persistent state
if(_applicationAgency->agent() != nullptr) {
_applicationAgency->agent()->load();
}
if (_disableAuthentication) {
LOG(INFO) << "Authentication is turned off";
@ -1815,15 +1820,12 @@ void ArangoServer::waitForHeartbeat() {
////////////////////////////////////////////////////////////////////////////////
/// @brief runs the server
////////////////////////////////////////////////////////////////////////////////
int ArangoServer::runServer(TRI_vocbase_t* vocbase) {
// disabled maintenance mode
// disable maintenance mode
waitForHeartbeat();
HttpHandlerFactory::setMaintenance(false);
// Loading ageny's persistent state
if(_applicationAgency->agent()!=nullptr)
_applicationAgency->agent()->load();
// just wait until we are signalled
_applicationServer->wait();

View File

@ -29,6 +29,9 @@
#include "Utils/TransactionContext.h"
#include "VocBase/KeyGenerator.h"
#include <velocypack/Iterator.h>
#include <velocypack/velocypack-aliases.h>
using TraverserExpression = arangodb::traverser::TraverserExpression;
////////////////////////////////////////////////////////////////////////////////
@ -261,7 +264,31 @@ bool TraverserExpression::matchesCheck(arangodb::Transaction* trx,
case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_GE:
return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) >= 0;
case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_GT:
return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) > 0;
return arangodb::basics::VelocyPackHelper::compare(result, compareTo->slice(), true, options) > 0;
case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_IN: {
// In means any of the elements in compareTo is identical
VPackSlice compareArray = compareTo->slice();
for (auto const& cmp : VPackArrayIterator(compareArray)) {
if (arangodb::basics::VelocyPackHelper::compare(result, cmp, false, options) == 0) {
// One is identical
return true;
}
}
// If we get here, none is identical
return false;
}
case arangodb::aql::NODE_TYPE_OPERATOR_BINARY_NIN: {
// NIN means none of the elements in compareTo is identical
VPackSlice compareArray = compareTo->slice();
for (auto const& cmp : VPackArrayIterator(compareArray)) {
if (arangodb::basics::VelocyPackHelper::compare(result, cmp, false, options) == 0) {
// One is identical
return false;
}
}
// If we get here, none is identical
return true;
}
default:
TRI_ASSERT(false);
}

View File

@ -369,15 +369,16 @@ configure_file("${CMAKE_SOURCE_DIR}/CMakeCPackOptions.cmake.in"
"${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake" @ONLY)
set(CPACK_PROJECT_CONFIG_FILE "${CMAKE_BINARY_DIR}/CMakeCPackOptions.cmake")
# components
install(
FILES ${PROJECT_SOURCE_DIR}/Installation/debian/arangodb.init
PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
DESTINATION etc/init.d
RENAME arangodb
COMPONENT debian-extras
)
if (NOT(MSVC))
# components
install(
FILES ${PROJECT_SOURCE_DIR}/Installation/debian/arangodb.init
PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
DESTINATION etc/init.d
RENAME arangodb
COMPONENT debian-extras
)
endif()
# Custom targets ----------------------------------------------------------------

View File

@ -1300,12 +1300,171 @@ function complexInternaSuite () {
}
// All elements must be enumerated
assertEqual(found, amount);
}
},
};
}
function optimizeInSuite () {
// Test suite for the "optimize-traversals" optimizer rule: verifies that
// IN / NOT IN filters on traversal path members (p.vertices[i], p.edges[i])
// are pushed into the traversal, so far fewer documents reach the
// post-traversal FILTER (visible via extra.stats.filtered).
var ruleName = "optimize-traversals";
var startId = vn + "/optIn";
return {
setUp: function () {
// Build a 2-level tree: one start vertex with 100 children
// ("tmp0".."tmp99", value 0..99); each child has 100 children of its
// own, giving 100 * 100 = 10000 depth-2 paths from the start vertex.
// Edges from the start vertex mirror the child's _key/value so the
// same filters can target either p.vertices[1] or p.edges[0].
cleanup();
vc = db._create(vn, {numberOfShards: 4});
ec = db._createEdgeCollection(en, {numberOfShards: 4});
vc.save({_key: startId.split("/")[1]});
for (var i = 0; i < 100; ++i) {
var tmp = vc.save({_key: "tmp" + i, value: i});
ec.save(startId, tmp._id, {_key: "tmp" + i, value: i});
for (var j = 0; j < 100; ++j) {
var innerTmp = vc.save({_key: "innertmp" + i + "_" + j});
ec.save(tmp._id, innerTmp._id, {});
}
}
},
tearDown: cleanup,
testSingleOptimize: function () {
// A single IN filter on a path vertex or edge: with the rule active
// only the 90 non-matching first-level candidates are filtered;
// with all rules disabled the filter fires once per depth-2 path
// of each rejected candidate (90 * 100 times).
var vertexQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.vertices[1]._key IN @keys RETURN v._key";
var edgeQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.edges[0]._key IN @keys RETURN v._key";
var bindVars = {
"@eCol": en,
"startId": startId,
"keys": ["tmp0", "tmp1", "tmp2", "tmp3", "tmp4", "tmp5", "tmp6", "tmp7", "tmp8", "tmp9"]
};
var result = db._query(vertexQuery, bindVars);
var extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
result = db._query(edgeQuery, bindVars);
extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
// if the rule is disabled we expect to do way more filtering
var noOpt = { optimizer: { rules: [ "-all" ] } };
result = db._query(vertexQuery, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
result = db._query(edgeQuery, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
},
testCombinedAndOptimize: function () {
// Two AND-combined IN filters — on vertices only, edges only, and
// mixed vertex/edge accesses. All four variants should be optimized
// equally well (same filtered counts as the single-filter case).
var vertexQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.vertices[1]._key " +
" IN @keys AND p.vertices[1].value IN @values RETURN v._key";
var edgeQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.edges[0]._key " +
"IN @keys AND p.edges[0].value IN @values RETURN v._key";
var mixedQuery1 = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.edges[0]._key " +
"IN @keys AND p.vertices[1].value IN @values RETURN v._key";
var mixedQuery2 = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER p.vertices[1]._key " +
"IN @keys AND p.edges[0].value IN @values RETURN v._key";
var bindVars = {
"@eCol": en,
"startId": startId,
"keys": ["tmp0", "tmp1", "tmp2", "tmp3", "tmp4", "tmp5", "tmp6", "tmp7", "tmp8", "tmp9"],
"values": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
};
var result = db._query(vertexQuery, bindVars);
var extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
result = db._query(edgeQuery, bindVars);
extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
result = db._query(mixedQuery1, bindVars);
extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
result = db._query(mixedQuery2, bindVars);
extra = result.getExtra();
// We have only 10 valid elements in the array.
assertEqual(extra.stats.filtered, 90);
assertEqual(result.count(), 1000);
// if the rule is disabled we expect to do way more filtering
var noOpt = { optimizer: { rules: [ "-all" ] } };
result = db._query(vertexQuery, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
result = db._query(edgeQuery, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
result = db._query(mixedQuery1, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
result = db._query(mixedQuery2, bindVars, {}, noOpt);
extra = result.getExtra();
// For each vertex not in the list we filter once for every connected edge
assertEqual(extra.stats.filtered, 90 * 100);
assertEqual(result.count(), 1000);
},
testCombinedNoOptimize: function () {
// Negative case: "@obj IN p.vertices" / "@obj IN p.edges" tests
// membership in the whole path array, not an indexed access, so the
// rule must NOT fire — the plan with only this rule enabled has to
// equal the fully unoptimized plan.
var vertexQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER @obj IN p.vertices RETURN [v, e, p]";
var edgeQuery = "FOR v, e, p IN 2 OUTBOUND @startId @@eCol FILTER @obj IN p.edges RETURN [v, e, p]";
var bindVars = {
"@eCol": en,
"startId": startId,
"obj": {"_key": "tmp0", "value": 0}
};
var noOpt = { optimizer: { rules: [ "-all" ] } };
var opt = { optimizer: { rules: [ "-all" , "+" + ruleName ] } };
var optPlans = AQL_EXPLAIN(vertexQuery, bindVars, opt).plan;
var noOptPlans = AQL_EXPLAIN(vertexQuery, bindVars, noOpt).plan;
assertEqual(optPlans.rules, []);
// This query cannot be optimized by traversal rule
assertEqual(optPlans, noOptPlans);
optPlans = AQL_EXPLAIN(edgeQuery, bindVars, opt).plan;
noOptPlans = AQL_EXPLAIN(edgeQuery, bindVars, noOpt).plan;
assertEqual(optPlans.rules, []);
// This query cannot be optimized by traversal rule
assertEqual(optPlans, noOptPlans);
},
};
}
function complexFilteringSuite () {
/***********************************************************************
@ -2083,6 +2242,7 @@ jsunity.run(multiCollectionGraphSuite);
jsunity.run(multiEdgeCollectionGraphSuite);
jsunity.run(potentialErrorsSuite);
jsunity.run(complexInternaSuite);
jsunity.run(optimizeInSuite);
jsunity.run(complexFilteringSuite);
jsunity.run(brokenGraphSuite);
jsunity.run(multiEdgeDirectionSuite);