
Merge branch 'vpack' of github.com:arangodb/arangodb into vpack

Michael Hackstein 2015-12-04 17:22:00 +01:00
commit c9e07e3cf1
10 changed files with 58 additions and 87 deletions

View File

@@ -84,6 +84,9 @@ struct Options {
   // keep top-level object/array open when building objects with the Parser
   bool keepTopLevelOpen = false;

+  // clear builder before starting to parse in Parser
+  bool clearBuilderBeforeParse = true;
+
   // validate UTF-8 strings when JSON-parsing with Parser
   bool validateUtf8Strings = false;

View File

@@ -146,7 +146,9 @@ class Parser {
       _start = start;
       _size = size;
       _pos = 0;
-      _b->clear();
+      if (options->clearBuilderBeforeParse) {
+        _b->clear();
+      }
       _b->options = options;
       return parseInternal(multi);
     }

View File

@@ -46,12 +46,12 @@ FOR foaf, e, path IN 2 ANY @startUser GRAPH "relations"
   RETURN DISTINCT foaf
 ```
-Also optimizer rules have been implemented to gain performance of the traversal statement.
-This rules move filter statements into the traversal statement s.t. paths which can never
+Optimizer rules have been implemented to gain performance of the traversal statement.
+These rules move filter statements into the traversal statement s.t. paths which can never
 pass the filter are not emitted to the variables.
 As an example take the query above and assume there are edges that do not have `type == "friend"`.
 If in the first edge step there is such a non-friend edge the second steps will never
-be computed as they cannot fulfill the filter condition.
+be computed for these edges as they cannot fulfill the filter condition.

 !SUBSECTION Array Indexes
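As an illustration of the traversal optimization described in the hunk above (not part of the commit): a filter on the traversed path can be evaluated inside the traversal. A minimal arangosh sketch; the graph `"relations"` comes from the quoted query, while the `FILTER` clause and the start vertex `users/alice` are illustrative assumptions:

```js
// arangosh: a filter on the first traversed edge. With the new optimizer
// rules this filter is moved into the traversal, so paths whose first edge
// is not of type "friend" are never expanded into a second step.
var db = require("org/arangodb").db;
var friendsOfFriends = db._query(
  'FOR foaf, e, path IN 2 ANY @startUser GRAPH "relations" ' +
  '  FILTER path.edges[0].type == "friend" ' +
  '  RETURN DISTINCT foaf',
  { startUser: "users/alice" }  // hypothetical start vertex
).toArray();
```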
@@ -184,10 +184,10 @@ A deadlock is a situation in which two or more concurrent operations (user transactions
 or AQL queries) try to access the same resources (collections, documents) and need to
 wait for the others to finish, but none of them can make any progress.

-In case of such deadlock, there would be no progress for any of the involved
+In case of such a deadlock, there would be no progress for any of the involved
 transactions, and none of the involved transactions could ever complete. This is
 completely undesirable, so the new automatic deadlock detection mechanism in ArangoDB
-will automatically kick in and abort one of the transactions involved in such deadlock.
+will automatically kick in and abort one of the transactions involved in such a deadlock.

 Aborting means that all changes done by the transaction will be rolled back and error
 29 (`deadlock detected`) will be thrown.
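For illustration only (not part of the commit): a transaction aborted by the deadlock detection surfaces as error 29, which a client can catch and retry. A minimal arangosh sketch; the collection names and the transaction body are made-up assumptions:

```js
// arangosh: retry-on-deadlock pattern. If this transaction is chosen as the
// victim of deadlock detection, all its changes are rolled back and an
// ArangoError with errorNum 29 ("deadlock detected", see text above) is thrown.
var db = require("org/arangodb").db;
try {
  db._executeTransaction({
    collections: { write: [ "accounts", "auditlog" ] },  // hypothetical collections
    action: function () {
      var db = require("org/arangodb").db;
      db.accounts.update("alice", { locked: true });
      db.auditlog.save({ action: "lock", user: "alice" });
    }
  });
}
catch (err) {
  if (err.errorNum === 29) {
    // aborted by deadlock detection; safe to run the transaction again
  }
  else {
    throw err;
  }
}
```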
@@ -222,7 +222,7 @@ have been backported to ArangoDB 2.7 as well):
 slave will (intentionally) idle and not poll for master log changes in case the
 master had sent the full logs already.
 The `idleMaxWaitTime` value will only be used when `adapativePolling` is set
-to `true`. When `adaptivePolling` is disable, only `idleMinWaitTime` will be
+to `true`. When `adaptivePolling` is disabled, only `idleMinWaitTime` will be
 used as a constant time span in which the slave will not poll the master for
 further changes. The default values are 0.5 seconds for `idleMinWaitTime` and
 2.5 seconds for `idleMaxWaitTime`, which correspond to the hard-coded values
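Again for illustration (not part of the commit), the wait times described above can be set via the replication module in arangosh. A minimal sketch, assuming the applier property names match the option names quoted in the text; the master endpoint is a placeholder:

```js
// arangosh (on the slave): with adaptivePolling enabled, the applier backs
// off from idleMinWaitTime up to idleMaxWaitTime while the master reports
// no new log data; with adaptivePolling disabled only idleMinWaitTime is used.
var replication = require("org/arangodb/replication");
replication.applier.properties({
  endpoint: "tcp://master.example.org:8529",  // placeholder master endpoint
  adaptivePolling: true,
  idleMinWaitTime: 0.5,  // seconds (default)
  idleMaxWaitTime: 2.5   // seconds (default)
});
```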

View File

@@ -1133,7 +1133,7 @@ size_t DistributeBlock::sendToClient (AqlItemBlock* cur) {
   bool hasCreatedKeyAttribute = false;

-  if (TRI_IsStringJson(json)) {
+  if (TRI_IsStringJson(json) && static_cast<DistributeNode const*>(_exeNode)->_allowKeyConversionToObject) {
     TRI_json_t* obj = TRI_CreateObjectJson(TRI_UNKNOWN_MEM_ZONE, 1);

     if (obj == nullptr) {

View File

@@ -153,7 +153,8 @@ DistributeNode::DistributeNode (ExecutionPlan* plan,
     _collection(plan->getAst()->query()->collections()->get(JsonHelper::checkAndGetStringValue(base.json(), "collection"))),
     _varId(JsonHelper::checkAndGetNumericValue<VariableId>(base.json(), "varId")),
     _alternativeVarId(JsonHelper::checkAndGetNumericValue<VariableId>(base.json(), "alternativeVarId")),
-    _createKeys(JsonHelper::checkAndGetBooleanValue(base.json(), "createKeys")) {
+    _createKeys(JsonHelper::checkAndGetBooleanValue(base.json(), "createKeys")),
+    _allowKeyConversionToObject(JsonHelper::checkAndGetBooleanValue(base.json(), "allowKeyConversionToObject")) {
 }

 void DistributeNode::toJsonHelper (triagens::basics::Json& nodes,
@@ -169,7 +170,8 @@ void DistributeNode::toJsonHelper (triagens::basics::Json& nodes,
     ("collection", triagens::basics::Json(_collection->getName()))
     ("varId", triagens::basics::Json(static_cast<int>(_varId)))
     ("alternativeVarId", triagens::basics::Json(static_cast<int>(_alternativeVarId)))
-    ("createKeys", triagens::basics::Json(_createKeys));
+    ("createKeys", triagens::basics::Json(_createKeys))
+    ("allowKeyConversionToObject", triagens::basics::Json(_allowKeyConversionToObject));

   // And add it:
   nodes(json);

View File

@@ -372,13 +372,15 @@ namespace triagens {
                         Collection const* collection,
                         VariableId const varId,
                         VariableId const alternativeVarId,
-                        bool createKeys)
+                        bool createKeys,
+                        bool allowKeyConversionToObject)
          : ExecutionNode(plan, id),
            _vocbase(vocbase),
            _collection(collection),
            _varId(varId),
            _alternativeVarId(alternativeVarId),
-           _createKeys(createKeys) {
+           _createKeys(createKeys),
+           _allowKeyConversionToObject(allowKeyConversionToObject) {
         }

         DistributeNode (ExecutionPlan* plan,
@@ -386,8 +388,9 @@ namespace triagens {
                         TRI_vocbase_t* vocbase,
                         Collection const* collection,
                         VariableId const varId,
-                        bool createKeys)
-          : DistributeNode(plan, id, vocbase, collection, varId, varId, createKeys) {
+                        bool createKeys,
+                        bool allowKeyConversionToObject)
+          : DistributeNode(plan, id, vocbase, collection, varId, varId, createKeys, allowKeyConversionToObject) {
           // just delegates to the other constructor
         }
@@ -417,7 +420,7 @@ namespace triagens {
         ExecutionNode* clone (ExecutionPlan* plan,
                               bool withDependencies,
                               bool withProperties) const override final {
-          auto c = new DistributeNode(plan, _id, _vocbase, _collection, _varId, _alternativeVarId, _createKeys);
+          auto c = new DistributeNode(plan, _id, _vocbase, _collection, _varId, _alternativeVarId, _createKeys, _allowKeyConversionToObject);

           cloneHelper(c, plan, withDependencies, withProperties);
@@ -479,6 +482,12 @@ namespace triagens {
         bool const _createKeys;

+////////////////////////////////////////////////////////////////////////////////
+/// @brief allow conversion of key to object
+////////////////////////////////////////////////////////////////////////////////
+
+        bool const _allowKeyConversionToObject;
+
     };

// -----------------------------------------------------------------------------

View File

@@ -2572,7 +2572,7 @@ int triagens::aql::distributeInClusterRule (Optimizer* opt,
       bool const createKeys = (nodeType == ExecutionNode::INSERT);
       inputVariable = node->getVariablesUsedHere()[0];
       distNode = new DistributeNode(plan, plan->nextId(),
-                                    vocbase, collection, inputVariable->id, createKeys);
+                                    vocbase, collection, inputVariable->id, createKeys, true);
     }
     else if (nodeType == ExecutionNode::REPLACE) {
       std::vector<Variable const*> v = node->getVariablesUsedHere();
@@ -2585,7 +2585,7 @@ int triagens::aql::distributeInClusterRule (Optimizer* opt,
         inputVariable = v[0];
       }
       distNode = new DistributeNode(plan, plan->nextId(),
-                                    vocbase, collection, inputVariable->id, false);
+                                    vocbase, collection, inputVariable->id, false, v.size() > 1);
     }
     else if (nodeType == ExecutionNode::UPDATE) {
       std::vector<Variable const*> v = node->getVariablesUsedHere();
@@ -2600,7 +2600,7 @@ int triagens::aql::distributeInClusterRule (Optimizer* opt,
        inputVariable = v[0];
      }
      distNode = new DistributeNode(plan, plan->nextId(),
-                                   vocbase, collection, inputVariable->id, false);
+                                   vocbase, collection, inputVariable->id, false, v.size() > 1);
     }
     else if (nodeType == ExecutionNode::UPSERT) {
       // an UPSERT nodes has two input variables!
@@ -2608,7 +2608,7 @@ int triagens::aql::distributeInClusterRule (Optimizer* opt,
       TRI_ASSERT(v.size() >= 2);
       distNode = new DistributeNode(plan, plan->nextId(),
-                                    vocbase, collection, v[0]->id, v[2]->id, false);
+                                    vocbase, collection, v[0]->id, v[2]->id, false, true);
     }
     else {
       TRI_ASSERT(false);
@@ -2941,6 +2941,7 @@ class RemoveToEnumCollFinder final : public WalkerWorker<ExecutionNode> {
    ExecutionNode* _lastNode;

  public:

    RemoveToEnumCollFinder (ExecutionPlan* plan,
                            std::unordered_set<ExecutionNode*>& toUnlink)
      : _plan(plan),
@@ -2960,7 +2961,9 @@ class RemoveToEnumCollFinder final : public WalkerWorker<ExecutionNode> {
    bool before (ExecutionNode* en) override final {
      switch (en->getType()) {
        case EN::REMOVE: {
-          TRI_ASSERT(_remove == false);
+          if (_remove) {
+            break;
+          }

          // find the variable we are removing . . .
          auto rn = static_cast<RemoveNode*>(en);

View File

@@ -2718,12 +2718,6 @@ function AQL_IS_DATESTRING (value) {
     // argument is a string

-    // append zulu time specifier if no other present
-    if (! value.match(/([zZ]|[+\-]\d+(:\d+)?)$/) ||
-        (value.match(/-\d+(:\d+)?$/) && ! value.match(/[tT ]/))) {
-      value += 'Z';
-    }
-
     // detect invalid dates ("foo" -> "fooZ" -> getTime() == NaN)
     var date = new Date(value);

     if (isNaN(date)) {

View File

@@ -1974,21 +1974,27 @@ function ahuacatlNumericFunctionsTestSuite () {
         [ 1.321e-45 , 1.321e-45 , 1 ]
       ];

+      var valgrind = require("internal").valgrind;
+
       data.forEach(function (value) {
+        if (valgrind && (value[0] === 0 || value[1] >= 0 && value[1] < 1)) {
+          // V8 under Valgrind has an issue with Math.pow(0, 0.1);
+          return;
+        }
         var query = "RETURN POW(" + JSON.stringify(value[0]) + ", " + JSON.stringify(value[1]) + ")";
         var actual = getQueryResults(query);
         if (value[2] === null) {
           assertNull(actual[0]);
         }
         else {
-          assertEqual(value[2].toPrecision(6), actual[0].toPrecision(6), query);
+          assertEqual(value[2].toPrecision(4), actual[0].toPrecision(4), value);
         }

         actual = getQueryResults("RETURN NOOPT(POW(" + JSON.stringify(value[0]) + ", " + JSON.stringify(value[1]) + "))");
         if (value[2] === null) {
           assertNull(actual[0]);
         }
         else {
-          assertEqual(value[2].toPrecision(6), actual[0].toPrecision(6));
+          assertEqual(value[2].toPrecision(4), actual[0].toPrecision(4), value);
         }

         query = "RETURN NOOPT(V8(POW(" + JSON.stringify(value[0]) + ", " + JSON.stringify(value[1]) + ")))";
         actual = getQueryResults(query);
@@ -1996,7 +2002,7 @@ function ahuacatlNumericFunctionsTestSuite () {
           assertNull(actual[0], query);
         }
         else {
-          assertEqual(value[2].toPrecision(6), actual[0].toPrecision(6), query);
+          assertEqual(value[2].toPrecision(4), actual[0].toPrecision(4), value);
         }
       });
     },

View File

@@ -81,62 +81,6 @@ function ahuacatlModifySuite () {
       c2 = null;
     },

-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testRemoveInSubquery : function () {
-      assertQueryError(errors.ERROR_QUERY_MODIFY_IN_SUBQUERY.code, "FOR d IN @@cn LET x = (REMOVE d.foobar IN @@cn) RETURN d", { "@cn": cn1 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testInsertInSubquery : function () {
-      assertQueryError(errors.ERROR_QUERY_MODIFY_IN_SUBQUERY.code, "FOR d IN @@cn LET x = (INSERT { _key: 'test' } IN @@cn) RETURN d", { "@cn": cn1 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testUpdateInSubquery : function () {
-      assertQueryError(errors.ERROR_QUERY_MODIFY_IN_SUBQUERY.code, "FOR d IN @@cn LET x = (UPDATE { _key: 'test' } IN @@cn) RETURN d", { "@cn": cn1 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testReplaceInSubquery : function () {
-      assertQueryError(errors.ERROR_QUERY_MODIFY_IN_SUBQUERY.code, "FOR d IN @@cn LET x = (REPLACE { _key: 'test' } IN @@cn) RETURN d", { "@cn": cn1 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testReplaceInSubquery2 : function () {
-      assertQueryError(errors.ERROR_QUERY_MODIFY_IN_SUBQUERY.code, "FOR d IN @@cn LET x = (FOR i IN 1..2 REPLACE { _key: 'test' } IN @@cn) RETURN d", { "@cn": cn1 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testMultiModify : function () {
-      assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR d IN @@cn1 REMOVE d IN @@cn1 FOR e IN @@cn2 REMOVE e IN @@cn2", { "@cn1": cn1, "@cn2": cn2 });
-    },
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief test subquery
-////////////////////////////////////////////////////////////////////////////////
-
-    testMultiModify2 : function () {
-      assertQueryError(errors.ERROR_QUERY_PARSE.code, "FOR d IN @@cn1 FOR e IN @@cn2 REMOVE d IN @@cn1 REMOVE e IN @@cn2", { "@cn1": cn1, "@cn2": cn2 });
-    },
-
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief test subquery
 ////////////////////////////////////////////////////////////////////////////////
@@ -1162,12 +1106,20 @@ function ahuacatlUpdateSuite () {
       assertQueryError(errors.ERROR_ARANGO_DOCUMENT_TYPE_INVALID.code, "FOR d IN @@cn UPDATE [ ] IN @@cn", { "@cn": cn1 });
     },

+////////////////////////////////////////////////////////////////////////////////
+/// @brief test update
+////////////////////////////////////////////////////////////////////////////////
+
+    testUpdateInvalidType : function () {
+      assertQueryError(errors.ERROR_ARANGO_DOCUMENT_TYPE_INVALID.code, "FOR d IN @@cn UPDATE 'foo' IN @@cn", { "@cn": cn1 });
+    },
+
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief test update
 ////////////////////////////////////////////////////////////////////////////////

     testUpdateInvalidKey : function () {
-      assertQueryError(errors.ERROR_ARANGO_DOCUMENT_NOT_FOUND.code, "FOR d IN @@cn UPDATE 'foo' IN @@cn", { "@cn": cn1 });
+      assertQueryError(errors.ERROR_ARANGO_DOCUMENT_NOT_FOUND.code, "FOR d IN @@cn UPDATE { _key: 'foo' } IN @@cn", { "@cn": cn1 });
     },

 ////////////////////////////////////////////////////////////////////////////////