
Merge branch 'devel' of ssh://github.com/triAGENS/ArangoDB into devel

Max Neunhoeffer 2014-12-03 16:46:37 +01:00
commit 34c31a0980
28 changed files with 566 additions and 239 deletions

View File

@ -363,6 +363,8 @@ The following optimizer rules may appear in the `rules` attribute of a plan:
optimizations).
* `remove-redundant-sorts`: will appear if multiple *SORT* statements can be merged
into fewer sorts.
* `remove-collect-into`: will appear if an *INTO* clause was removed from a *COLLECT*
statement because the result of *INTO* is not used.
* `interchange-adjacent-enumerations`: will appear if a query contains multiple
*FOR* statements whose order was permuted. Permutation of *FOR* statements is
performed because it may enable further optimizations by other rules.
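The new `remove-collect-into` rule can be verified from arangosh by inspecting the `rules` attribute of a query's execution plan. A minimal sketch, assuming the default optimizer settings and an illustrative query:
```js
// arangosh sketch: the INTO variable `g` is never used after the COLLECT,
// so the optimizer should drop it and report the rule as applied
var stmt = db._createStatement({
  query: "FOR i IN 1..10 COLLECT a = i INTO g RETURN a"
});
var rules = stmt.explain().plan.rules;
print(rules.indexOf("remove-collect-into") !== -1); // expected: true
```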

View File

@ -27,12 +27,13 @@ console.log('usernames:', usernames);
Creates a query function that performs the given query and returns the result.
The returned query function optionally takes an object as its argument. If an object is provided, its properties will be used as the query's bind parameters. Note that collection bind parameters need to be prefixed with an at-sign, e.g. `{'@myCollectionVar': 'my_collection_name'}`.
The returned query function optionally takes an object as its argument. If an object is provided, its properties will be used as the query's bind parameters. Any additional arguments will be passed to the transform function (or dropped if no transform function is defined).
*Parameter*
* *cfg*: an object with the following properties:
* *query*: an AQL query string or an ArangoDB Query Builder query object.
* *params* (optional): an array of parameter names.
* *context* (optional): an *applicationContext*.
* *model* (optional): a *Foxx.Model* that will be applied to the query results.
* *defaults* (optional): default values for the query's bind parameters. These can be overridden by passing a value for the same name to the query function.
@ -42,7 +43,9 @@ If *cfg* is a string, it will be used as the value of *cfg.query* instead.
If a *context* is specified, the values of all collection bind parameters will be passed through the context's *collectionName* method.
Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar`.
Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar` and their parameter name needs to be prefixed with an at-sign as well, e.g. `{'@myCollectionVar': 'collection_name'}`.
If *params* is provided, the query function will accept positional arguments instead of an object. If *params* is a string, it will be treated as an array containing that string.
If both *model* and *transform* are provided, the *transform* function will be applied to the result array _after_ the results have been converted into model instances. The *transform* function is always passed the entire result array and its return value will be returned by the query function.
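A hedged sketch of the collection bind parameter handling described above, assuming a Foxx app with an *applicationContext* and a collection named `todos` (both illustrative):
```js
var Foxx = require('org/arangodb/foxx');

// The AQL string references the collection with two at-signs; the bind
// parameter key passed to the query function uses a single at-sign. Because
// a context is given, the value is passed through its collectionName method.
var listAll = Foxx.createQuery({
  query: 'FOR d IN @@myCollectionVar RETURN d',
  context: applicationContext
});
var docs = listAll({'@myCollectionVar': 'todos'});
```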
@ -62,6 +65,16 @@ var query = Foxx.createQuery('FOR u IN _users RETURN u[@propName]');
var usernames = query({propName: 'user'});
```
Using named bind parameters:
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users RETURN u[@propName]',
params: ['propName']
});
var usernames = query('user');
```
Using models:
```js
@ -93,3 +106,44 @@ var query = Foxx.createQuery({
});
var user = query(); // first user by username
```
Using a transformation with extra arguments:
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]',
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query({propName: 'user'}, true); // username of first user in uppercase
query({propName: 'user'}, false); // username of first user in lowercase
```
Using a transformation with extra arguments (using positional arguments):
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]',
params: ['propName'],
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query('user', true); // username of first user in uppercase
query('user', false); // username of first user in lowercase
```
Using a transformation with extra arguments (and no query parameters):
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u.user',
params: false, // an empty array would work, too
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query(true); // username of first user in uppercase
query(false); // username of first user in lowercase
```

View File

@ -19,11 +19,13 @@ exports.repository = TodosRepository;
You can define custom query methods using Foxx.createQuery and Foxx.Repository.extend.
For more details see the chapter on [Foxx Queries](../Foxx/FoxxQueries.md).
*Examples*
Making a simple query in the repository and using it from the controller:
```javascript
```js
// in the repository
var Foxx = require("org/arangodb/foxx");
@ -41,29 +43,31 @@ ctrl.get("/", function(req, res) {
It is also possible to supply parameters to the query:
```javascript
```js
// in the repository
getPendingItemById: Foxx.createQuery(
'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo'
)
getPendingItemById: Foxx.createQuery({
query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo',
params: ['id']
})
// in the controller
ctrl.get("/:id", function(req, res) {
var id = req.params("id");
var rv = todosRepository.getPendingItemById({ id: id });
var rv = todosRepository.getPendingItemById(id);
res.json(rv);
});
```
The list of results can also be transformed before returning it from the repository:
```javascript
```js
// in the repository
getPendingItemById: Foxx.createQuery({
query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo',
transform: function(results, args) {
params: ['id'],
transform: function(results, extra) {
for (var i = 0; i < results.length; i++) {
results[i].extraProperty = args.extra;
results[i].extraProperty = extra;
}
return results;
}
})
@ -72,10 +76,7 @@ getPendingItemById: Foxx.createQuery({
ctrl.get("/:id", function(req, res) {
var id = req.params("id");
var extra = req.params("extra");
var rv = todosRepository.getPendingItemById(
{ id: id },
{ extra: extra }
);
var rv = todosRepository.getPendingItemById(id, extra);
res.json(rv);
});
```

View File

@ -108,6 +108,7 @@
* [FoxxController](Foxx/FoxxController.md)
* [FoxxModel](Foxx/FoxxModel.md)
* [FoxxRepository](Foxx/FoxxRepository.md)
* [Foxx Queries](Foxx/FoxxQueries.md)
* [Deploying Applications](Foxx/DeployingAnApplication.md)
* [Developing Applications](Foxx/DevelopingAnApplication.md)
* [Dependency Injection](Foxx/FoxxInjection.md)

View File

@ -560,6 +560,7 @@ SHELL_SERVER_AQL = @top_srcdir@/js/server/tests/aql-arithmetic.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-interchange-adjacent-enumerations-noncluster.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-move-calculations-up.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-move-filters-up.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-remove-collect-into.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-remove-redundant-calculations.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-remove-redundant-or.js \
@top_srcdir@/js/server/tests/aql-optimizer-rule-remove-redundant-sorts.js \

View File

@ -3408,7 +3408,7 @@ void UpdateBlock::work (std::vector<AqlItemBlock*>& blocks) {
TRI_json_t* old = TRI_JsonShapedJson(_collection->documentCollection()->getShaper(), &shapedJson);
if (old != nullptr) {
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeObjects);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, old);
if (patchedJson != nullptr) {

View File

@ -1967,6 +1967,23 @@ namespace triagens {
return _outVariable != nullptr;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief return the out variable
////////////////////////////////////////////////////////////////////////////////
Variable const* outVariable () const {
return _outVariable;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief clear the out variable
////////////////////////////////////////////////////////////////////////////////
void clearOutVariable () {
TRI_ASSERT(_outVariable != nullptr);
_outVariable = nullptr;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief getVariablesUsedHere
////////////////////////////////////////////////////////////////////////////////

View File

@ -251,8 +251,8 @@ ModificationOptions ExecutionPlan::createOptions (AstNode const* node) {
// nullMeansRemove is the opposite of keepNull
options.nullMeansRemove = value->isFalse();
}
else if (strcmp(name, "mergeArrays") == 0) {
options.mergeArrays = value->isTrue();
else if (strcmp(name, "mergeObjects") == 0) {
options.mergeObjects = value->isTrue();
}
}
}
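The flag parsed here corresponds to the *OPTIONS* clause of AQL data-modification queries. A minimal arangosh sketch, assuming an illustrative collection named `docs`:
```js
// With mergeObjects: false the sub-object in the patch overwrites the stored
// sub-object instead of being merged into it; keepNull is the inverse of the
// internal nullMeansRemove flag handled above.
db._query(
  "FOR d IN docs " +
  "UPDATE d WITH { props: { b: 2 } } IN docs " +
  "OPTIONS { mergeObjects: false, keepNull: true }"
);
```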

View File

@ -36,7 +36,7 @@ ModificationOptions::ModificationOptions (Json const& json) {
ignoreErrors = JsonHelper::getBooleanValue(array.json(), "ignoreErrors", false);
waitForSync = JsonHelper::getBooleanValue(array.json(), "waitForSync", false);
nullMeansRemove = JsonHelper::getBooleanValue(array.json(), "nullMeansRemove", false);
mergeArrays = JsonHelper::getBooleanValue(array.json(), "mergeArrays", false);
mergeObjects = JsonHelper::getBooleanValue(array.json(), "mergeObjects", false);
}
void ModificationOptions::toJson (triagens::basics::Json& json,
@ -46,7 +46,7 @@ void ModificationOptions::toJson (triagens::basics::Json& json,
("ignoreErrors", Json(ignoreErrors))
("waitForSync", Json(waitForSync))
("nullMeansRemove", Json(nullMeansRemove))
("mergeArrays", Json(mergeArrays));
("mergeObjects", Json(mergeObjects));
json ("modificationFlags", flags);
}

View File

@ -54,7 +54,7 @@ namespace triagens {
: ignoreErrors(false),
waitForSync(false),
nullMeansRemove(false),
mergeArrays(false) {
mergeObjects(false) {
}
void toJson (triagens::basics::Json& json, TRI_memory_zone_t* zone) const;
@ -66,7 +66,7 @@ namespace triagens {
bool ignoreErrors;
bool waitForSync;
bool nullMeansRemove;
bool mergeArrays;
bool mergeObjects;
};

View File

@ -397,8 +397,8 @@ void Optimizer::setupRules () {
// remove redundant sort blocks
registerRule("remove-redundant-sorts",
removeRedundantSorts,
removeRedundantSorts_pass2,
removeRedundantSortsRule,
removeRedundantSortsRule_pass2,
true);
//////////////////////////////////////////////////////////////////////////////
@ -408,8 +408,8 @@ void Optimizer::setupRules () {
//////////////////////////////////////////////////////////////////////////////
registerRule("interchange-adjacent-enumerations",
interchangeAdjacentEnumerations,
interchangeAdjacentEnumerations_pass3,
interchangeAdjacentEnumerationsRule,
interchangeAdjacentEnumerationsRule_pass3,
true);
//////////////////////////////////////////////////////////////////////////////
@ -452,8 +452,14 @@ void Optimizer::setupRules () {
// remove redundant sort blocks
registerRule("remove-redundant-sorts-2",
removeRedundantSorts,
removeRedundantSorts_pass5,
removeRedundantSortsRule,
removeRedundantSortsRule_pass5,
true);
// remove INTO from COLLECT
registerRule("remove-collect-into",
removeCollectIntoRule,
removeCollectIntoRule_pass5,
true);
//////////////////////////////////////////////////////////////////////////////
@ -463,26 +469,26 @@ void Optimizer::setupRules () {
// try to replace simple OR conditions with IN
registerRule("replace-or-with-in",
replaceOrWithIn,
replaceOrWithIn_pass6,
replaceOrWithInRule,
replaceOrWithInRule_pass6,
true);
// try to remove redundant OR conditions
registerRule("remove-redundant-or",
removeRedundantOr,
removeRedundantOr_pass6,
removeRedundantOrRule,
removeRedundantOrRule_pass6,
true);
// try to find a filter after an enumerate collection and find an index . . .
registerRule("use-index-range",
useIndexRange,
useIndexRange_pass6,
useIndexRangeRule,
useIndexRangeRule_pass6,
true);
// try to find sort blocks which are superseded by indexes
registerRule("use-index-for-sort",
useIndexForSort,
useIndexForSort_pass6,
useIndexForSortRule,
useIndexForSortRule_pass6,
true);
#if 0
@ -497,34 +503,34 @@ void Optimizer::setupRules () {
if (ExecutionEngine::isCoordinator()) {
// distribute operations in cluster
registerRule("scatter-in-cluster",
scatterInCluster,
scatterInCluster_pass10,
scatterInClusterRule,
scatterInClusterRule_pass10,
false);
registerRule("distribute-in-cluster",
distributeInCluster,
distributeInCluster_pass10,
distributeInClusterRule,
distributeInClusterRule_pass10,
false);
// distribute operations in cluster
registerRule("distribute-filtercalc-to-cluster",
distributeFilternCalcToCluster,
distributeFilternCalcToCluster_pass10,
distributeFilternCalcToClusterRule,
distributeFilternCalcToClusterRule_pass10,
true);
registerRule("distribute-sort-to-cluster",
distributeSortToCluster,
distributeSortToCluster_pass10,
distributeSortToClusterRule,
distributeSortToClusterRule_pass10,
true);
registerRule("remove-unnecessary-remote-scatter",
removeUnnecessaryRemoteScatter,
removeUnnecessaryRemoteScatter_pass10,
removeUnnecessaryRemoteScatterRule,
removeUnnecessaryRemoteScatterRule_pass10,
true);
registerRule("undistribute-remove-after-enum-coll",
undistributeRemoveAfterEnumColl,
undistributeRemoveAfterEnumColl_pass10,
undistributeRemoveAfterEnumCollRule,
undistributeRemoveAfterEnumCollRule_pass10,
true);
}

View File

@ -81,37 +81,37 @@ namespace triagens {
// "Pass 1": moving nodes "up" (potentially outside loops):
//////////////////////////////////////////////////////////////////////////////
pass1 = 100,
pass1 = 100,
// split and-combined filters into multiple smaller filters
splitFiltersRule_pass1 = 110,
splitFiltersRule_pass1 = 110,
// move calculations up the dependency chain (to pull them out of
// inner loops etc.)
moveCalculationsUpRule_pass1 = 120,
moveCalculationsUpRule_pass1 = 120,
// move filters up the dependency chain (to make result sets as small
// as possible as early as possible)
moveFiltersUpRule_pass1 = 130,
moveFiltersUpRule_pass1 = 130,
// remove calculations that are repeatedly used in a query
removeRedundantCalculationsRule_pass1 = 140,
removeRedundantCalculationsRule_pass1 = 140,
//////////////////////////////////////////////////////////////////////////////
/// "Pass 2": try to remove redundant or unnecessary nodes
//////////////////////////////////////////////////////////////////////////////
pass2 = 200,
pass2 = 200,
// remove filters from the query that are not necessary at all
// filters that are always true will be removed entirely
// filters that are always false will be replaced with a NoResults node
removeUnnecessaryFiltersRule_pass2 = 210,
removeUnnecessaryFiltersRule_pass2 = 210,
// remove calculations that are never necessary
removeUnnecessaryCalculationsRule_pass2 = 220,
removeUnnecessaryCalculationsRule_pass2 = 220,
// remove redundant sort blocks
removeRedundantSorts_pass2 = 230,
removeRedundantSortsRule_pass2 = 230,
//////////////////////////////////////////////////////////////////////////////
/// "Pass 3": interchange EnumerateCollection nodes in all possible ways
@ -119,21 +119,21 @@ namespace triagens {
/// levels go back to this or lower levels!
//////////////////////////////////////////////////////////////////////////////
pass3 = 500,
interchangeAdjacentEnumerations_pass3 = 510,
pass3 = 500,
interchangeAdjacentEnumerationsRule_pass3 = 510,
//////////////////////////////////////////////////////////////////////////////
// "Pass 4": moving nodes "up" (potentially outside loops) (second try):
//////////////////////////////////////////////////////////////////////////////
pass4 = 600,
pass4 = 600,
// move calculations up the dependency chain (to pull them out of
// inner loops etc.)
moveCalculationsUpRule_pass4 = 610,
moveCalculationsUpRule_pass4 = 610,
// move filters up the dependency chain (to make result sets as small
// as possible as early as possible)
moveFiltersUpRule_pass4 = 620,
moveFiltersUpRule_pass4 = 620,
//////////////////////////////////////////////////////////////////////////////
@ -143,61 +143,64 @@ namespace triagens {
// remove filters from the query that are not necessary at all
// filters that are always true will be removed entirely
// filters that are always false will be replaced with a NoResults node
pass5 = 700,
removeUnnecessaryFiltersRule_pass5 = 710,
pass5 = 700,
removeUnnecessaryFiltersRule_pass5 = 710,
// remove calculations that are never necessary
removeUnnecessaryCalculationsRule_pass5 = 720,
removeUnnecessaryCalculationsRule_pass5 = 720,
// remove redundant sort blocks
removeRedundantSorts_pass5 = 730,
removeRedundantSortsRule_pass5 = 730,
// remove INTO for COLLECT if appropriate
removeCollectIntoRule_pass5 = 740,
//////////////////////////////////////////////////////////////////////////////
/// "Pass 6": use indexes if possible for FILTER and/or SORT nodes
//////////////////////////////////////////////////////////////////////////////
pass6 = 800,
pass6 = 800,
// replace simple OR conditions with IN
replaceOrWithIn_pass6 = 810,
replaceOrWithInRule_pass6 = 810,
// remove redundant OR conditions
removeRedundantOr_pass6 = 820,
removeRedundantOrRule_pass6 = 820,
// try to find a filter after an enumerate collection and find an index . . .
useIndexRange_pass6 = 830,
useIndexRangeRule_pass6 = 830,
// try to find sort blocks which are superseded by indexes
useIndexForSort_pass6 = 840,
useIndexForSortRule_pass6 = 840,
// try to remove filters covered by index ranges
removeFiltersCoveredByIndex_pass6 = 850,
removeFiltersCoveredByIndexRule_pass6 = 850,
//////////////////////////////////////////////////////////////////////////////
/// "Pass 10": final transformations for the cluster
//////////////////////////////////////////////////////////////////////////////
// make operations on sharded collections use distribute
distributeInCluster_pass10 = 1000,
distributeInClusterRule_pass10 = 1000,
// make operations on sharded collections use scatter / gather / remote
scatterInCluster_pass10 = 1010,
scatterInClusterRule_pass10 = 1010,
// move FilterNodes & Calculation nodes inbetween
// scatter(remote) <-> gather(remote) so they're
// distributed to the cluster nodes.
distributeFilternCalcToCluster_pass10 = 1020,
distributeFilternCalcToClusterRule_pass10 = 1020,
// move SortNodes into the distribution.
// adjust gathernode to also contain the sort criterions.
distributeSortToCluster_pass10 = 1030,
distributeSortToClusterRule_pass10 = 1030,
// try to get rid of a RemoteNode->ScatterNode combination which has
// only a SingletonNode and possibly some CalculationNodes as dependencies
removeUnnecessaryRemoteScatter_pass10 = 1040,
removeUnnecessaryRemoteScatterRule_pass10 = 1040,
//recognise that a RemoveNode can be moved to the shards
undistributeRemoveAfterEnumColl_pass10 = 1050
undistributeRemoveAfterEnumCollRule_pass10 = 1050
};
public:

View File

@ -53,9 +53,9 @@ using EN = triagens::aql::ExecutionNode;
/// - sorts that are covered by earlier sorts will be removed
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::removeRedundantSorts (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::removeRedundantSortsRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
std::vector<ExecutionNode*> nodes = plan->findNodesOfType(EN::SORT, true);
std::unordered_set<ExecutionNode*> toUnlink;
@ -254,6 +254,49 @@ int triagens::aql::removeUnnecessaryFiltersRule (Optimizer* opt,
return TRI_ERROR_NO_ERROR;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove INTO of a COLLECT if not used
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::removeCollectIntoRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool modified = false;
std::unordered_set<ExecutionNode*> toUnlink;
// should we enter subqueries??
std::vector<ExecutionNode*> nodes = plan->findNodesOfType(EN::AGGREGATE, true);
for (auto n : nodes) {
auto collectNode = static_cast<AggregateNode*>(n);
TRI_ASSERT(collectNode != nullptr);
auto outVariable = collectNode->outVariable();
if (outVariable == nullptr) {
// no out variable. nothing to do
continue;
}
auto varsUsedLater = n->getVarsUsedLater();
if (varsUsedLater.find(outVariable) != varsUsedLater.end()) {
// outVariable is used later
continue;
}
// outVariable is not used later. remove it!
collectNode->clearOutVariable();
modified = true;
}
if (modified) {
plan->findVarUsage();
}
opt->addPlan(plan, rule->level, modified);
return TRI_ERROR_NO_ERROR;
}
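The rule's effect can be checked with the same explain facility used by the server-side tests added in this commit; a sketch, assuming the default rule set:
```js
// In the first query the INTO variable `g` is never referenced again, so the
// rule should fire; in the second query `g` is returned and must be kept.
var unused = AQL_EXPLAIN("FOR i IN 1..10 COLLECT a = i INTO g RETURN a", { });
var used = AQL_EXPLAIN("FOR i IN 1..10 COLLECT a = i INTO g RETURN g", { });
print(unused.plan.rules.indexOf("remove-collect-into") !== -1); // expected: true
print(used.plan.rules.indexOf("remove-collect-into") !== -1);   // expected: false
```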
////////////////////////////////////////////////////////////////////////////////
/// @brief move calculations up in the plan
/// this rule modifies the plan in place
@ -1220,9 +1263,9 @@ class FilterToEnumCollFinder : public WalkerWorker<ExecutionNode> {
/// @brief useIndexRange, try to use an index for filtering
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::useIndexRange (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::useIndexRangeRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool modified = false;
@ -1534,9 +1577,9 @@ class SortToIndexNode : public WalkerWorker<ExecutionNode> {
}
};
int triagens::aql::useIndexForSort (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::useIndexForSortRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool planModified = false;
std::vector<ExecutionNode*> nodes
= plan->findNodesOfType(EN::SORT, true);
@ -1731,9 +1774,9 @@ struct FilterCondition {
/// @brief try to remove filters which are covered by indexes
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::removeFiltersCoveredByIndex (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::removeFiltersCoveredByIndexRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
std::unordered_set<ExecutionNode*> toUnlink;
std::vector<ExecutionNode*>&& nodes= plan->findNodesOfType(EN::FILTER, true);
@ -1843,9 +1886,9 @@ static bool nextPermutationTuple (std::vector<size_t>& data,
/// @brief interchange adjacent EnumerateCollectionNodes in all possible ways
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::interchangeAdjacentEnumerations (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::interchangeAdjacentEnumerationsRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
std::vector<ExecutionNode*>&& nodes
= plan->findNodesOfType(EN::ENUMERATE_COLLECTION,
true);
@ -1963,9 +2006,9 @@ int triagens::aql::interchangeAdjacentEnumerations (Optimizer* opt,
/// it will change plans in place
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::scatterInCluster (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::scatterInClusterRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool wasModified = false;
if (ExecutionEngine::isCoordinator()) {
@ -2075,9 +2118,9 @@ int triagens::aql::scatterInCluster (Optimizer* opt,
/// it will change plans in place
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::distributeInCluster (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::distributeInClusterRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool wasModified = false;
if (ExecutionEngine::isCoordinator()) {
@ -2156,9 +2199,9 @@ int triagens::aql::distributeInCluster (Optimizer* opt,
/// as small as possible as early as possible
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::distributeFilternCalcToCluster (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::distributeFilternCalcToClusterRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool modified = false;
std::vector<ExecutionNode*> nodes
@ -2249,9 +2292,9 @@ int triagens::aql::distributeFilternCalcToCluster (Optimizer* opt,
/// filters are not pushed beyond limits
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::distributeSortToCluster (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::distributeSortToClusterRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
bool modified = false;
std::vector<ExecutionNode*> nodes
@ -2333,9 +2376,9 @@ int triagens::aql::distributeSortToCluster (Optimizer* opt,
/// only a SingletonNode and possibly some CalculationNodes as dependencies
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::removeUnnecessaryRemoteScatter (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::removeUnnecessaryRemoteScatterRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
std::vector<ExecutionNode*> nodes
= plan->findNodesOfType(EN::REMOTE, true);
std::unordered_set<ExecutionNode*> toUnlink;
@ -2577,9 +2620,9 @@ class RemoveToEnumCollFinder: public WalkerWorker<ExecutionNode> {
/// @brief recognises that a RemoveNode can be moved to the shards.
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::undistributeRemoveAfterEnumColl (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::undistributeRemoveAfterEnumCollRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
std::vector<ExecutionNode*> nodes
= plan->findNodesOfType(EN::REMOVE, true);
std::unordered_set<ExecutionNode*> toUnlink;
@ -2780,9 +2823,9 @@ struct OrToInConverter {
// same (single) attribute.
////////////////////////////////////////////////////////////////////////////////
int triagens::aql::replaceOrWithIn (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::replaceOrWithInRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
ENTER_BLOCK;
std::vector<ExecutionNode*> nodes
= plan->findNodesOfType(EN::FILTER, true);
@ -2979,9 +3022,9 @@ struct RemoveRedundantOr {
}
};
int triagens::aql::removeRedundantOr (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
int triagens::aql::removeRedundantOrRule (Optimizer* opt,
ExecutionPlan* plan,
Optimizer::Rule const* rule) {
ENTER_BLOCK;
std::vector<ExecutionNode*> nodes
= plan->findNodesOfType(EN::FILTER, true);

View File

@ -45,7 +45,7 @@ namespace triagens {
/// - sorts that are covered by earlier sorts will be removed
////////////////////////////////////////////////////////////////////////////////
int removeRedundantSorts (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int removeRedundantSortsRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief remove all unnecessary filters
@ -56,6 +56,12 @@ namespace triagens {
int removeUnnecessaryFiltersRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief remove INTO of a COLLECT if not used
////////////////////////////////////////////////////////////////////////////////
int removeCollectIntoRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief move calculations up in the plan
/// this rule modifies the plan in place
@ -97,32 +103,32 @@ namespace triagens {
/// @brief prefer IndexRange nodes over EnumerateCollection nodes
////////////////////////////////////////////////////////////////////////////////
int useIndexRange (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int useIndexRangeRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief try to use the index for sorting
////////////////////////////////////////////////////////////////////////////////
int useIndexForSort (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int useIndexForSortRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief try to remove filters which are covered by indexes
////////////////////////////////////////////////////////////////////////////////
int removeFiltersCoveredByIndex (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int removeFiltersCoveredByIndexRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief interchange adjacent EnumerateCollectionNodes in all possible ways
////////////////////////////////////////////////////////////////////////////////
int interchangeAdjacentEnumerations (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int interchangeAdjacentEnumerationsRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief scatter operations in cluster - send all incoming rows to all remote
/// clients
////////////////////////////////////////////////////////////////////////////////
int scatterInCluster (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int scatterInClusterRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief distribute operations in cluster - send each incoming row to every
@ -134,18 +140,18 @@ namespace triagens {
/// The collections coll1 and coll2 do not have to be distinct for this.
////////////////////////////////////////////////////////////////////////////////
int distributeInCluster (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int distributeInClusterRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int distributeFilternCalcToCluster (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int distributeFilternCalcToClusterRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int distributeSortToCluster (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int distributeSortToClusterRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief try to get rid of a RemoteNode->ScatterNode combination which has
/// only a SingletonNode and possibly some CalculationNodes as dependencies
////////////////////////////////////////////////////////////////////////////////
int removeUnnecessaryRemoteScatter (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int removeUnnecessaryRemoteScatterRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief this rule removes Remote-Gather-Scatter/Distribute-Remote nodes from
@ -175,7 +181,7 @@ namespace triagens {
///
////////////////////////////////////////////////////////////////////////////////
int undistributeRemoveAfterEnumColl (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int undistributeRemoveAfterEnumCollRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
////////////////////////////////////////////////////////////////////////////////
/// @brief this rule replaces expressions of the type:
@ -186,9 +192,9 @@ namespace triagens {
// same (single) attribute.
////////////////////////////////////////////////////////////////////////////////
int replaceOrWithIn (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int replaceOrWithInRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int removeRedundantOr (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
int removeRedundantOrRule (Optimizer*, ExecutionPlan*, Optimizer::Rule const*);
} // namespace aql
} // namespace triagens

View File

@ -1051,7 +1051,7 @@ int modifyDocumentOnCoordinator (
bool waitForSync,
bool isPatch,
bool keepNull, // only counts for isPatch == true
bool mergeArrays, // only counts for isPatch == true
bool mergeObjects, // only counts for isPatch == true
TRI_json_t* json,
map<string, string> const& headers,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,
@ -1116,11 +1116,11 @@ int modifyDocumentOnCoordinator (
if (! keepNull) {
revstr += "&keepNull=false";
}
if (mergeArrays) {
revstr += "&mergeArrays=true";
if (mergeObjects) {
revstr += "&mergeObjects=true";
}
else {
revstr += "&mergeArrays=false";
revstr += "&mergeObjects=false";
}
}
else {

View File

@ -177,7 +177,7 @@ namespace triagens {
bool waitForSync,
bool isPatch,
bool keepNull, // only counts for isPatch == true
bool mergeArrays, // only counts for isPatch == true
bool mergeObjects, // only counts for isPatch == true
TRI_json_t* json,
std::map<std::string, std::string> const& headers,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,

View File

@ -1202,11 +1202,11 @@ bool RestDocumentHandler::replaceDocument () {
/// from the existing document that are contained in the patch document with an
/// attribute value of *null*.
///
/// @RESTQUERYPARAM{mergeArrays,boolean,optional}
/// Controls whether arrays (not lists) will be merged if present in both the
/// @RESTQUERYPARAM{mergeObjects,boolean,optional}
/// Controls whether objects (not arrays) will be merged if present in both the
/// existing and the patch document. If set to *false*, the value in the
/// patch document will overwrite the existing document's value. If set to
/// *true*, arrays will be merged. The default is *true*.
/// *true*, objects will be merged. The default is *true*.
///
/// @RESTQUERYPARAM{waitForSync,boolean,optional}
/// Wait until document has been synced to disk.
@ -1416,7 +1416,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
if (isPatch) {
// patching an existing document
bool nullMeansRemove;
bool mergeArrays;
bool mergeObjects;
bool found;
char const* valueStr = _request->value("keepNull", found);
if (! found || StringUtils::boolean(valueStr)) {
@ -1428,13 +1428,13 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
nullMeansRemove = true;
}
valueStr = _request->value("mergeArrays", found);
valueStr = _request->value("mergeObjects", found);
if (! found || StringUtils::boolean(valueStr)) {
// the default is true
mergeArrays = true;
mergeObjects = true;
}
else {
mergeArrays = false;
mergeObjects = false;
}
// read the existing document
@ -1487,7 +1487,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
}
}
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeObjects);
TRI_FreeJson(shaper->_memoryZone, old);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);
@ -1593,14 +1593,14 @@ bool RestDocumentHandler::modifyDocumentCoordinator (
if (! strcmp(_request->value("keepNull"), "false")) {
keepNull = false;
}
bool mergeArrays = true;
if (TRI_EqualString(_request->value("mergeArrays"), "false")) {
mergeArrays = false;
bool mergeObjects = true;
if (TRI_EqualString(_request->value("mergeObjects"), "false")) {
mergeObjects = false;
}
int error = triagens::arango::modifyDocumentOnCoordinator(
dbname, collname, key, rev, policy, waitForSync, isPatch,
keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody);
keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody);
if (error != TRI_ERROR_NO_ERROR) {
generateTransactionError(collname, error);

View File

@ -91,7 +91,7 @@ struct InsertOptions {
struct UpdateOptions {
bool overwrite = false;
bool keepNull = true;
bool mergeArrays = true;
bool mergeObjects = true;
bool waitForSync = false;
bool silent = false;
};
@ -702,7 +702,7 @@ static v8::Handle<v8::Value> ModifyVocbaseColCoordinator (
bool waitForSync,
bool isPatch,
bool keepNull, // only counts if isPatch==true
bool mergeArrays, // only counts if isPatch==true
bool mergeObjects, // only counts if isPatch==true
bool silent,
v8::Arguments const& argv) {
v8::HandleScope scope;
@ -736,7 +736,7 @@ static v8::Handle<v8::Value> ModifyVocbaseColCoordinator (
error = triagens::arango::modifyDocumentOnCoordinator(
dbname, collname, key, rev, policy, waitForSync, isPatch,
keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody);
keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody);
// Note that the json has been freed inside!
if (error != TRI_ERROR_NO_ERROR) {
@ -877,7 +877,7 @@ static v8::Handle<v8::Value> ReplaceVocbaseCol (bool useCollection,
options.waitForSync,
false, // isPatch
true, // keepNull, does not matter
false, // mergeArrays, does not matter
false, // mergeObjects, does not matter
options.silent,
argv));
}
@ -1084,7 +1084,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
TRI_v8_global_t* v8g = static_cast<TRI_v8_global_t*>(v8::Isolate::GetCurrent()->GetData());
if (argLength < 2 || argLength > 5) {
TRI_V8_EXCEPTION_USAGE(scope, "update(<document>, <data>, {overwrite: booleanValue, keepNull: booleanValue, mergeArrays: booleanValue, waitForSync: booleanValue})");
TRI_V8_EXCEPTION_USAGE(scope, "update(<document>, <data>, {overwrite: booleanValue, keepNull: booleanValue, mergeObjects: booleanValue, waitForSync: booleanValue})");
}
if (argLength > 2) {
@ -1097,8 +1097,8 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
if (optionsObject->Has(v8g->KeepNullKey)) {
options.keepNull = TRI_ObjectToBoolean(optionsObject->Get(v8g->KeepNullKey));
}
if (optionsObject->Has(v8g->MergeArraysKey)) {
options.mergeArrays = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeArraysKey));
if (optionsObject->Has(v8g->MergeObjectsKey)) {
options.mergeObjects = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeObjectsKey));
}
if (optionsObject->Has(v8g->WaitForSyncKey)) {
options.waitForSync = TRI_ObjectToBoolean(optionsObject->Get(v8g->WaitForSyncKey));
@ -1166,7 +1166,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
options.waitForSync,
true, // isPatch
options.keepNull,
options.mergeArrays,
options.mergeObjects,
options.silent,
argv));
}
@ -1233,7 +1233,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
}
}
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! options.keepNull, options.mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! options.keepNull, options.mergeObjects);
TRI_FreeJson(zone, old);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);

View File

@ -2896,7 +2896,7 @@ exports.clone = function (obj, seen) {
// Merge all the properties of source into target, source wins in conflict, and by default null and undefined from source are applied
exports.merge = function (target, source, isNullOverride /* = true */, isMergeArrays /* = true */) {
exports.merge = function (target, source, isNullOverride /* = true */, isMergeObjects /* = true */) {
exports.assert(target && typeof target === 'object', 'Invalid target value: must be an object');
exports.assert(source === null || source === undefined || typeof source === 'object', 'Invalid source value: must be null, undefined, or an object');
@ -2907,7 +2907,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr
if (Array.isArray(source)) {
exports.assert(Array.isArray(target), 'Cannot merge array onto an object');
if (isMergeArrays === false) { // isMergeArrays defaults to true
if (isMergeObjects === false) { // isMergeObjects defaults to true
target.length = 0; // Must not change target assignment
}
@ -2935,7 +2935,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr
target[key] = exports.clone(value);
}
else {
exports.merge(target[key], value, isNullOverride, isMergeArrays);
exports.merge(target[key], value, isNullOverride, isMergeObjects);
}
}
else {
@ -4692,14 +4692,14 @@ var Hoek = require('hoek');
var internals = {};
exports = module.exports = internals.Topo = function () {
exports = module.exports = internals.Topo = function () {
this._items = [];
this.nodes = [];
this.nodes = [];
};
internals.Topo.prototype.add = function (nodes, options) {
internals.Topo.prototype.add = function (nodes, options) {
var self = this;
@ -4716,17 +4716,17 @@ internals.Topo.prototype.add = function (nodes, options) {
Hoek.assert(after.indexOf(group) === -1, 'Item cannot come after itself:', group);
Hoek.assert(after.indexOf('?') === -1, 'Item cannot come after unassociated items');
([].concat(nodes)).forEach(function (node, i) {
([].concat(nodes)).forEach(function (node, i) {
var item = {
var item = {
seq: self._items.length,
before: before,
after: after,
group: group,
node: node
node: node
};
self._items.push(item);
self._items.push(item);
});
// Insert event
@ -4734,7 +4734,7 @@ internals.Topo.prototype.add = function (nodes, options) {
var error = this._sort();
Hoek.assert(!error, 'item', (group !== '?' ? 'added into group ' + group : ''), 'created a dependencies error');
return this.nodes;
return this.nodes;
};
@ -4746,7 +4746,7 @@ internals.Topo.prototype._sort = function () {
var graph = {};
var graphAfters = {};
for (var i = 0, il = this._items.length; i < il; ++i) {
for (var i = 0, il = this._items.length; i < il; ++i) {
var item = this._items[i];
var seq = item.seq; // Unique across all items
var group = item.group;
@ -4763,55 +4763,55 @@ internals.Topo.prototype._sort = function () {
// Build second intermediary graph with 'after'
var after = item.after;
for (var j = 0, jl = after.length; j < jl; ++j) {
graphAfters[after[j]] = (graphAfters[after[j]] || []).concat(seq);
}
for (var j = 0, jl = after.length; j < jl; ++j) {
graphAfters[after[j]] = (graphAfters[after[j]] || []).concat(seq);
}
}
// Expand intermediary graph
var graphNodes = Object.keys(graph);
for (i = 0, il = graphNodes.length; i < il; ++i) {
for (i = 0, il = graphNodes.length; i < il; ++i) {
var node = graphNodes[i];
var expandedGroups = [];
var graphNodeItems = Object.keys(graph[node]);
for (j = 0, jl = graphNodeItems.length; j < jl; ++j) {
for (j = 0, jl = graphNodeItems.length; j < jl; ++j) {
var group = graph[node][graphNodeItems[j]];
groups[group] = groups[group] || [];
groups[group].forEach(function (d) {
groups[group].forEach(function (d) {
expandedGroups.push(d);
});
expandedGroups.push(d);
});
}
graph[node] = expandedGroups;
graph[node] = expandedGroups;
}
// Merge intermediary graph using graphAfters into final graph
var afterNodes = Object.keys(graphAfters);
for (i = 0, il = afterNodes.length; i < il; ++i) {
for (i = 0, il = afterNodes.length; i < il; ++i) {
var group = afterNodes[i];
if (groups[group]) {
for (j = 0, jl = groups[group].length; j < jl; ++j) {
if (groups[group]) {
for (j = 0, jl = groups[group].length; j < jl; ++j) {
var node = groups[group][j];
graph[node] = graph[node].concat(graphAfters[group]);
}
}
graph[node] = graph[node].concat(graphAfters[group]);
}
}
}
// Compile ancestors
var ancestors = {};
graphNodes = Object.keys(graph);
for (i = 0, il = graphNodes.length; i < il; ++i) {
for (i = 0, il = graphNodes.length; i < il; ++i) {
var node = graphNodes[i];
var children = graph[node];
for (j = 0, jl = children.length; j < jl; ++j) {
ancestors[children[j]] = (ancestors[children[j]] || []).concat(node);
}
for (j = 0, jl = children.length; j < jl; ++j) {
ancestors[children[j]] = (ancestors[children[j]] || []).concat(node);
}
}
// Topo sort
@ -4819,61 +4819,61 @@ internals.Topo.prototype._sort = function () {
var visited = {};
var sorted = [];
for (i = 0, il = this._items.length; i < il; ++i) {
for (i = 0, il = this._items.length; i < il; ++i) {
var next = i;
if (ancestors[i]) {
if (ancestors[i]) {
next = null;
for (j = 0, jl = this._items.length; j < jl; ++j) {
if (visited[j] === true) {
continue;
for (j = 0, jl = this._items.length; j < jl; ++j) {
if (visited[j] === true) {
continue;
}
if (!ancestors[j]) {
ancestors[j] = [];
if (!ancestors[j]) {
ancestors[j] = [];
}
var shouldSeeCount = ancestors[j].length;
var seenCount = 0;
for (var l = 0, ll = shouldSeeCount; l < ll; ++l) {
if (sorted.indexOf(ancestors[j][l]) >= 0) {
++seenCount;
}
for (var l = 0, ll = shouldSeeCount; l < ll; ++l) {
if (sorted.indexOf(ancestors[j][l]) >= 0) {
++seenCount;
}
}
if (seenCount === shouldSeeCount) {
if (seenCount === shouldSeeCount) {
next = j;
break;
}
}
break;
}
}
}
if (next !== null) {
if (next !== null) {
next = next.toString(); // Normalize to string TODO: replace with seq
visited[next] = true;
sorted.push(next);
}
sorted.push(next);
}
}
if (sorted.length !== this._items.length) {
return new Error('Invalid dependencies');
if (sorted.length !== this._items.length) {
return new Error('Invalid dependencies');
}
var seqIndex = {};
this._items.forEach(function (item) {
this._items.forEach(function (item) {
seqIndex[item.seq] = item;
seqIndex[item.seq] = item;
});
var sortedNodes = [];
this._items = sorted.map(function (value) {
this._items = sorted.map(function (value) {
var item = seqIndex[value];
sortedNodes.push(item.node);
return item;
return item;
});
this.nodes = sortedNodes;
this.nodes = sortedNodes;
};
},{"hoek":16}],23:[function(require,module,exports){

View File

@ -1174,7 +1174,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync)
/// @param id the id of the document
/// @param overwrite (optional) a boolean value or a json object
/// @param keepNull (optional) determines if null values should be saved or not
/// @param mergeArrays (optional) whether or not array values should be merged
/// @param mergeObjects (optional) whether or not object values should be merged
/// @param waitForSync (optional) a boolean value .
/// @example update("example/996280832675", { a : 1, c : 2} )
/// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true)
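A hedged arangosh sketch of the new option; the document handle is taken from the examples above and the collection `example` is assumed to exist:
```js
// With mergeObjects set to false, the stored value of `a` is replaced by the
// patch's sub-object instead of being merged with it.
db.example.update("example/996280832675",
  { a: { x: 2 } },
  { keepNull: true, mergeObjects: false });
```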
@ -1213,10 +1213,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

View File

@ -758,10 +758,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait
options.keepNull = true;
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

View File

@ -1173,7 +1173,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync)
/// @param id the id of the document
/// @param overwrite (optional) a boolean value or a json object
/// @param keepNull (optional) determines if null values should be saved or not
/// @param mergeArrays (optional) whether or not array values should be merged
/// @param mergeObjects (optional) whether or not object values should be merged
/// @param waitForSync (optional) a boolean value .
/// @example update("example/996280832675", { a : 1, c : 2} )
/// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true)
@ -1212,10 +1212,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

View File

@ -757,10 +757,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait
options.keepNull = true;
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

View File

@ -40,11 +40,22 @@ exports.createQuery = function createQuery (cfg) {
}
var query = cfg.query,
params = cfg.params,
context = cfg.context,
Model = cfg.model,
defaults = cfg.defaults,
transform = cfg.transform;
if (params === false) {
params = [];
} else if (params && !Array.isArray(params)) {
params = [params];
}
if (params && !params.every(function (v) {return typeof v === 'string';})) {
throw new Error('Argument names must be a string, an array of strings or false.');
}
if (!query || (typeof query !== 'string' && typeof query.toAQL !== 'function')) {
throw new Error('Expected query to be a string or a QueryBuilder instance.');
}
@ -61,7 +72,17 @@ exports.createQuery = function createQuery (cfg) {
throw new Error('Expected transform to be a function.');
}
return function query(vars, trArgs) {
return function query() {
var args = Array.prototype.slice.call(arguments);
var vars;
if (params) {
vars = {};
params.forEach(function (name) {
vars[name] = args.shift();
});
} else {
vars = args.shift();
}
vars = _.extend({}, defaults, vars);
if (context) {
_.each(vars, function (value, key) {
@ -76,7 +97,7 @@ exports.createQuery = function createQuery (cfg) {
return new Model(data);
});
}
return transform ? transform(result, trArgs) : result;
args.unshift(result);
return transform ? transform.apply(null, args) : result;
};
};

View File

@ -0,0 +1,172 @@
/*jshint strict: false, maxlen: 500 */
/*global require, assertEqual, assertTrue, assertNotEqual, AQL_EXPLAIN, AQL_EXECUTE */
////////////////////////////////////////////////////////////////////////////////
/// @brief tests for optimizer rules
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
var jsunity = require("jsunity");
var helper = require("org/arangodb/aql-helper");
var isEqual = helper.isEqual;
////////////////////////////////////////////////////////////////////////////////
/// @brief test suite
////////////////////////////////////////////////////////////////////////////////
function optimizerRuleTestSuite () {
var ruleName = "remove-collect-into";
// various choices to control the optimizer:
var paramNone = { optimizer: { rules: [ "-all" ] } };
var paramEnabled = { optimizer: { rules: [ "-all", "+remove-collect-into", "+" + ruleName ] } };
var paramDisabled = { optimizer: { rules: [ "+all", "-" + ruleName ] } };
return {
////////////////////////////////////////////////////////////////////////////////
/// @brief set up
////////////////////////////////////////////////////////////////////////////////
setUp : function () {
},
////////////////////////////////////////////////////////////////////////////////
/// @brief tear down
////////////////////////////////////////////////////////////////////////////////
tearDown : function () {
},
////////////////////////////////////////////////////////////////////////////////
/// @brief test that rule has no effect when explicitly disabled
////////////////////////////////////////////////////////////////////////////////
testRuleDisabled : function () {
var queries = [
"FOR i IN 1..10 COLLECT a = i INTO group RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN { a: a, b : b }"
];
queries.forEach(function(query) {
var result = AQL_EXPLAIN(query, { }, paramNone);
assertEqual([ ], result.plan.rules);
});
},
////////////////////////////////////////////////////////////////////////////////
/// @brief test that rule has no effect
////////////////////////////////////////////////////////////////////////////////
testRuleNoEffect : function () {
var queries = [
"FOR i IN 1..10 COLLECT a = i RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN { a: a, b : b, group: group }"
];
queries.forEach(function(query) {
var result = AQL_EXPLAIN(query, { }, paramEnabled);
assertTrue(result.plan.rules.indexOf(ruleName) === -1, query);
});
},
////////////////////////////////////////////////////////////////////////////////
/// @brief test that rule has an effect
////////////////////////////////////////////////////////////////////////////////
testRuleHasEffect : function () {
var queries = [
"FOR i IN 1..10 COLLECT a = i INTO group RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN a",
"FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN { a: a, b : b }"
];
queries.forEach(function(query) {
var result = AQL_EXPLAIN(query, { }, paramEnabled);
assertNotEqual(-1, result.plan.rules.indexOf(ruleName), query);
});
},
////////////////////////////////////////////////////////////////////////////////
/// @brief test generated plans
////////////////////////////////////////////////////////////////////////////////
testPlans : function () {
var plans = [
[ "FOR i IN 1..10 COLLECT a = i INTO group RETURN a", [ "SingletonNode", "CalculationNode", "EnumerateListNode", "SortNode", "AggregateNode", "ReturnNode" ] ],
[ "FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN a", [ "SingletonNode", "CalculationNode", "EnumerateListNode", "CalculationNode", "EnumerateListNode", "SortNode", "AggregateNode", "ReturnNode" ] ],
[ "FOR i IN 1..10 FOR j IN 1..10 COLLECT a = i, b = j INTO group RETURN { a: a, b : b }", [ "SingletonNode", "CalculationNode", "EnumerateListNode", "CalculationNode", "EnumerateListNode", "SortNode", "AggregateNode", "CalculationNode", "ReturnNode" ] ]
];
plans.forEach(function(plan) {
var result = AQL_EXPLAIN(plan[0], { }, paramEnabled);
assertNotEqual(-1, result.plan.rules.indexOf(ruleName), plan[0]);
assertEqual(plan[1], helper.getCompactPlan(result).map(function(node) { return node.type; }), plan[0]);
});
},
////////////////////////////////////////////////////////////////////////////////
/// @brief test results
////////////////////////////////////////////////////////////////////////////////
testResults : function () {
var queries = [
[ "FOR i IN 1..10 COLLECT a = i INTO group RETURN a", [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] ],
[ "FOR i IN 1..2 FOR j IN 1..2 COLLECT a = i, b = j INTO group RETURN [ a, b ]", [ [ 1, 1 ], [ 1, 2 ], [ 2, 1 ], [ 2, 2 ] ] ]
];
queries.forEach(function(query) {
var planDisabled = AQL_EXPLAIN(query[0], { }, paramDisabled);
var planEnabled = AQL_EXPLAIN(query[0], { }, paramEnabled);
var resultDisabled = AQL_EXECUTE(query[0], { }, paramDisabled).json;
var resultEnabled = AQL_EXECUTE(query[0], { }, paramEnabled).json;
assertTrue(isEqual(resultDisabled, resultEnabled), query[0]);
assertEqual(-1, planDisabled.plan.rules.indexOf(ruleName), query[0]);
assertNotEqual(-1, planEnabled.plan.rules.indexOf(ruleName), query[0]);
assertEqual(resultDisabled, query[1]);
assertEqual(resultEnabled, query[1]);
});
}
};
}
////////////////////////////////////////////////////////////////////////////////
/// @brief executes the test suite
////////////////////////////////////////////////////////////////////////////////
jsunity.run(optimizerRuleTestSuite);
return jsunity.done();
// Local Variables:
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @}\\)"
// End:

View File

@ -41,7 +41,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
TRI_json_t const* lhs,
TRI_json_t const* rhs,
bool nullMeansRemove,
bool mergeArrays) {
bool mergeObjects) {
TRI_json_t* result = TRI_CopyJson(zone, lhs);
if (result == nullptr) {
@ -66,7 +66,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
// existing array does not have the attribute => append new attribute
if (value->_type == TRI_JSON_ARRAY) {
TRI_json_t* empty = TRI_CreateArrayJson(zone);
TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeArrays);
TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeObjects);
TRI_Insert3ArrayJson(zone, result, key->_value._string.data, merged);
TRI_FreeJson(zone, empty);
@ -77,8 +77,8 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
}
else {
// existing array already has the attribute => replace attribute
if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeArrays) {
TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeArrays);
if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeObjects) {
TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeObjects);
TRI_ReplaceArrayJson(zone, result, key->_value._string.data, merged);
TRI_FreeJson(zone, merged);
}
@ -734,13 +734,13 @@ TRI_json_t* TRI_MergeJson (TRI_memory_zone_t* zone,
TRI_json_t const* lhs,
TRI_json_t const* rhs,
bool nullMeansRemove,
bool mergeArrays) {
bool mergeObjects) {
TRI_json_t* result;
TRI_ASSERT(lhs->_type == TRI_JSON_ARRAY);
TRI_ASSERT(rhs->_type == TRI_JSON_ARRAY);
result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeArrays);
result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeObjects);
return result;
}

View File

@ -92,7 +92,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate)
KeyOptionsKey(),
LengthKey(),
LifeTimeKey(),
MergeArraysKey(),
MergeObjectsKey(),
NameKey(),
OperationIDKey(),
ParametersKey(),
@ -177,7 +177,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate)
KeyOptionsKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("keyOptions"));
LengthKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("length"));
LifeTimeKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("lifeTime"));
MergeArraysKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("mergeArrays"));
MergeObjectsKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("mergeObjects"));
NameKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("name"));
OperationIDKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("operationID"));
OverwriteKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("overwrite"));

View File

@ -556,10 +556,10 @@ typedef struct TRI_v8_global_s {
v8::Persistent<v8::String> LifeTimeKey;
////////////////////////////////////////////////////////////////////////////////
/// @brief "mergeArrays" key name
/// @brief "mergeObjects" key name
////////////////////////////////////////////////////////////////////////////////
v8::Persistent<v8::String> MergeArraysKey;
v8::Persistent<v8::String> MergeObjectsKey;
////////////////////////////////////////////////////////////////////////////////
/// @brief "name" key