From 41ab3aa4d6dd25d5223ac841c165ddba18bfe08e Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 2 Dec 2014 17:03:03 +0100 Subject: [PATCH 1/7] Renamed mergeArrays->mergeObjects to make API less confusing. --- arangod/Aql/ExecutionBlock.cpp | 2 +- arangod/Aql/ExecutionPlan.cpp | 4 +- arangod/Aql/ModificationOptions.cpp | 4 +- arangod/Aql/ModificationOptions.h | 4 +- arangod/Cluster/ClusterMethods.cpp | 8 +- arangod/Cluster/ClusterMethods.h | 2 +- arangod/RestHandler/RestDocumentHandler.cpp | 24 ++-- arangod/V8Server/v8-collection.cpp | 18 +-- .../aardvark/frontend/js/lib/joi.browser.js | 112 +++++++++--------- .../modules/org/arangodb/arango-collection.js | 8 +- .../modules/org/arangodb/arango-database.js | 6 +- .../modules/org/arangodb/arango-collection.js | 8 +- .../modules/org/arangodb/arango-database.js | 6 +- lib/Basics/json-utilities.cpp | 12 +- lib/V8/v8-globals.cpp | 4 +- lib/V8/v8-globals.h | 4 +- 16 files changed, 113 insertions(+), 113 deletions(-) diff --git a/arangod/Aql/ExecutionBlock.cpp b/arangod/Aql/ExecutionBlock.cpp index 9ec159e1df..2319c54df5 100644 --- a/arangod/Aql/ExecutionBlock.cpp +++ b/arangod/Aql/ExecutionBlock.cpp @@ -3395,7 +3395,7 @@ void UpdateBlock::work (std::vector& blocks) { TRI_json_t* old = TRI_JsonShapedJson(_collection->documentCollection()->getShaper(), &shapedJson); if (old != nullptr) { - TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeArrays); + TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeObjects); TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, old); if (patchedJson != nullptr) { diff --git a/arangod/Aql/ExecutionPlan.cpp b/arangod/Aql/ExecutionPlan.cpp index 85ad758be9..cafe2e4ee1 100644 --- a/arangod/Aql/ExecutionPlan.cpp +++ b/arangod/Aql/ExecutionPlan.cpp @@ -251,8 +251,8 @@ ModificationOptions ExecutionPlan::createOptions (AstNode const* node) { // nullMeansRemove is 
the opposite of keepNull options.nullMeansRemove = value->isFalse(); } - else if (strcmp(name, "mergeArrays") == 0) { - options.mergeArrays = value->isTrue(); + else if (strcmp(name, "mergeObjects") == 0) { + options.mergeObjects = value->isTrue(); } } } diff --git a/arangod/Aql/ModificationOptions.cpp b/arangod/Aql/ModificationOptions.cpp index 4ab6953497..2848cc5847 100644 --- a/arangod/Aql/ModificationOptions.cpp +++ b/arangod/Aql/ModificationOptions.cpp @@ -36,7 +36,7 @@ ModificationOptions::ModificationOptions (Json const& json) { ignoreErrors = JsonHelper::getBooleanValue(array.json(), "ignoreErrors", false); waitForSync = JsonHelper::getBooleanValue(array.json(), "waitForSync", false); nullMeansRemove = JsonHelper::getBooleanValue(array.json(), "nullMeansRemove", false); - mergeArrays = JsonHelper::getBooleanValue(array.json(), "mergeArrays", false); + mergeObjects = JsonHelper::getBooleanValue(array.json(), "mergeObjects", false); } void ModificationOptions::toJson (triagens::basics::Json& json, @@ -46,7 +46,7 @@ void ModificationOptions::toJson (triagens::basics::Json& json, ("ignoreErrors", Json(ignoreErrors)) ("waitForSync", Json(waitForSync)) ("nullMeansRemove", Json(nullMeansRemove)) - ("mergeArrays", Json(mergeArrays)); + ("mergeObjects", Json(mergeObjects)); json ("modificationFlags", flags); } diff --git a/arangod/Aql/ModificationOptions.h b/arangod/Aql/ModificationOptions.h index 802184e605..d60769be42 100644 --- a/arangod/Aql/ModificationOptions.h +++ b/arangod/Aql/ModificationOptions.h @@ -54,7 +54,7 @@ namespace triagens { : ignoreErrors(false), waitForSync(false), nullMeansRemove(false), - mergeArrays(false) { + mergeObjects(false) { } void toJson (triagens::basics::Json& json, TRI_memory_zone_t* zone) const; @@ -66,7 +66,7 @@ namespace triagens { bool ignoreErrors; bool waitForSync; bool nullMeansRemove; - bool mergeArrays; + bool mergeObjects; }; diff --git a/arangod/Cluster/ClusterMethods.cpp b/arangod/Cluster/ClusterMethods.cpp index 
1385a6501f..b5abe0c3e8 100644 --- a/arangod/Cluster/ClusterMethods.cpp +++ b/arangod/Cluster/ClusterMethods.cpp @@ -1051,7 +1051,7 @@ int modifyDocumentOnCoordinator ( bool waitForSync, bool isPatch, bool keepNull, // only counts for isPatch == true - bool mergeArrays, // only counts for isPatch == true + bool mergeObjects, // only counts for isPatch == true TRI_json_t* json, map const& headers, triagens::rest::HttpResponse::HttpResponseCode& responseCode, @@ -1116,11 +1116,11 @@ int modifyDocumentOnCoordinator ( if (! keepNull) { revstr += "&keepNull=false"; } - if (mergeArrays) { - revstr += "&mergeArrays=true"; + if (mergeObjects) { + revstr += "&mergeObjects=true"; } else { - revstr += "&mergeArrays=false"; + revstr += "&mergeObjects=false"; } } else { diff --git a/arangod/Cluster/ClusterMethods.h b/arangod/Cluster/ClusterMethods.h index 569c6578eb..74b05a4069 100644 --- a/arangod/Cluster/ClusterMethods.h +++ b/arangod/Cluster/ClusterMethods.h @@ -177,7 +177,7 @@ namespace triagens { bool waitForSync, bool isPatch, bool keepNull, // only counts for isPatch == true - bool mergeArrays, // only counts for isPatch == true + bool mergeObjects, // only counts for isPatch == true TRI_json_t* json, std::map const& headers, triagens::rest::HttpResponse::HttpResponseCode& responseCode, diff --git a/arangod/RestHandler/RestDocumentHandler.cpp b/arangod/RestHandler/RestDocumentHandler.cpp index 390affb4ee..4d29ff256e 100644 --- a/arangod/RestHandler/RestDocumentHandler.cpp +++ b/arangod/RestHandler/RestDocumentHandler.cpp @@ -1202,11 +1202,11 @@ bool RestDocumentHandler::replaceDocument () { /// from the existing document that are contained in the patch document with an /// attribute value of *null*. 
/// -/// @RESTQUERYPARAM{mergeArrays,boolean,optional} -/// Controls whether arrays (not lists) will be merged if present in both the +/// @RESTQUERYPARAM{mergeObjects,boolean,optional} +/// Controls whether objects (not arrays) will be merged if present in both the /// existing and the patch document. If set to *false*, the value in the /// patch document will overwrite the existing document's value. If set to -/// *true*, arrays will be merged. The default is *true*. +/// *true*, objects will be merged. The default is *true*. /// /// @RESTQUERYPARAM{waitForSync,boolean,optional} /// Wait until document has been synced to disk. @@ -1416,7 +1416,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) { if (isPatch) { // patching an existing document bool nullMeansRemove; - bool mergeArrays; + bool mergeObjects; bool found; char const* valueStr = _request->value("keepNull", found); if (! found || StringUtils::boolean(valueStr)) { @@ -1428,13 +1428,13 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) { nullMeansRemove = true; } - valueStr = _request->value("mergeArrays", found); + valueStr = _request->value("mergeObjects", found); if (! found || StringUtils::boolean(valueStr)) { // the default is true - mergeArrays = true; + mergeObjects = true; } else { - mergeArrays = false; + mergeObjects = false; } // read the existing document @@ -1487,7 +1487,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) { } } - TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeArrays); + TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeObjects); TRI_FreeJson(shaper->_memoryZone, old); TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json); @@ -1593,14 +1593,14 @@ bool RestDocumentHandler::modifyDocumentCoordinator ( if (! 
strcmp(_request->value("keepNull"), "false")) { keepNull = false; } - bool mergeArrays = true; - if (TRI_EqualString(_request->value("mergeArrays"), "false")) { - mergeArrays = false; + bool mergeObjects = true; + if (TRI_EqualString(_request->value("mergeObjects"), "false")) { + mergeObjects = false; } int error = triagens::arango::modifyDocumentOnCoordinator( dbname, collname, key, rev, policy, waitForSync, isPatch, - keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody); + keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody); if (error != TRI_ERROR_NO_ERROR) { generateTransactionError(collname, error); diff --git a/arangod/V8Server/v8-collection.cpp b/arangod/V8Server/v8-collection.cpp index 18eacb9208..f05a7f089d 100644 --- a/arangod/V8Server/v8-collection.cpp +++ b/arangod/V8Server/v8-collection.cpp @@ -91,7 +91,7 @@ struct InsertOptions { struct UpdateOptions { bool overwrite = false; bool keepNull = true; - bool mergeArrays = true; + bool mergeObjects = true; bool waitForSync = false; bool silent = false; }; @@ -702,7 +702,7 @@ static v8::Handle ModifyVocbaseColCoordinator ( bool waitForSync, bool isPatch, bool keepNull, // only counts if isPatch==true - bool mergeArrays, // only counts if isPatch==true + bool mergeObjects, // only counts if isPatch==true bool silent, v8::Arguments const& argv) { v8::HandleScope scope; @@ -736,7 +736,7 @@ static v8::Handle ModifyVocbaseColCoordinator ( error = triagens::arango::modifyDocumentOnCoordinator( dbname, collname, key, rev, policy, waitForSync, isPatch, - keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody); + keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody); // Note that the json has been freed inside! 
if (error != TRI_ERROR_NO_ERROR) { @@ -877,7 +877,7 @@ static v8::Handle ReplaceVocbaseCol (bool useCollection, options.waitForSync, false, // isPatch true, // keepNull, does not matter - false, // mergeArrays, does not matter + false, // mergeObjects, does not matter options.silent, argv)); } @@ -1084,7 +1084,7 @@ static v8::Handle UpdateVocbaseCol (bool useCollection, TRI_v8_global_t* v8g = static_cast(v8::Isolate::GetCurrent()->GetData()); if (argLength < 2 || argLength > 5) { - TRI_V8_EXCEPTION_USAGE(scope, "update(, , {overwrite: booleanValue, keepNull: booleanValue, mergeArrays: booleanValue, waitForSync: booleanValue})"); + TRI_V8_EXCEPTION_USAGE(scope, "update(, , {overwrite: booleanValue, keepNull: booleanValue, mergeObjects: booleanValue, waitForSync: booleanValue})"); } if (argLength > 2) { @@ -1097,8 +1097,8 @@ static v8::Handle UpdateVocbaseCol (bool useCollection, if (optionsObject->Has(v8g->KeepNullKey)) { options.keepNull = TRI_ObjectToBoolean(optionsObject->Get(v8g->KeepNullKey)); } - if (optionsObject->Has(v8g->MergeArraysKey)) { - options.mergeArrays = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeArraysKey)); + if (optionsObject->Has(v8g->MergeObjectsKey)) { + options.mergeObjects = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeObjectsKey)); } if (optionsObject->Has(v8g->WaitForSyncKey)) { options.waitForSync = TRI_ObjectToBoolean(optionsObject->Get(v8g->WaitForSyncKey)); @@ -1166,7 +1166,7 @@ static v8::Handle UpdateVocbaseCol (bool useCollection, options.waitForSync, true, // isPatch options.keepNull, - options.mergeArrays, + options.mergeObjects, options.silent, argv)); } @@ -1233,7 +1233,7 @@ static v8::Handle UpdateVocbaseCol (bool useCollection, } } - TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! options.keepNull, options.mergeArrays); + TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! 
options.keepNull, options.mergeObjects); TRI_FreeJson(zone, old); TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json); diff --git a/js/apps/system/aardvark/frontend/js/lib/joi.browser.js b/js/apps/system/aardvark/frontend/js/lib/joi.browser.js index 9c446249b8..87b123e644 100644 --- a/js/apps/system/aardvark/frontend/js/lib/joi.browser.js +++ b/js/apps/system/aardvark/frontend/js/lib/joi.browser.js @@ -2896,7 +2896,7 @@ exports.clone = function (obj, seen) { // Merge all the properties of source into target, source wins in conflict, and by default null and undefined from source are applied -exports.merge = function (target, source, isNullOverride /* = true */, isMergeArrays /* = true */) { +exports.merge = function (target, source, isNullOverride /* = true */, isMergeObjects /* = true */) { exports.assert(target && typeof target === 'object', 'Invalid target value: must be an object'); exports.assert(source === null || source === undefined || typeof source === 'object', 'Invalid source value: must be null, undefined, or an object'); @@ -2907,7 +2907,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr if (Array.isArray(source)) { exports.assert(Array.isArray(target), 'Cannot merge array onto an object'); - if (isMergeArrays === false) { // isMergeArrays defaults to true + if (isMergeObjects === false) { // isMergeObjects defaults to true target.length = 0; // Must not change target assignment } @@ -2935,7 +2935,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr target[key] = exports.clone(value); } else { - exports.merge(target[key], value, isNullOverride, isMergeArrays); + exports.merge(target[key], value, isNullOverride, isMergeObjects); } } else { @@ -4692,14 +4692,14 @@ var Hoek = require('hoek'); var internals = {}; -exports = module.exports = internals.Topo = function () { +exports = module.exports = internals.Topo = function () { this._items = []; - this.nodes = []; + this.nodes = []; }; 
-internals.Topo.prototype.add = function (nodes, options) { +internals.Topo.prototype.add = function (nodes, options) { var self = this; @@ -4716,17 +4716,17 @@ internals.Topo.prototype.add = function (nodes, options) { Hoek.assert(after.indexOf(group) === -1, 'Item cannot come after itself:', group); Hoek.assert(after.indexOf('?') === -1, 'Item cannot come after unassociated items'); - ([].concat(nodes)).forEach(function (node, i) { + ([].concat(nodes)).forEach(function (node, i) { - var item = { + var item = { seq: self._items.length, before: before, after: after, group: group, - node: node + node: node }; - self._items.push(item); + self._items.push(item); }); // Insert event @@ -4734,7 +4734,7 @@ internals.Topo.prototype.add = function (nodes, options) { var error = this._sort(); Hoek.assert(!error, 'item', (group !== '?' ? 'added into group ' + group : ''), 'created a dependencies error'); - return this.nodes; + return this.nodes; }; @@ -4746,7 +4746,7 @@ internals.Topo.prototype._sort = function () { var graph = {}; var graphAfters = {}; - for (var i = 0, il = this._items.length; i < il; ++i) { + for (var i = 0, il = this._items.length; i < il; ++i) { var item = this._items[i]; var seq = item.seq; // Unique across all items var group = item.group; @@ -4763,55 +4763,55 @@ internals.Topo.prototype._sort = function () { // Build second intermediary graph with 'after' var after = item.after; - for (var j = 0, jl = after.length; j < jl; ++j) { - graphAfters[after[j]] = (graphAfters[after[j]] || []).concat(seq); - } + for (var j = 0, jl = after.length; j < jl; ++j) { + graphAfters[after[j]] = (graphAfters[after[j]] || []).concat(seq); + } } // Expand intermediary graph var graphNodes = Object.keys(graph); - for (i = 0, il = graphNodes.length; i < il; ++i) { + for (i = 0, il = graphNodes.length; i < il; ++i) { var node = graphNodes[i]; var expandedGroups = []; var graphNodeItems = Object.keys(graph[node]); - for (j = 0, jl = graphNodeItems.length; j < jl; ++j) { + 
for (j = 0, jl = graphNodeItems.length; j < jl; ++j) { var group = graph[node][graphNodeItems[j]]; groups[group] = groups[group] || []; - groups[group].forEach(function (d) { + groups[group].forEach(function (d) { - expandedGroups.push(d); - }); + expandedGroups.push(d); + }); } - graph[node] = expandedGroups; + graph[node] = expandedGroups; } // Merge intermediary graph using graphAfters into final graph var afterNodes = Object.keys(graphAfters); - for (i = 0, il = afterNodes.length; i < il; ++i) { + for (i = 0, il = afterNodes.length; i < il; ++i) { var group = afterNodes[i]; - if (groups[group]) { - for (j = 0, jl = groups[group].length; j < jl; ++j) { + if (groups[group]) { + for (j = 0, jl = groups[group].length; j < jl; ++j) { var node = groups[group][j]; - graph[node] = graph[node].concat(graphAfters[group]); - } - } + graph[node] = graph[node].concat(graphAfters[group]); + } + } } // Compile ancestors var ancestors = {}; graphNodes = Object.keys(graph); - for (i = 0, il = graphNodes.length; i < il; ++i) { + for (i = 0, il = graphNodes.length; i < il; ++i) { var node = graphNodes[i]; var children = graph[node]; - for (j = 0, jl = children.length; j < jl; ++j) { - ancestors[children[j]] = (ancestors[children[j]] || []).concat(node); - } + for (j = 0, jl = children.length; j < jl; ++j) { + ancestors[children[j]] = (ancestors[children[j]] || []).concat(node); + } } // Topo sort @@ -4819,61 +4819,61 @@ internals.Topo.prototype._sort = function () { var visited = {}; var sorted = []; - for (i = 0, il = this._items.length; i < il; ++i) { + for (i = 0, il = this._items.length; i < il; ++i) { var next = i; - if (ancestors[i]) { + if (ancestors[i]) { next = null; - for (j = 0, jl = this._items.length; j < jl; ++j) { - if (visited[j] === true) { - continue; + for (j = 0, jl = this._items.length; j < jl; ++j) { + if (visited[j] === true) { + continue; } - if (!ancestors[j]) { - ancestors[j] = []; + if (!ancestors[j]) { + ancestors[j] = []; } var shouldSeeCount = 
ancestors[j].length; var seenCount = 0; - for (var l = 0, ll = shouldSeeCount; l < ll; ++l) { - if (sorted.indexOf(ancestors[j][l]) >= 0) { - ++seenCount; - } + for (var l = 0, ll = shouldSeeCount; l < ll; ++l) { + if (sorted.indexOf(ancestors[j][l]) >= 0) { + ++seenCount; + } } - if (seenCount === shouldSeeCount) { + if (seenCount === shouldSeeCount) { next = j; - break; - } - } + break; + } + } } - if (next !== null) { + if (next !== null) { next = next.toString(); // Normalize to string TODO: replace with seq visited[next] = true; - sorted.push(next); - } + sorted.push(next); + } } - if (sorted.length !== this._items.length) { - return new Error('Invalid dependencies'); + if (sorted.length !== this._items.length) { + return new Error('Invalid dependencies'); } var seqIndex = {}; - this._items.forEach(function (item) { + this._items.forEach(function (item) { - seqIndex[item.seq] = item; + seqIndex[item.seq] = item; }); var sortedNodes = []; - this._items = sorted.map(function (value) { + this._items = sorted.map(function (value) { var item = seqIndex[value]; sortedNodes.push(item.node); - return item; + return item; }); - this.nodes = sortedNodes; + this.nodes = sortedNodes; }; },{"hoek":16}],23:[function(require,module,exports){ diff --git a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-collection.js b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-collection.js index 9b5643ac32..9063045bf7 100644 --- a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-collection.js +++ b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-collection.js @@ -1174,7 +1174,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync) /// @param id the id of the document /// @param overwrite (optional) a boolean value or a json object /// @param keepNull (optional) determines if null values should saved or not -/// @param mergeArrays (optional) whether or not array values should be merged +/// 
@param mergeObjects (optional) whether or not object values should be merged /// @param waitForSync (optional) a boolean value . /// @example update("example/996280832675", { a : 1, c : 2} ) /// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true) @@ -1213,10 +1213,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai } params = "?keepNull=" + options.keepNull; - if (! options.hasOwnProperty("mergeArrays")) { - options.mergeArrays = true; + if (! options.hasOwnProperty("mergeObjects")) { + options.mergeObjects = true; } - params += "&mergeArrays=" + options.mergeArrays; + params += "&mergeObjects=" + options.mergeObjects; if (options.hasOwnProperty("overwrite") && options.overwrite) { params += "&policy=last"; diff --git a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-database.js b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-database.js index 66bfdd84de..5a059e562a 100644 --- a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-database.js +++ b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/arango-database.js @@ -758,10 +758,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait options.keepNull = true; } params = "?keepNull=" + options.keepNull; - if (! options.hasOwnProperty("mergeArrays")) { - options.mergeArrays = true; + if (! 
options.hasOwnProperty("mergeObjects")) { + options.mergeObjects = true; } - params += "&mergeArrays=" + options.mergeArrays; + params += "&mergeObjects=" + options.mergeObjects; if (options.hasOwnProperty("overwrite") && options.overwrite) { params += "&policy=last"; diff --git a/js/client/modules/org/arangodb/arango-collection.js b/js/client/modules/org/arangodb/arango-collection.js index 1a0111b15b..7dbb5718f7 100644 --- a/js/client/modules/org/arangodb/arango-collection.js +++ b/js/client/modules/org/arangodb/arango-collection.js @@ -1173,7 +1173,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync) /// @param id the id of the document /// @param overwrite (optional) a boolean value or a json object /// @param keepNull (optional) determines if null values should saved or not -/// @param mergeArrays (optional) whether or not array values should be merged +/// @param mergeObjects (optional) whether or not object values should be merged /// @param waitForSync (optional) a boolean value . /// @example update("example/996280832675", { a : 1, c : 2} ) /// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true) @@ -1212,10 +1212,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai } params = "?keepNull=" + options.keepNull; - if (! options.hasOwnProperty("mergeArrays")) { - options.mergeArrays = true; + if (! 
options.hasOwnProperty("mergeObjects")) { + options.mergeObjects = true; } - params += "&mergeArrays=" + options.mergeArrays; + params += "&mergeObjects=" + options.mergeObjects; if (options.hasOwnProperty("overwrite") && options.overwrite) { params += "&policy=last"; diff --git a/js/client/modules/org/arangodb/arango-database.js b/js/client/modules/org/arangodb/arango-database.js index 33186bd804..c269318cb3 100644 --- a/js/client/modules/org/arangodb/arango-database.js +++ b/js/client/modules/org/arangodb/arango-database.js @@ -757,10 +757,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait options.keepNull = true; } params = "?keepNull=" + options.keepNull; - if (! options.hasOwnProperty("mergeArrays")) { - options.mergeArrays = true; + if (! options.hasOwnProperty("mergeObjects")) { + options.mergeObjects = true; } - params += "&mergeArrays=" + options.mergeArrays; + params += "&mergeObjects=" + options.mergeObjects; if (options.hasOwnProperty("overwrite") && options.overwrite) { params += "&policy=last"; diff --git a/lib/Basics/json-utilities.cpp b/lib/Basics/json-utilities.cpp index b1fcab336e..0978b5a5e2 100644 --- a/lib/Basics/json-utilities.cpp +++ b/lib/Basics/json-utilities.cpp @@ -41,7 +41,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone, TRI_json_t const* lhs, TRI_json_t const* rhs, bool nullMeansRemove, - bool mergeArrays) { + bool mergeObjects) { TRI_json_t* result = TRI_CopyJson(zone, lhs); if (result == nullptr) { @@ -66,7 +66,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone, // existing array does not have the attribute => append new attribute if (value->_type == TRI_JSON_ARRAY) { TRI_json_t* empty = TRI_CreateArrayJson(zone); - TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeArrays); + TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeObjects); TRI_Insert3ArrayJson(zone, result, key->_value._string.data, merged); 
TRI_FreeJson(zone, empty); @@ -77,8 +77,8 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone, } else { // existing array already has the attribute => replace attribute - if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeArrays) { - TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeArrays); + if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeObjects) { + TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeObjects); TRI_ReplaceArrayJson(zone, result, key->_value._string.data, merged); TRI_FreeJson(zone, merged); } @@ -734,13 +734,13 @@ TRI_json_t* TRI_MergeJson (TRI_memory_zone_t* zone, TRI_json_t const* lhs, TRI_json_t const* rhs, bool nullMeansRemove, - bool mergeArrays) { + bool mergeObjects) { TRI_json_t* result; TRI_ASSERT(lhs->_type == TRI_JSON_ARRAY); TRI_ASSERT(rhs->_type == TRI_JSON_ARRAY); - result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeArrays); + result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeObjects); return result; } diff --git a/lib/V8/v8-globals.cpp b/lib/V8/v8-globals.cpp index 127959b432..ce7c29ec8c 100644 --- a/lib/V8/v8-globals.cpp +++ b/lib/V8/v8-globals.cpp @@ -92,7 +92,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate) KeyOptionsKey(), LengthKey(), LifeTimeKey(), - MergeArraysKey(), + MergeObjectsKey(), NameKey(), OperationIDKey(), ParametersKey(), @@ -177,7 +177,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate) KeyOptionsKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("keyOptions")); LengthKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("length")); LifeTimeKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("lifeTime")); - MergeArraysKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("mergeArrays")); + MergeObjectsKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("mergeObjects")); NameKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("name")); OperationIDKey = 
v8::Persistent::New(isolate, TRI_V8_SYMBOL("operationID")); OverwriteKey = v8::Persistent::New(isolate, TRI_V8_SYMBOL("overwrite")); diff --git a/lib/V8/v8-globals.h b/lib/V8/v8-globals.h index 972ec77249..3fb8cd08a8 100644 --- a/lib/V8/v8-globals.h +++ b/lib/V8/v8-globals.h @@ -556,10 +556,10 @@ typedef struct TRI_v8_global_s { v8::Persistent LifeTimeKey; //////////////////////////////////////////////////////////////////////////////// -/// @brief "mergeArrays" key name +/// @brief "mergeObjects" key name //////////////////////////////////////////////////////////////////////////////// - v8::Persistent MergeArraysKey; + v8::Persistent MergeObjectsKey; //////////////////////////////////////////////////////////////////////////////// /// @brief "name" key From 741bf1e4d5fc93ae0fd1455238a9fd378e543e44 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 2 Dec 2014 18:25:36 +0100 Subject: [PATCH 2/7] Support positional arguments in Foxx queries. --- .../Books/Users/Foxx/FoxxQueries.mdpp | 44 ++++++++++++++++++- .../Books/Users/Foxx/FoxxRepository.mdpp | 25 +++++------ js/server/modules/org/arangodb/foxx/query.js | 25 +++++++++-- 3 files changed, 76 insertions(+), 18 deletions(-) diff --git a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp index 46ca157f33..ab0f811a67 100644 --- a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp +++ b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp @@ -27,12 +27,13 @@ console.log('usernames:', usernames); Creates a query function that performs the given query and returns the result. -The returned query function optionally takes an object as its argument. If an object is provided, its properties will be used as the query's bind parameters. Note that collection bind parameters need to be prefixed with an at-sign, e.g. `{'@myCollectionVar': 'my_collection_name'}`. +The returned query function optionally takes an object as its argument. 
If an object is provided, its properties will be used as the query's bind parameters. Any additional arguments will be passed to the transform function (or dropped if no transform function is defined). *Parameter* * *cfg*: an object with the following properties: * *query*: an AQL query string or an ArangoDB Query Builder query object. + * *params* (optional): an array of parameter names. * *context* (optional): an *applicationContext*. * *model* (optional): a *Foxx.Model* that will be applied to the query results. * *defaults* (optional): default values for the query's bind parameters. These can be overridden by passing a value for the same name to the query function. @@ -42,7 +43,9 @@ If *cfg* is a string, it will be used as the value of *cfg.query* instead. If a *context* is specified, the values of all collection bind parameters will be passed through the context's *collectionName* method. -Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar`. +Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar` and their parameter name needs to be prefixed with an at-sign as well, e.g. `{'@myCollectionVar': 'collection_name'}`. + +If *params* is provided, the query function will accept positional arguments instead of an object. If *params* is a string, it will be treated as an array containing that string. If both *model* and *transform* are provided, the *transform* function will be applied to the result array _after_ the results have been converted into model instances. The *transform* function is always passed the entire result array and its return value will be returned by the query function. 
@@ -62,6 +65,16 @@ var query = Foxx.createQuery('FOR u IN _users RETURN u[@propName]'); var usernames = query({propName: 'user'}); ``` +Using positional arguments: + +```js +var query = Foxx.createQuery({ + query: 'FOR u IN _users RETURN u[@propName]', + params: ['propName'] +}); +var usernames = query('user'); +``` + Using models: ```js @@ -93,3 +106,30 @@ var query = Foxx.createQuery({ }); var user = query(); // first user by username ``` + +Using a transformation with extra arguments: + +```js +var query = Foxx.createQuery({ + query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]', + transform: function (results, uppercase) { + return uppercase ? results[0].toUpperCase() : results[0].toLowerCase(); + } +}); +query({propName: 'user'}, true); // username of first user in uppercase +query({propName: 'user'}, false); // username of first user in lowercase +``` + +Using a transformation with extra arguments (using positional arguments): + +```js +var query = Foxx.createQuery({ + query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]', + params: ['propName'], + transform: function (results, uppercase) { + return uppercase ?
results[0].toUpperCase() : results[0].toLowerCase(); + } +}); +query('user', true); // username of first user in uppercase +query('user', false); // username of first user in lowercase +``` diff --git a/Documentation/Books/Users/Foxx/FoxxRepository.mdpp b/Documentation/Books/Users/Foxx/FoxxRepository.mdpp index 0a5c2f73ca..afa45efbc1 100644 --- a/Documentation/Books/Users/Foxx/FoxxRepository.mdpp +++ b/Documentation/Books/Users/Foxx/FoxxRepository.mdpp @@ -23,7 +23,7 @@ You can define custom query methods using Foxx.createQuery and Foxx.Repository.e Making a simple query in the repository and using it from the controller: -```javascript +```js // in the repository var Foxx = require("org/arangodb/foxx"); @@ -41,29 +41,31 @@ ctrl.get("/", function(req, res) { It is also possible to supply parameters to the query: -```javascript +```js // in the repository -getPendingItemById: Foxx.createQuery( - 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo' -) +getPendingItemById: Foxx.createQuery({ + query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo', + params: ['id'] +}) // in the controller ctrl.get("/:id", function(req, res) { var id = req.params("id"); - var rv = todosRepository.getPendingItemById({ id: id }); + var rv = todosRepository.getPendingItemById(id); res.json(rv); }); ``` The list of results can also be transformed before returning it from the repository: -```javascript +```js // in the repository getPendingItemById: Foxx.createQuery({ query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo', - transform: function(results, args) { + params: ['id'], + transform: function(results, extra) { for (var i = 0; i < results.length; i++) { - results[i].extraProperty = args.extra; + results[i].extraProperty = extra; } } }) @@ -72,10 +74,7 @@ getPendingItemById: Foxx.createQuery({ ctrl.get("/:id", function(req, res) { var id = req.params("id"); var 
extra = req.params("extra"); - var rv = todosRepository.getPendingItemById( - { id: id }, - { extra: extra } - ); + var rv = todosRepository.getPendingItemById(id, extra); res.json(rv); }); ``` diff --git a/js/server/modules/org/arangodb/foxx/query.js b/js/server/modules/org/arangodb/foxx/query.js index 018b9aaa79..f2739e188a 100644 --- a/js/server/modules/org/arangodb/foxx/query.js +++ b/js/server/modules/org/arangodb/foxx/query.js @@ -40,11 +40,20 @@ exports.createQuery = function createQuery (cfg) { } var query = cfg.query, + params = cfg.params, context = cfg.context, Model = cfg.model, defaults = cfg.defaults, transform = cfg.transform; + if (params && !Array.isArray(params)) { + params = [params]; + } + + if (params && !params.each(function (v) {return typeof v === 'string';})) { + throw new Error('Argument names must be a string or an array of strings.'); + } + if (!query || (typeof query !== 'string' && typeof query.toAQL !== 'function')) { throw new Error('Expected query to be a string or a QueryBuilder instance.'); } @@ -61,7 +70,17 @@ exports.createQuery = function createQuery (cfg) { throw new Error('Expected transform to be a function.'); } - return function query(vars, trArgs) { + return function query() { + var args = Array.prototype.slice.call(arguments); + var vars; + if (params) { + vars = {}; + params.forEach(function (name) { + vars[name] = args.shift(); + }); + } else { + vars = args.shift(); + } vars = _.extend({}, defaults, vars); if (context) { _.each(vars, function (value, key) { @@ -76,7 +95,7 @@ exports.createQuery = function createQuery (cfg) { return new Model(data); }); } - - return transform ? transform(result, trArgs) : result; + args.unshift(result); + return transform ? transform.apply(null, args) : result; }; }; From 382747add9cb4aad412b23bab4511ab1214f3ff5 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Wed, 3 Dec 2014 10:49:13 +0100 Subject: [PATCH 3/7] Added example with no query params. 
--- Documentation/Books/Users/Foxx/FoxxQueries.mdpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp index ab0f811a67..8f84f27421 100644 --- a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp +++ b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp @@ -133,3 +133,17 @@ var query = Foxx.createQuery({ query('user', true); // username of first user in uppercase query('user', false); // username of first user in lowercase ``` + +Using a transformation with extra arguments (and no query parameters): + +```js +var query = Foxx.createQuery({ + query: 'FOR u IN _users SORT u.user ASC RETURN u.user', + params: [], + transform: function (results, uppercase) { + return uppercase ? results[0].toUpperCase() : results[0].toLowerCase(); + } +}); +query(true); // username of first user in uppercase +query(false); // username of first user in lowercase +``` From 7a4076da8fecc77493242c31a42c464addf714ac Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Wed, 3 Dec 2014 10:52:47 +0100 Subject: [PATCH 4/7] Also accept `false` instead of an empty array. --- Documentation/Books/Users/Foxx/FoxxQueries.mdpp | 2 +- js/server/modules/org/arangodb/foxx/query.js | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp index 8f84f27421..f7e7e8a95a 100644 --- a/Documentation/Books/Users/Foxx/FoxxQueries.mdpp +++ b/Documentation/Books/Users/Foxx/FoxxQueries.mdpp @@ -139,7 +139,7 @@ Using a transformation with extra arguments (and no query parameters): ```js var query = Foxx.createQuery({ query: 'FOR u IN _users SORT u.user ASC RETURN u.user', - params: [], + params: false, // an empty array would work, too transform: function (results, uppercase) { return uppercase ? 
results[0].toUpperCase() : results[0].toLowerCase(); } diff --git a/js/server/modules/org/arangodb/foxx/query.js b/js/server/modules/org/arangodb/foxx/query.js index f2739e188a..7715004368 100644 --- a/js/server/modules/org/arangodb/foxx/query.js +++ b/js/server/modules/org/arangodb/foxx/query.js @@ -46,12 +46,14 @@ exports.createQuery = function createQuery (cfg) { defaults = cfg.defaults, transform = cfg.transform; - if (params && !Array.isArray(params)) { + if (params === false) { + params = []; + } else if (params && !Array.isArray(params)) { params = [params]; } if (params && !params.each(function (v) {return typeof v === 'string';})) { - throw new Error('Argument names must be a string or an array of strings.'); + throw new Error('Argument names must be a string, an array of strings or false.'); } if (!query || (typeof query !== 'string' && typeof query.toAQL !== 'function')) { From ac4597b802871160450117fc7b18aa6d15ab9b92 Mon Sep 17 00:00:00 2001 From: Thomas Schmidts Date: Wed, 3 Dec 2014 14:45:09 +0100 Subject: [PATCH 5/7] Fixed #1150. Added Queries to the Summary --- Documentation/Books/Users/Foxx/FoxxRepository.mdpp | 2 ++ Documentation/Books/Users/SUMMARY.md | 1 + 2 files changed, 3 insertions(+) diff --git a/Documentation/Books/Users/Foxx/FoxxRepository.mdpp b/Documentation/Books/Users/Foxx/FoxxRepository.mdpp index 0a5c2f73ca..acb9a39ff5 100644 --- a/Documentation/Books/Users/Foxx/FoxxRepository.mdpp +++ b/Documentation/Books/Users/Foxx/FoxxRepository.mdpp @@ -19,6 +19,8 @@ exports.repository = TodosRepository; You can define custom query methods using Foxx.createQuery and Foxx.Repository.extend. +For more details see the chapter on [Foxx Queries](../Foxx/FoxxQueries.md). 
+ *Examples* Making a simple query in the repository and using it from the controller: diff --git a/Documentation/Books/Users/SUMMARY.md b/Documentation/Books/Users/SUMMARY.md index bc10d32421..fa612f9628 100644 --- a/Documentation/Books/Users/SUMMARY.md +++ b/Documentation/Books/Users/SUMMARY.md @@ -108,6 +108,7 @@ * [FoxxController](Foxx/FoxxController.md) * [FoxxModel](Foxx/FoxxModel.md) * [FoxxRepository](Foxx/FoxxRepository.md) + * [Foxx Queries](Foxx/FoxxQueries.md) * [Deploying Applications](Foxx/DeployingAnApplication.md) * [Developing Applications](Foxx/DevelopingAnApplication.md) * [Dependency Injection](Foxx/FoxxInjection.md) From 8182539b66266534eac21873587079dda84654a8 Mon Sep 17 00:00:00 2001 From: Max Neunhoeffer Date: Wed, 3 Dec 2014 16:46:18 +0100 Subject: [PATCH 6/7] Introduce invariants in processHeader. --- lib/SimpleHttpClient/SimpleHttpClient.cpp | 41 +++++++++++++++++++---- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/lib/SimpleHttpClient/SimpleHttpClient.cpp b/lib/SimpleHttpClient/SimpleHttpClient.cpp index e566f0ab35..1e989eea0e 100644 --- a/lib/SimpleHttpClient/SimpleHttpClient.cpp +++ b/lib/SimpleHttpClient/SimpleHttpClient.cpp @@ -194,8 +194,16 @@ namespace triagens { // we need to read a at least one byte to make progress bool progress; + std::cout << "ReadBufV:" << (unsigned long) _readBuffer.c_str() << " " + << _readBuffer.length() << " " + << _readBufferOffset << std::endl; + bool res = _connection->handleRead(remainingTime, _readBuffer, progress); + std::cout << "ReadBufN:" << (unsigned long) _readBuffer.c_str() << " " + << _readBuffer.length() << " " + << _readBufferOffset << std::endl; + // If there was an error, then we are doomed: if (! 
res) { std::cout << "doomed\n"; @@ -313,6 +321,9 @@ namespace triagens { _readBuffer.clear(); _readBufferOffset = 0; + std::cout << "ReadBufC:" << (unsigned long) _readBuffer.c_str() << " " + << _readBuffer.length() << " " + << _readBufferOffset << std::endl; if (_result) { _result->clear(); } @@ -494,11 +505,20 @@ namespace triagens { // ----------------------------------------------------------------------------- void SimpleHttpClient::processHeader () { + TRI_ASSERT(_readBufferOffset <= _readBuffer.length()); size_t remain = _readBuffer.length() - _readBufferOffset; char const* ptr = _readBuffer.c_str() + _readBufferOffset; char const* pos = (char*) memchr(ptr, '\n', remain); + // We enforce the following invariants: + // ptr = _readBuffer.c_str() + _readBufferOffset + // _readBuffer.length() >= _readBufferOffset + // remain = _readBuffer.length() - _readBufferOffset while (pos) { + TRI_ASSERT(_readBufferOffset <= _readBuffer.length()); + TRI_ASSERT(ptr == _readBuffer.c_str() + _readBufferOffset); + TRI_ASSERT(remain == _readBuffer.length() - _readBufferOffset); + if (pos > ptr && *(pos - 1) == '\r') { // adjust eol position --pos; @@ -506,12 +526,16 @@ namespace triagens { // end of header found if (*ptr == '\r' || *ptr == '\0') { - size_t len = pos - (_readBuffer.c_str() + _readBufferOffset); - _readBufferOffset += (len + 1); + size_t len = pos - ptr; + _readBufferOffset += len + 1; + ptr += len + 1; + remain -= len + 1; if (*pos == '\r') { // adjust offset if line ended with \r\n ++_readBufferOffset; + ptr++; + remain--; } // handle chunks @@ -536,6 +560,7 @@ namespace triagens { if (! 
_keepAlive) { _connection->disconnect(); } + return; } // found content-length header in response @@ -573,14 +598,18 @@ namespace triagens { } ptr += len + 1; - - TRI_ASSERT(remain >= (len + 1)); + _readBufferOffset += len + 1; remain -= (len + 1); - + + TRI_ASSERT(_readBufferOffset <= _readBuffer.length()); + TRI_ASSERT(ptr == _readBuffer.c_str() + _readBufferOffset); + TRI_ASSERT(remain == _readBuffer.length() - _readBufferOffset); pos = (char*) memchr(ptr, '\n', remain); if (pos == nullptr) { - _readBufferOffset = ptr - _readBuffer.c_str() + 1; + _readBufferOffset++; + ptr++; + remain--; } } } From 8d4de5b22fb8fc5ca79b16d3f02fe06a068de657 Mon Sep 17 00:00:00 2001 From: Tomas Bosak Date: Wed, 3 Dec 2014 17:20:34 +0100 Subject: [PATCH 7/7] Improve collection API docs - add keyOptions attribute description which is also part of the result - add empty line before return codes header --- js/actions/api-collection.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/js/actions/api-collection.js b/js/actions/api-collection.js index 24b456fadb..742fc2e279 100644 --- a/js/actions/api-collection.js +++ b/js/actions/api-collection.js @@ -478,6 +478,15 @@ function get_api_collections (req, res) { /// - *journalSize*: The maximal size setting for journals / datafiles /// in bytes. /// +/// - *keyOptions*: JSON object which contains key generation options: +/// - *type*: specifies the type of the key generator. The currently +/// available generators are *traditional* and *autoincrement*. +/// - *allowUserKeys*: if set to *true*, then it is allowed to supply +/// own key values in the *_key* attribute of a document. If set to +/// *false*, then the key generator is solely responsible for +/// generating keys and supplying own key values in the *_key* attribute +/// of documents is considered an error. +/// /// - *isVolatile*: If *true* then the collection data will be /// kept in memory only and ArangoDB will not write or sync the data /// to disk. 
@@ -487,6 +496,7 @@ function get_api_collections (req, res) { /// /// - *shardKeys*: contains the names of document attributes that are used to /// determine the target shard for documents. +/// /// @RESTRETURNCODES /// /// @RESTRETURNCODE{400}