
Merge branch 'devel' of https://github.com/triAGENS/ArangoDB into devel

Jan Steemann 2014-12-03 17:46:03 +01:00
commit e4b51f616c
22 changed files with 253 additions and 137 deletions

@ -27,12 +27,13 @@ console.log('usernames:', usernames);
Creates a query function that performs the given query and returns the result.
The returned query function optionally takes an object as its argument. If an object is provided, its properties will be used as the query's bind parameters. Note that collection bind parameters need to be prefixed with an at-sign, e.g. `{'@myCollectionVar': 'my_collection_name'}`.
The returned query function optionally takes an object as its argument. If an object is provided, its properties will be used as the query's bind parameters. Any additional arguments will be passed to the transform function (or dropped if no transform function is defined).
*Parameters*
* *cfg*: an object with the following properties:
* *query*: an AQL query string or an ArangoDB Query Builder query object.
* *params* (optional): an array of parameter names.
* *context* (optional): an *applicationContext*.
* *model* (optional): a *Foxx.Model* that will be applied to the query results.
* *defaults* (optional): default values for the query's bind parameters. These can be overridden by passing a value for the same name to the query function.
@ -42,7 +43,9 @@ If *cfg* is a string, it will be used as the value of *cfg.query* instead.
If a *context* is specified, the values of all collection bind parameters will be passed through the context's *collectionName* method.
Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar`.
Note that collection bind parameters in AQL need to be referenced with two at-signs instead of one, e.g. `@@myCollectionVar` and their parameter name needs to be prefixed with an at-sign as well, e.g. `{'@myCollectionVar': 'collection_name'}`.
If *params* is provided, the query function will accept positional arguments instead of an object. If *params* is a string, it will be treated as an array containing that string.
If both *model* and *transform* are provided, the *transform* function will be applied to the result array _after_ the results have been converted into model instances. The *transform* function is always passed the entire result array and its return value will be returned by the query function.
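For example, a minimal sketch of this interplay (assuming a *User* model created with Foxx.Model.extend elsewhere):
```js
var query = Foxx.createQuery({
  query: 'FOR u IN _users RETURN u',
  model: User, // hypothetical Foxx.Model subclass
  transform: function (results) {
    // results already is an array of User model instances at this point
    return results.length ? results[0] : null;
  }
});
var firstUser = query(); // a User instance or null
```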
@ -62,6 +65,16 @@ var query = Foxx.createQuery('FOR u IN _users RETURN u[@propName]');
var usernames = query({propName: 'user'});
```
Using positional arguments:
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users RETURN u[@propName]',
params: ['propName']
});
var usernames = query('user');
```
Using models:
```js
@ -93,3 +106,44 @@ var query = Foxx.createQuery({
});
var user = query(); // first user by username
```
Using a transformation with extra arguments:
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]',
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query({propName: 'user'}, true); // username of first user in uppercase
query({propName: 'user'}, false); // username of first user in lowercase
```
Using a transformation with extra arguments (using positional arguments):
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u[@propName]',
params: ['propName'],
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query('user', true); // username of first user in uppercase
query('user', false); // username of first user in lowercase
```
Using a transformation with extra arguments (and no query parameters):
```js
var query = Foxx.createQuery({
query: 'FOR u IN _users SORT u.user ASC RETURN u.user',
params: false, // an empty array would work, too
transform: function (results, uppercase) {
return uppercase ? results[0].toUpperCase() : results[0].toLowerCase();
}
});
query(true); // username of first user in uppercase
query(false); // username of first user in lowercase
```
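The *defaults* option mentioned above can pre-fill bind parameters; a value passed for the same name at call time wins. A minimal sketch:
```js
var query = Foxx.createQuery({
  query: 'FOR u IN _users FILTER u.active == @active RETURN u',
  defaults: {active: true}
});
var activeUsers = query(); // @active falls back to the default (true)
var inactiveUsers = query({active: false}); // overrides the default
```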

@ -19,11 +19,13 @@ exports.repository = TodosRepository;
You can define custom query methods using Foxx.createQuery and Foxx.Repository.extend.
For more details see the chapter on [Foxx Queries](../Foxx/FoxxQueries.md).
*Examples*
Making a simple query in the repository and using it from the controller:
```javascript
```js
// in the repository
var Foxx = require("org/arangodb/foxx");
@ -41,29 +43,31 @@ ctrl.get("/", function(req, res) {
It is also possible to supply parameters to the query:
```javascript
```js
// in the repository
getPendingItemById: Foxx.createQuery(
'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo'
)
getPendingItemById: Foxx.createQuery({
query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo',
params: ['id']
})
// in the controller
ctrl.get("/:id", function(req, res) {
var id = req.params("id");
var rv = todosRepository.getPendingItemById({ id: id });
var rv = todosRepository.getPendingItemById(id);
res.json(rv);
});
```
The list of results can also be transformed before returning it from the repository:
```javascript
```js
// in the repository
getPendingItemById: Foxx.createQuery({
query: 'FOR todo IN my_todos FILTER todo.completed == false FILTER todo._key == @id RETURN todo',
transform: function(results, args) {
params: ['id'],
transform: function(results, extra) {
for (var i = 0; i < results.length; i++) {
results[i].extraProperty = args.extra;
results[i].extraProperty = extra;
}
return results;
}
})
@ -72,10 +76,7 @@ getPendingItemById: Foxx.createQuery({
ctrl.get("/:id", function(req, res) {
var id = req.params("id");
var extra = req.params("extra");
var rv = todosRepository.getPendingItemById(
{ id: id },
{ extra: extra }
);
var rv = todosRepository.getPendingItemById(id, extra);
res.json(rv);
});
```
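For completeness, a hedged sketch of how such query methods are attached when the repository is defined via *Foxx.Repository.extend* (mentioned at the top of this chapter):
```js
var Foxx = require("org/arangodb/foxx");
var TodosRepository = Foxx.Repository.extend({
  getPendingItems: Foxx.createQuery(
    'FOR todo IN my_todos FILTER todo.completed == false RETURN todo'
  )
});
exports.repository = TodosRepository;
```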

@ -108,6 +108,7 @@
* [FoxxController](Foxx/FoxxController.md)
* [FoxxModel](Foxx/FoxxModel.md)
* [FoxxRepository](Foxx/FoxxRepository.md)
* [Foxx Queries](Foxx/FoxxQueries.md)
* [Deploying Applications](Foxx/DeployingAnApplication.md)
* [Developing Applications](Foxx/DevelopingAnApplication.md)
* [Dependency Injection](Foxx/FoxxInjection.md)

@ -3445,7 +3445,7 @@ void UpdateBlock::work (std::vector<AqlItemBlock*>& blocks) {
TRI_json_t* old = TRI_JsonShapedJson(_collection->documentCollection()->getShaper(), &shapedJson);
if (old != nullptr) {
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json.json(), ep->_options.nullMeansRemove, ep->_options.mergeObjects);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, old);
if (patchedJson != nullptr) {

@ -251,8 +251,8 @@ ModificationOptions ExecutionPlan::createOptions (AstNode const* node) {
// nullMeansRemove is the opposite of keepNull
options.nullMeansRemove = value->isFalse();
}
else if (strcmp(name, "mergeArrays") == 0) {
options.mergeArrays = value->isTrue();
else if (strcmp(name, "mergeObjects") == 0) {
options.mergeObjects = value->isTrue();
}
}
}
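For context, this option name is what AQL data-modification queries now accept in their *OPTIONS* clause. A hedged sketch (collection name *users* assumed):
```js
// mergeObjects: false makes the patch value replace nested objects
// instead of being merged into them recursively
db._query(
  "FOR u IN users UPDATE u WITH { profile: { city: @city } } IN users " +
  "OPTIONS { mergeObjects: false }",
  { city: "Cologne" }
);
```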

@ -36,7 +36,7 @@ ModificationOptions::ModificationOptions (Json const& json) {
ignoreErrors = JsonHelper::getBooleanValue(array.json(), "ignoreErrors", false);
waitForSync = JsonHelper::getBooleanValue(array.json(), "waitForSync", false);
nullMeansRemove = JsonHelper::getBooleanValue(array.json(), "nullMeansRemove", false);
mergeArrays = JsonHelper::getBooleanValue(array.json(), "mergeArrays", false);
mergeObjects = JsonHelper::getBooleanValue(array.json(), "mergeObjects", false);
}
void ModificationOptions::toJson (triagens::basics::Json& json,
@ -46,7 +46,7 @@ void ModificationOptions::toJson (triagens::basics::Json& json,
("ignoreErrors", Json(ignoreErrors))
("waitForSync", Json(waitForSync))
("nullMeansRemove", Json(nullMeansRemove))
("mergeArrays", Json(mergeArrays));
("mergeObjects", Json(mergeObjects));
json ("modificationFlags", flags);
}
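Given the *toJson* method above, a serialized plan node carries the options roughly as follows (hedged sketch, other node attributes omitted):
```js
{
  "modificationFlags": {
    "ignoreErrors": false,
    "waitForSync": false,
    "nullMeansRemove": false,
    "mergeObjects": true
  }
}
```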

@ -54,7 +54,7 @@ namespace triagens {
: ignoreErrors(false),
waitForSync(false),
nullMeansRemove(false),
mergeArrays(false) {
mergeObjects(false) {
}
void toJson (triagens::basics::Json& json, TRI_memory_zone_t* zone) const;
@ -66,7 +66,7 @@ namespace triagens {
bool ignoreErrors;
bool waitForSync;
bool nullMeansRemove;
bool mergeArrays;
bool mergeObjects;
};

@ -1051,7 +1051,7 @@ int modifyDocumentOnCoordinator (
bool waitForSync,
bool isPatch,
bool keepNull, // only counts for isPatch == true
bool mergeArrays, // only counts for isPatch == true
bool mergeObjects, // only counts for isPatch == true
TRI_json_t* json,
map<string, string> const& headers,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,
@ -1116,11 +1116,11 @@ int modifyDocumentOnCoordinator (
if (! keepNull) {
revstr += "&keepNull=false";
}
if (mergeArrays) {
revstr += "&mergeArrays=true";
if (mergeObjects) {
revstr += "&mergeObjects=true";
}
else {
revstr += "&mergeArrays=false";
revstr += "&mergeObjects=false";
}
}
else {

@ -177,7 +177,7 @@ namespace triagens {
bool waitForSync,
bool isPatch,
bool keepNull, // only counts for isPatch == true
bool mergeArrays, // only counts for isPatch == true
bool mergeObjects, // only counts for isPatch == true
TRI_json_t* json,
std::map<std::string, std::string> const& headers,
triagens::rest::HttpResponse::HttpResponseCode& responseCode,

@ -1202,11 +1202,11 @@ bool RestDocumentHandler::replaceDocument () {
/// from the existing document that are contained in the patch document with an
/// attribute value of *null*.
///
/// @RESTQUERYPARAM{mergeArrays,boolean,optional}
/// Controls whether arrays (not lists) will be merged if present in both the
/// @RESTQUERYPARAM{mergeObjects,boolean,optional}
/// Controls whether objects (not arrays) will be merged if present in both the
/// existing and the patch document. If set to *false*, the value in the
/// patch document will overwrite the existing document's value. If set to
/// *true*, arrays will be merged. The default is *true*.
/// *true*, objects will be merged. The default is *true*.
///
/// @RESTQUERYPARAM{waitForSync,boolean,optional}
/// Wait until document has been synced to disk.
@ -1416,7 +1416,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
if (isPatch) {
// patching an existing document
bool nullMeansRemove;
bool mergeArrays;
bool mergeObjects;
bool found;
char const* valueStr = _request->value("keepNull", found);
if (! found || StringUtils::boolean(valueStr)) {
@ -1428,13 +1428,13 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
nullMeansRemove = true;
}
valueStr = _request->value("mergeArrays", found);
valueStr = _request->value("mergeObjects", found);
if (! found || StringUtils::boolean(valueStr)) {
// the default is true
mergeArrays = true;
mergeObjects = true;
}
else {
mergeArrays = false;
mergeObjects = false;
}
// read the existing document
@ -1487,7 +1487,7 @@ bool RestDocumentHandler::modifyDocument (bool isPatch) {
}
}
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, nullMeansRemove, mergeObjects);
TRI_FreeJson(shaper->_memoryZone, old);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);
@ -1593,14 +1593,14 @@ bool RestDocumentHandler::modifyDocumentCoordinator (
if (! strcmp(_request->value("keepNull"), "false")) {
keepNull = false;
}
bool mergeArrays = true;
if (TRI_EqualString(_request->value("mergeArrays"), "false")) {
mergeArrays = false;
bool mergeObjects = true;
if (TRI_EqualString(_request->value("mergeObjects"), "false")) {
mergeObjects = false;
}
int error = triagens::arango::modifyDocumentOnCoordinator(
dbname, collname, key, rev, policy, waitForSync, isPatch,
keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody);
keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody);
if (error != TRI_ERROR_NO_ERROR) {
generateTransactionError(collname, error);

@ -91,7 +91,7 @@ struct InsertOptions {
struct UpdateOptions {
bool overwrite = false;
bool keepNull = true;
bool mergeArrays = true;
bool mergeObjects = true;
bool waitForSync = false;
bool silent = false;
};
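These defaults correspond to the optional attributes of the options object accepted by *update* in the JavaScript API; a hedged arangosh sketch (document handle assumed):
```js
db.users.update("users/12345",
  { address: { city: "Cologne" } },
  { mergeObjects: false, keepNull: true }); // overwrite nested objects
```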
@ -702,7 +702,7 @@ static v8::Handle<v8::Value> ModifyVocbaseColCoordinator (
bool waitForSync,
bool isPatch,
bool keepNull, // only counts if isPatch==true
bool mergeArrays, // only counts if isPatch==true
bool mergeObjects, // only counts if isPatch==true
bool silent,
v8::Arguments const& argv) {
v8::HandleScope scope;
@ -736,7 +736,7 @@ static v8::Handle<v8::Value> ModifyVocbaseColCoordinator (
error = triagens::arango::modifyDocumentOnCoordinator(
dbname, collname, key, rev, policy, waitForSync, isPatch,
keepNull, mergeArrays, json, headers, responseCode, resultHeaders, resultBody);
keepNull, mergeObjects, json, headers, responseCode, resultHeaders, resultBody);
// Note that the json has been freed inside!
if (error != TRI_ERROR_NO_ERROR) {
@ -877,7 +877,7 @@ static v8::Handle<v8::Value> ReplaceVocbaseCol (bool useCollection,
options.waitForSync,
false, // isPatch
true, // keepNull, does not matter
false, // mergeArrays, does not matter
false, // mergeObjects, does not matter
options.silent,
argv));
}
@ -1084,7 +1084,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
TRI_v8_global_t* v8g = static_cast<TRI_v8_global_t*>(v8::Isolate::GetCurrent()->GetData());
if (argLength < 2 || argLength > 5) {
TRI_V8_EXCEPTION_USAGE(scope, "update(<document>, <data>, {overwrite: booleanValue, keepNull: booleanValue, mergeArrays: booleanValue, waitForSync: booleanValue})");
TRI_V8_EXCEPTION_USAGE(scope, "update(<document>, <data>, {overwrite: booleanValue, keepNull: booleanValue, mergeObjects: booleanValue, waitForSync: booleanValue})");
}
if (argLength > 2) {
@ -1097,8 +1097,8 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
if (optionsObject->Has(v8g->KeepNullKey)) {
options.keepNull = TRI_ObjectToBoolean(optionsObject->Get(v8g->KeepNullKey));
}
if (optionsObject->Has(v8g->MergeArraysKey)) {
options.mergeArrays = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeArraysKey));
if (optionsObject->Has(v8g->MergeObjectsKey)) {
options.mergeObjects = TRI_ObjectToBoolean(optionsObject->Get(v8g->MergeObjectsKey));
}
if (optionsObject->Has(v8g->WaitForSyncKey)) {
options.waitForSync = TRI_ObjectToBoolean(optionsObject->Get(v8g->WaitForSyncKey));
@ -1166,7 +1166,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
options.waitForSync,
true, // isPatch
options.keepNull,
options.mergeArrays,
options.mergeObjects,
options.silent,
argv));
}
@ -1233,7 +1233,7 @@ static v8::Handle<v8::Value> UpdateVocbaseCol (bool useCollection,
}
}
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! options.keepNull, options.mergeArrays);
TRI_json_t* patchedJson = TRI_MergeJson(TRI_UNKNOWN_MEM_ZONE, old, json, ! options.keepNull, options.mergeObjects);
TRI_FreeJson(zone, old);
TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, json);

@ -478,6 +478,15 @@ function get_api_collections (req, res) {
/// - *journalSize*: The maximal size setting for journals / datafiles
/// in bytes.
///
/// - *keyOptions*: JSON object which contains key generation options (see the example below):
/// - *type*: specifies the type of the key generator. The currently
/// available generators are *traditional* and *autoincrement*.
/// - *allowUserKeys*: if set to *true*, then users are allowed to supply
/// their own key values in the *_key* attribute of a document. If set to
/// *false*, then the key generator is solely responsible for
/// generating keys, and supplying own key values in the *_key* attribute
/// of documents is considered an error.
///
/// - *isVolatile*: If *true* then the collection data will be
/// kept in memory only and ArangoDB will not write or sync the data
/// to disk.
@ -487,6 +496,7 @@ function get_api_collections (req, res) {
///
/// - *shardKeys*: contains the names of document attributes that are used to
/// determine the target shard for documents.
///
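/// A hedged example of what *keyOptions* may look like in the response
/// (values assumed):
///
///     { "type" : "traditional", "allowUserKeys" : true }
///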
/// @RESTRETURNCODES
///
/// @RESTRETURNCODE{400}

@ -2896,7 +2896,7 @@ exports.clone = function (obj, seen) {
// Merge all the properties of source into target, source wins in conflict, and by default null and undefined from source are applied
exports.merge = function (target, source, isNullOverride /* = true */, isMergeArrays /* = true */) {
exports.merge = function (target, source, isNullOverride /* = true */, isMergeObjects /* = true */) {
exports.assert(target && typeof target === 'object', 'Invalid target value: must be an object');
exports.assert(source === null || source === undefined || typeof source === 'object', 'Invalid source value: must be null, undefined, or an object');
@ -2907,7 +2907,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr
if (Array.isArray(source)) {
exports.assert(Array.isArray(target), 'Cannot merge array onto an object');
if (isMergeArrays === false) { // isMergeArrays defaults to true
if (isMergeObjects === false) { // isMergeObjects defaults to true
target.length = 0; // Must not change target assignment
}
@ -2935,7 +2935,7 @@ exports.merge = function (target, source, isNullOverride /* = true */, isMergeAr
target[key] = exports.clone(value);
}
else {
exports.merge(target[key], value, isNullOverride, isMergeArrays);
exports.merge(target[key], value, isNullOverride, isMergeObjects);
}
}
else {
@ -4692,14 +4692,14 @@ var Hoek = require('hoek');
var internals = {};
exports = module.exports = internals.Topo = function () {
this._items = [];
this.nodes = [];
};
internals.Topo.prototype.add = function (nodes, options) {
var self = this;
@ -4716,17 +4716,17 @@ internals.Topo.prototype.add = function (nodes, options) {
Hoek.assert(after.indexOf(group) === -1, 'Item cannot come after itself:', group);
Hoek.assert(after.indexOf('?') === -1, 'Item cannot come after unassociated items');
([].concat(nodes)).forEach(function (node, i) {
var item = {
seq: self._items.length,
before: before,
after: after,
group: group,
node: node
};
self._items.push(item);
});
// Insert event
@ -4734,7 +4734,7 @@ internals.Topo.prototype.add = function (nodes, options) {
var error = this._sort();
Hoek.assert(!error, 'item', (group !== '?' ? 'added into group ' + group : ''), 'created a dependencies error');
return this.nodes;
};
@ -4746,7 +4746,7 @@ internals.Topo.prototype._sort = function () {
var graph = {};
var graphAfters = {};
for (var i = 0, il = this._items.length; i < il; ++i) {
var item = this._items[i];
var seq = item.seq; // Unique across all items
var group = item.group;
@ -4763,55 +4763,55 @@ internals.Topo.prototype._sort = function () {
// Build second intermediary graph with 'after'
var after = item.after;
for (var j = 0, jl = after.length; j < jl; ++j) {
graphAfters[after[j]] = (graphAfters[after[j]] || []).concat(seq);
}
}
// Expand intermediary graph
var graphNodes = Object.keys(graph);
for (i = 0, il = graphNodes.length; i < il; ++i) {
var node = graphNodes[i];
var expandedGroups = [];
var graphNodeItems = Object.keys(graph[node]);
for (j = 0, jl = graphNodeItems.length; j < jl; ++j) {
var group = graph[node][graphNodeItems[j]];
groups[group] = groups[group] || [];
groups[group].forEach(function (d) {
expandedGroups.push(d);
});
}
graph[node] = expandedGroups;
}
// Merge intermediary graph using graphAfters into final graph
var afterNodes = Object.keys(graphAfters);
for (i = 0, il = afterNodes.length; i < il; ++i) {
var group = afterNodes[i];
if (groups[group]) {
for (j = 0, jl = groups[group].length; j < jl; ++j) {
var node = groups[group][j];
graph[node] = graph[node].concat(graphAfters[group]);
}
}
}
// Compile ancestors
var ancestors = {};
graphNodes = Object.keys(graph);
for (i = 0, il = graphNodes.length; i < il; ++i) {
var node = graphNodes[i];
var children = graph[node];
for (j = 0, jl = children.length; j < jl; ++j) {
ancestors[children[j]] = (ancestors[children[j]] || []).concat(node);
}
}
// Topo sort
@ -4819,61 +4819,61 @@ internals.Topo.prototype._sort = function () {
var visited = {};
var sorted = [];
for (i = 0, il = this._items.length; i < il; ++i) {
var next = i;
if (ancestors[i]) {
next = null;
for (j = 0, jl = this._items.length; j < jl; ++j) {
if (visited[j] === true) {
continue;
}
if (!ancestors[j]) {
ancestors[j] = [];
}
var shouldSeeCount = ancestors[j].length;
var seenCount = 0;
for (var l = 0, ll = shouldSeeCount; l < ll; ++l) {
if (sorted.indexOf(ancestors[j][l]) >= 0) {
++seenCount;
}
}
if (seenCount === shouldSeeCount) {
next = j;
break;
}
}
}
if (next !== null) {
next = next.toString(); // Normalize to string TODO: replace with seq
visited[next] = true;
sorted.push(next);
}
}
if (sorted.length !== this._items.length) {
return new Error('Invalid dependencies');
}
var seqIndex = {};
this._items.forEach(function (item) {
seqIndex[item.seq] = item;
});
var sortedNodes = [];
this._items = sorted.map(function (value) {
var item = seqIndex[value];
sortedNodes.push(item.node);
return item;
});
this.nodes = sortedNodes;
};
},{"hoek":16}],23:[function(require,module,exports){

@ -1174,7 +1174,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync)
/// @param id the id of the document
/// @param overwrite (optional) a boolean value or a json object
/// @param keepNull (optional) determines if null values should be saved or not
/// @param mergeArrays (optional) whether or not array values should be merged
/// @param mergeObjects (optional) whether or not object values should be merged
/// @param waitForSync (optional) a boolean value.
/// @example update("example/996280832675", { a : 1, c : 2} )
/// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true)
@ -1213,10 +1213,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

@ -758,10 +758,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait
options.keepNull = true;
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

@ -1173,7 +1173,7 @@ ArangoCollection.prototype.replace = function (id, data, overwrite, waitForSync)
/// @param id the id of the document
/// @param overwrite (optional) a boolean value or a json object
/// @param keepNull (optional) determines if null values should be saved or not
/// @param mergeArrays (optional) whether or not array values should be merged
/// @param mergeObjects (optional) whether or not object values should be merged
/// @param waitForSync (optional) a boolean value.
/// @example update("example/996280832675", { a : 1, c : 2} )
/// @example update("example/996280832675", { a : 1, c : 2, x: null}, true, true, true)
@ -1212,10 +1212,10 @@ ArangoCollection.prototype.update = function (id, data, overwrite, keepNull, wai
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

@ -757,10 +757,10 @@ ArangoDatabase.prototype._update = function (id, data, overwrite, keepNull, wait
options.keepNull = true;
}
params = "?keepNull=" + options.keepNull;
if (! options.hasOwnProperty("mergeArrays")) {
options.mergeArrays = true;
if (! options.hasOwnProperty("mergeObjects")) {
options.mergeObjects = true;
}
params += "&mergeArrays=" + options.mergeArrays;
params += "&mergeObjects=" + options.mergeObjects;
if (options.hasOwnProperty("overwrite") && options.overwrite) {
params += "&policy=last";

@ -40,11 +40,22 @@ exports.createQuery = function createQuery (cfg) {
}
var query = cfg.query,
params = cfg.params,
context = cfg.context,
Model = cfg.model,
defaults = cfg.defaults,
transform = cfg.transform;
if (params === false) {
params = [];
} else if (params && !Array.isArray(params)) {
params = [params];
}
if (params && !params.every(function (v) {return typeof v === 'string';})) {
throw new Error('Argument names must be a string, an array of strings or false.');
}
if (!query || (typeof query !== 'string' && typeof query.toAQL !== 'function')) {
throw new Error('Expected query to be a string or a QueryBuilder instance.');
}
@ -61,7 +72,17 @@ exports.createQuery = function createQuery (cfg) {
throw new Error('Expected transform to be a function.');
}
return function query(vars, trArgs) {
return function query() {
var args = Array.prototype.slice.call(arguments);
var vars;
if (params) {
vars = {};
params.forEach(function (name) {
vars[name] = args.shift();
});
} else {
vars = args.shift();
}
vars = _.extend({}, defaults, vars);
if (context) {
_.each(vars, function (value, key) {
@ -76,7 +97,7 @@ exports.createQuery = function createQuery (cfg) {
return new Model(data);
});
}
return transform ? transform(result, trArgs) : result;
args.unshift(result);
return transform ? transform.apply(null, args) : result;
};
};

@ -41,7 +41,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
TRI_json_t const* lhs,
TRI_json_t const* rhs,
bool nullMeansRemove,
bool mergeArrays) {
bool mergeObjects) {
TRI_json_t* result = TRI_CopyJson(zone, lhs);
if (result == nullptr) {
@ -66,7 +66,7 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
// existing array does not have the attribute => append new attribute
if (value->_type == TRI_JSON_ARRAY) {
TRI_json_t* empty = TRI_CreateArrayJson(zone);
TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeArrays);
TRI_json_t* merged = MergeRecursive(zone, empty, value, nullMeansRemove, mergeObjects);
TRI_Insert3ArrayJson(zone, result, key->_value._string.data, merged);
TRI_FreeJson(zone, empty);
@ -77,8 +77,8 @@ static TRI_json_t* MergeRecursive (TRI_memory_zone_t* zone,
}
else {
// existing array already has the attribute => replace attribute
if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeArrays) {
TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeArrays);
if (lhsValue->_type == TRI_JSON_ARRAY && value->_type == TRI_JSON_ARRAY && mergeObjects) {
TRI_json_t* merged = MergeRecursive(zone, lhsValue, value, nullMeansRemove, mergeObjects);
TRI_ReplaceArrayJson(zone, result, key->_value._string.data, merged);
TRI_FreeJson(zone, merged);
}
@ -734,13 +734,13 @@ TRI_json_t* TRI_MergeJson (TRI_memory_zone_t* zone,
TRI_json_t const* lhs,
TRI_json_t const* rhs,
bool nullMeansRemove,
bool mergeArrays) {
bool mergeObjects) {
TRI_json_t* result;
TRI_ASSERT(lhs->_type == TRI_JSON_ARRAY);
TRI_ASSERT(rhs->_type == TRI_JSON_ARRAY);
result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeArrays);
result = MergeRecursive(zone, lhs, rhs, nullMeansRemove, mergeObjects);
return result;
}
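To make the merge semantics concrete, a hedged illustration in JavaScript terms (the C code operates on TRI_json_t values, but the effect on documents is the same):
```js
var oldDoc = { a: { x: 1 }, b: 1 };
var patch  = { a: { y: 2 } };
// mergeObjects true:  nested objects are merged recursively
//   => { a: { x: 1, y: 2 }, b: 1 }
// mergeObjects false: the patch value replaces the old attribute
//   => { a: { y: 2 }, b: 1 }
```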

@ -194,8 +194,16 @@ namespace triagens {
// we need to read at least one byte to make progress
bool progress;
std::cout << "ReadBufV:" << (unsigned long) _readBuffer.c_str() << " "
<< _readBuffer.length() << " "
<< _readBufferOffset << std::endl;
bool res = _connection->handleRead(remainingTime, _readBuffer, progress);
std::cout << "ReadBufN:" << (unsigned long) _readBuffer.c_str() << " "
<< _readBuffer.length() << " "
<< _readBufferOffset << std::endl;
// If there was an error, then we are doomed:
if (! res) {
std::cout << "doomed\n";
@ -313,6 +321,9 @@ namespace triagens {
_readBuffer.clear();
_readBufferOffset = 0;
std::cout << "ReadBufC:" << (unsigned long) _readBuffer.c_str() << " "
<< _readBuffer.length() << " "
<< _readBufferOffset << std::endl;
if (_result) {
_result->clear();
}
@ -494,11 +505,20 @@ namespace triagens {
// -----------------------------------------------------------------------------
void SimpleHttpClient::processHeader () {
TRI_ASSERT(_readBufferOffset <= _readBuffer.length());
size_t remain = _readBuffer.length() - _readBufferOffset;
char const* ptr = _readBuffer.c_str() + _readBufferOffset;
char const* pos = (char*) memchr(ptr, '\n', remain);
// We enforce the following invariants:
// ptr = _readBuffer.c_str() + _readBufferOffset
// _readBuffer.length() >= _readBufferOffset
// remain = _readBuffer.length() - _readBufferOffset
while (pos) {
TRI_ASSERT(_readBufferOffset <= _readBuffer.length());
TRI_ASSERT(ptr == _readBuffer.c_str() + _readBufferOffset);
TRI_ASSERT(remain == _readBuffer.length() - _readBufferOffset);
if (pos > ptr && *(pos - 1) == '\r') {
// adjust eol position
--pos;
@ -506,12 +526,16 @@ namespace triagens {
// end of header found
if (*ptr == '\r' || *ptr == '\0') {
size_t len = pos - (_readBuffer.c_str() + _readBufferOffset);
_readBufferOffset += (len + 1);
size_t len = pos - ptr;
_readBufferOffset += len + 1;
ptr += len + 1;
remain -= len + 1;
if (*pos == '\r') {
// adjust offset if line ended with \r\n
++_readBufferOffset;
ptr++;
remain--;
}
// handle chunks
@ -536,6 +560,7 @@ namespace triagens {
if (! _keepAlive) {
_connection->disconnect();
}
return;
}
// found content-length header in response
@ -573,14 +598,18 @@ namespace triagens {
}
ptr += len + 1;
TRI_ASSERT(remain >= (len + 1));
_readBufferOffset += len + 1;
remain -= (len + 1);
TRI_ASSERT(_readBufferOffset <= _readBuffer.length());
TRI_ASSERT(ptr == _readBuffer.c_str() + _readBufferOffset);
TRI_ASSERT(remain == _readBuffer.length() - _readBufferOffset);
pos = (char*) memchr(ptr, '\n', remain);
if (pos == nullptr) {
_readBufferOffset = ptr - _readBuffer.c_str() + 1;
_readBufferOffset++;
ptr++;
remain--;
}
}
}

@ -92,7 +92,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate)
KeyOptionsKey(),
LengthKey(),
LifeTimeKey(),
MergeArraysKey(),
MergeObjectsKey(),
NameKey(),
OperationIDKey(),
ParametersKey(),
@ -177,7 +177,7 @@ TRI_v8_global_s::TRI_v8_global_s (v8::Isolate* isolate)
KeyOptionsKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("keyOptions"));
LengthKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("length"));
LifeTimeKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("lifeTime"));
MergeArraysKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("mergeArrays"));
MergeObjectsKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("mergeObjects"));
NameKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("name"));
OperationIDKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("operationID"));
OverwriteKey = v8::Persistent<v8::String>::New(isolate, TRI_V8_SYMBOL("overwrite"));

@ -556,10 +556,10 @@ typedef struct TRI_v8_global_s {
v8::Persistent<v8::String> LifeTimeKey;
////////////////////////////////////////////////////////////////////////////////
/// @brief "mergeArrays" key name
/// @brief "mergeObjects" key name
////////////////////////////////////////////////////////////////////////////////
v8::Persistent<v8::String> MergeArraysKey;
v8::Persistent<v8::String> MergeObjectsKey;
////////////////////////////////////////////////////////////////////////////////
/// @brief "name" key