From fcf3c392c976b92e15c109c6d90f6b139638bbe8 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Tue, 8 Dec 2015 18:15:18 +0100 Subject: [PATCH 01/22] Add geo locations to the city navigator. --- .../org/arangodb/graph-examples/example-graph.js | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/js/common/modules/org/arangodb/graph-examples/example-graph.js b/js/common/modules/org/arangodb/graph-examples/example-graph.js index e4ee20f1fb..f179c905b6 100644 --- a/js/common/modules/org/arangodb/graph-examples/example-graph.js +++ b/js/common/modules/org/arangodb/graph-examples/example-graph.js @@ -81,11 +81,13 @@ var createRoutePlannerGraph = function() { ); var g = Graph._create("routeplanner", edgeDefinition); - var berlin = g.germanCity.save({_key: "Berlin", population : 3000000, isCapital : true}); - var cologne = g.germanCity.save({_key: "Cologne", population : 1000000, isCapital : false}); - var hamburg = g.germanCity.save({_key: "Hamburg", population : 1000000, isCapital : false}); - var lyon = g.frenchCity.save({_key: "Lyon", population : 80000, isCapital : false}); - var paris = g.frenchCity.save({_key: "Paris", population : 4000000, isCapital : true}); + var berlin = g.germanCity.save({_key: "Berlin", population : 3000000, isCapital : true, loc: [52.5167, 13.3833]}); + var cologne = g.germanCity.save({_key: "Cologne", population : 1000000, isCapital : false, loc: [50.9364, 6.9528]}); + var hamburg = g.germanCity.save({_key: "Hamburg", population : 1000000, isCapital : false, loc: [53.5653, 10.0014]}); + var lyon = g.frenchCity.save({_key: "Lyon", population : 80000, isCapital : false, loc: [45.7600, 4.8400]}); + var paris = g.frenchCity.save({_key: "Paris", population : 4000000, isCapital : true, loc: [48.8567, 2.3508]}); + g.germanCity.ensureGeoIndex("loc"); + g.frenchCity.ensureGeoIndex("loc"); g.germanHighway.save(berlin._id, cologne._id, {distance: 850}); g.germanHighway.save(berlin._id, hamburg._id, {distance: 400}); g.germanHighway.save(hamburg._id, cologne._id, {distance: 500}); From fb38aac8763d4825924c95e4a9d0b207acfb75d1 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 8 Dec 2015 18:20:37 +0100 Subject: [PATCH 02/22] updated CHANGELOG --- CHANGELOG | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index 9137123b5e..68232054ed 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,6 +1,12 @@ v2.8.0 (XXXX-XX-XX) ------------------- +* better error reporting for arangodump and arangorestore + +* arangodump will now fail by default when trying to dump edges that + refer to already dropped collections. 
This can be circumvented by + specifying the option `--force true` when invoking arangodump + * fixed cluster upgrade procedure From 6b4fbdc5a7c93c9dba98675c89aac0312f89f3fc Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 8 Dec 2015 18:41:53 +0100 Subject: [PATCH 03/22] Make the Foxx console behave as documented --- .../Books/Users/Foxx/Develop/Console.mdpp | 36 +++++++--- js/common/bootstrap/modules/console.js | 1 + .../modules/org/arangodb/foxx/console.js | 67 +++++++++++++++++-- .../modules/org/arangodb/foxx/service.js | 1 + 4 files changed, 90 insertions(+), 15 deletions(-) diff --git a/Documentation/Books/Users/Foxx/Develop/Console.mdpp b/Documentation/Books/Users/Foxx/Develop/Console.mdpp index c6666cdb1a..cc4b463023 100644 --- a/Documentation/Books/Users/Foxx/Develop/Console.mdpp +++ b/Documentation/Books/Users/Foxx/Develop/Console.mdpp @@ -1,9 +1,13 @@ !CHAPTER Foxx console -Foxx injects a **console** object into each Foxx app that allows writing log entries to the database and querying them from within the app itself. +Foxx injects a **console** object into each Foxx app that allows writing log entries to the database (in addition to the ArangoDB log file) and querying them from within the app itself. The **console** object supports the CommonJS Console API found in Node.js and modern browsers, while also providing some ArangoDB-specific additions. +ArangoDB also provides [the `console` module](../../ModuleConsole/README.md) which only supports the CommonJS Console API and only writes log entries to the ArangoDB log. + +When working with transactions, keep in mind that the Foxx console will attempt to write to the `_foxxlog` system collection. If you don't want to allow writes to the log collection during transactions, or want to avoid them for performance reasons, this behaviour can be disabled using the `setDatabaseLogging` method. + !SECTION Logging !SUBSECTION Logging console messages If the first argument is not a formatting string or any of the additional argume **Examples** ```js -var console = require("console"); - console.log("%s, %s!", "Hello", "World"); // => "Hello, World!" console.log("%s, World!", "Hello", "extra"); // => "Hello, World! extra" console.log("Hello,", "beautiful", "world!"); // => "Hello, beautiful world!" @@ -40,11 +42,11 @@ By default, `console.log` uses log level **INFO**, making it functionally equiva The built-in log levels are: -* -2: **TRACE** -* -1: **DEBUG** +* -200: **TRACE** +* -100: **DEBUG** * 0: **INFO** -* 1: **WARN** -* 2: **ERROR** +* 100: **WARN** +* 200: **ERROR** !SUBSECTION Logging with timers @@ -160,7 +162,7 @@ This method returns a function that logs messages with the given log level (e.g. **Parameter** * **name**: name of the log level as it appears in the database, usually all-uppercase -* **value** (optional): value of the log level. Default: `999` +* **value** (optional): value of the log level. Default: `50` The **value** is used when determining whether a log entry meets the minimum log level that can be defined in various places. For a list of the built-in log levels and their values see the section on logging with different log levels above. @@ -188,6 +190,24 @@ If **trace** is set to `true`, all log entries will be logged with a parsed stac Because this results in every logging call creating a stack trace (which may have a significant performance impact), this option is disabled by default.
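+
+For example, a custom log level can be combined with tracing like this (a minimal sketch; the level name, its weight and the logged message are only illustrative):
+
+```js
+// returns a logging function for the new log level "AUDIT" with weight 50
+var audit = console.custom('AUDIT', 50);
+
+// record a parsed stack trace with every subsequent log entry
+console.setTracing(true);
+
+audit('%s items imported', 42);
+```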
+!SUBSECTION Disabling logging to the ArangoDB console + +You can toggle whether logs should be written to the ArangoDB console. + +`console.setNativeLogging(nativeLogging)` + +If **nativeLogging** is set to `false`, log entries will not be logged to the ArangoDB console (which usually writes to the file system). + +!SUBSECTION Disabling logging to the database + +You can toggle whether logs should be written to the database. + +`console.setDatabaseLogging(databaseLogging)` + +If **databaseLogging** is set to `false`, log entries will not be logged to the internal `_foxxlog` collection. + +This is only useful if logging to the ArangoDB console is not also disabled. + !SUBSECTION Enabling assertion errors You can toggle whether console assertions should throw if they fail. diff --git a/js/common/bootstrap/modules/console.js b/js/common/bootstrap/modules/console.js index 41b5251a6a..157c4323d8 100644 --- a/js/common/bootstrap/modules/console.js +++ b/js/common/bootstrap/modules/console.js @@ -337,6 +337,7 @@ exports.infoLines = function () { //////////////////////////////////////////////////////////////////////////////// exports.log = exports.info; +exports._log = log; //////////////////////////////////////////////////////////////////////////////// /// @brief logLines diff --git a/js/server/modules/org/arangodb/foxx/console.js b/js/server/modules/org/arangodb/foxx/console.js index 1e6bff7157..1703ec2a05 100644 --- a/js/server/modules/org/arangodb/foxx/console.js +++ b/js/server/modules/org/arangodb/foxx/console.js @@ -31,11 +31,38 @@ var qb = require('aqb'); var util = require('util'); var extend = require('underscore').extend; +var arangoConsole = require('console'); var ErrorStackParser = require('error-stack-parser'); var AssertionError = require('assert').AssertionError; var exists = require('org/arangodb/is').existy; var db = require('org/arangodb').db; +const NATIVE_LOG_LEVELS = ['debug', 'info', 'warn', 'error']; + +function nativeLogger(level, levelNum, mount) { + let logLevel = String(level).toLowerCase(); + if (logLevel === 'trace' && levelNum === -200) { + logLevel = 'info'; // require('console').trace also uses INFO level + } + if (NATIVE_LOG_LEVELS.indexOf(logLevel) !== -1) { + return function (message) { + arangoConsole._log(logLevel, `${mount} ${message}`); + }; + } + if (levelNum >= 200) { + logLevel = 'error'; + } else if (levelNum >= 100) { + logLevel = 'warn'; + } else if (levelNum <= -100) { + logLevel = 'debug'; + } else { + logLevel = 'info'; + } + return function (message) { + arangoConsole._log(logLevel, `(${level}) ${mount} ${message}`); + }; +} + function ConsoleLogs(console) { this._console = console; this.defaultMaxAge = 2 * 60 * 60 * 1000; @@ -131,8 +158,10 @@ function Console(mount, tracing) { this._mount = mount; this._timers = Object.create(null); this._tracing = Boolean(tracing); + this._nativeLogging = true; + this._databaseLogging = true; this._logLevel = -999; - this._logLevels = {TRACE: -2}; + this._logLevels = {TRACE: -200}; this._assertThrows = false; this.logs = new ConsoleLogs(this); @@ -142,10 +171,10 @@ function Console(mount, tracing) { } }.bind(this)); - this.debug = this.custom('DEBUG', -1); + this.debug = this.custom('DEBUG', -100); this.info = this.custom('INFO', 0); - this.warn = this.custom('WARN', 1); - this.error = this.custom('ERROR', 2); + this.warn = this.custom('WARN', 100); + this.error = this.custom('ERROR', 200); this.assert.level = 'ERROR'; this.dir.level = 'INFO'; @@ -170,14 +199,28 @@ extend(Console.prototype, { level: level, 
levelNum: this._logLevels[level], time: Date.now(), - message: message + message: String(message) }; + let logLine; + + if (this._nativeLogging) { + logLine = nativeLogger(level, doc.levelNum, doc.mount); + doc.message.split('\n').forEach(logLine); + } + if (this._tracing) { - var e = new Error(); + let e = new Error(); Error.captureStackTrace(e, callee || this._log); e.stack = e.stack.replace(/\n+$/, ''); doc.stack = ErrorStackParser.parse(e).slice(1); + if (this._nativeLogging) { + e.stack.split('\n').slice(2).forEach(logLine); + } + } + + if (!this._databaseLogging) { + return; } if (!db._foxxlog) { @@ -240,7 +283,7 @@ extend(Console.prototype, { custom: function (level, weight) { level = String(level); weight = Number(weight); - weight = weight === weight ? weight : 999; + weight = weight === weight ? weight : 50; this._logLevels[level] = weight; var logWithLevel = function() { this._log(level, util.format.apply(null, arguments), logWithLevel); @@ -264,6 +307,16 @@ extend(Console.prototype, { return this._tracing; }, + setNativeLogging: function (nativeLogging) { + this._nativeLogging = Boolean(nativeLogging); + return this._nativeLogging; + }, + + setDatabaseLogging: function (databaseLogging) { + this._databaseLogging = Boolean(databaseLogging); + return this._databaseLogging; + }, + setAssertThrows: function (assertThrows) { this._assertThrows = Boolean(assertThrows); return this._assertThrows; diff --git a/js/server/modules/org/arangodb/foxx/service.js b/js/server/modules/org/arangodb/foxx/service.js index 28fd17a060..efd4d08c28 100644 --- a/js/server/modules/org/arangodb/foxx/service.js +++ b/js/server/modules/org/arangodb/foxx/service.js @@ -346,6 +346,7 @@ class FoxxService { filename = path.resolve(this.main.context.__dirname, filename); var module = new Module(filename, this.main); + module.context.console = this.main.context.console; module.context.applicationContext = _.extend( new AppContext(this.main.context.applicationContext._service), this.main.context.applicationContext, From 6cedb210df1d843c0598fd92d608e0d82a71e13a Mon Sep 17 00:00:00 2001 From: Max Neunhoeffer Date: Tue, 8 Dec 2015 18:54:05 +0100 Subject: [PATCH 04/22] First try to fix deadlock problem. Tests not yet tried. --- arangod/Cluster/AgencyComm.cpp | 19 ++ arangod/Cluster/AgencyComm.h | 6 + arangod/Cluster/ClusterInfo.cpp | 327 ++++++++++++++++--------------- arangod/Cluster/ClusterInfo.h | 2 +- arangod/V8Server/v8-vocindex.cpp | 2 +- 5 files changed, 195 insertions(+), 161 deletions(-) diff --git a/arangod/Cluster/AgencyComm.cpp b/arangod/Cluster/AgencyComm.cpp index 5b8bfeb675..c965d9f3b3 100644 --- a/arangod/Cluster/AgencyComm.cpp +++ b/arangod/Cluster/AgencyComm.cpp @@ -35,6 +35,7 @@ #include "Basics/WriteLocker.h" #include "Basics/json.h" #include "Basics/logging.h" +#include "Basics/random.h" #include "Cluster/ServerState.h" #include "Rest/Endpoint.h" #include "Rest/HttpRequest.h" @@ -992,6 +993,24 @@ bool AgencyComm::increaseVersion (std::string const& key) { return result.successful(); } +//////////////////////////////////////////////////////////////////////////////// +/// @brief update a version number in the agency, retry until it works +//////////////////////////////////////////////////////////////////////////////// + +void AgencyComm::increaseVersionRepeated (std::string const& key) { + bool ok = false; + while (! 
ok) { + ok = increaseVersion(key); + if (ok) { + return; + } + uint32_t val = 300 + TRI_UInt32Random() % 400; + LOG_INFO("Could not increase %s in agency, retrying in %dms!", + key.c_str(), val); + usleep(val * 1000); + } +} + //////////////////////////////////////////////////////////////////////////////// /// @brief creates a directory in the backend //////////////////////////////////////////////////////////////////////////////// diff --git a/arangod/Cluster/AgencyComm.h b/arangod/Cluster/AgencyComm.h index a0a38e75f8..c667e54d7b 100644 --- a/arangod/Cluster/AgencyComm.h +++ b/arangod/Cluster/AgencyComm.h @@ -449,6 +449,12 @@ namespace triagens { bool increaseVersion (std::string const&); +//////////////////////////////////////////////////////////////////////////////// +/// @brief update a version number in the agency, retry until it works +//////////////////////////////////////////////////////////////////////////////// + + void increaseVersionRepeated (std::string const& key); + //////////////////////////////////////////////////////////////////////////////// /// @brief creates a directory in the backend //////////////////////////////////////////////////////////////////////////////// diff --git a/arangod/Cluster/ClusterInfo.cpp b/arangod/Cluster/ClusterInfo.cpp index b352c7d380..e0badaefc0 100644 --- a/arangod/Cluster/ClusterInfo.cpp +++ b/arangod/Cluster/ClusterInfo.cpp @@ -332,7 +332,7 @@ void ClusterInfo::flush () { loadCurrentCoordinators(); loadPlannedDatabases(); loadCurrentDatabases(); - loadPlannedCollections(true); + loadPlannedCollections(); loadCurrentCollections(true); } @@ -648,7 +648,7 @@ void ClusterInfo::loadCurrentDatabases () { static const std::string prefixPlannedCollections = "Plan/Collections"; -void ClusterInfo::loadPlannedCollections (bool acquireLock) { +void ClusterInfo::loadPlannedCollections () { uint64_t storedVersion = _plannedCollectionsProt.version; MUTEX_LOCKER(_plannedCollectionsProt.mutex); @@ -660,19 +660,14 @@ void ClusterInfo::loadPlannedCollections (bool acquireLock) { // Now contact the agency: AgencyCommResult result; { - if (acquireLock) { - AgencyCommLocker locker("Plan", "READ"); + AgencyCommLocker locker("Plan", "READ"); - if (locker.successful()) { - result = _agency.getValues(prefixPlannedCollections, true); - } - else { - LOG_ERROR("Error while locking %s", prefixPlannedCollections.c_str()); - return; - } + if (locker.successful()) { + result = _agency.getValues(prefixPlannedCollections, true); } else { - result = _agency.getValues(prefixPlannedCollections, true); + LOG_ERROR("Error while locking %s", prefixPlannedCollections.c_str()); + return; } } @@ -768,7 +763,7 @@ shared_ptr ClusterInfo::getCollection int tries = 0; if (! 
_plannedCollectionsProt.isValid) { - loadPlannedCollections(true); + loadPlannedCollections(); ++tries; } @@ -792,7 +787,7 @@ shared_ptr ClusterInfo::getCollection } // must load collections outside the lock - loadPlannedCollections(true); + loadPlannedCollections(); } return shared_ptr(new CollectionInfo()); @@ -848,7 +843,7 @@ const std::vector > ClusterInfo::getCollections std::vector > result; // always reload - loadPlannedCollections(true); + loadPlannedCollections(); READ_LOCKER(_plannedCollectionsProt.lock); // look up database by id @@ -1187,7 +1182,7 @@ int ClusterInfo::dropDatabaseCoordinator (string const& name, string& errorMsg, // Load our own caches: loadPlannedDatabases(); - loadPlannedCollections(true); + loadPlannedCollections(); // Now wait for it to appear and be complete: res.clear(); @@ -1242,50 +1237,43 @@ int ClusterInfo::createCollectionCoordinator (string const& databaseName, const double endTime = TRI_microtime() + realTimeout; const double interval = getPollInterval(); { - AgencyCommLocker locker("Plan", "WRITE"); + // check if a collection with the same name is already planned + loadPlannedCollections(); + READ_LOCKER(_plannedCollectionsProt.lock); + AllCollections::const_iterator it = _plannedCollections.find(databaseName); + if (it != _plannedCollections.end()) { + const std::string name = JsonHelper::getStringValue(json, "name", ""); - if (! locker.successful()) { - return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN, errorMsg); - } + DatabaseCollections::const_iterator it2 = (*it).second.find(name); - { - // check if a collection with the same name is already planned - loadPlannedCollections(false); - - READ_LOCKER(_plannedCollectionsProt.lock); - AllCollections::const_iterator it = _plannedCollections.find(databaseName); - if (it != _plannedCollections.end()) { - const std::string name = JsonHelper::getStringValue(json, "name", ""); - - DatabaseCollections::const_iterator it2 = (*it).second.find(name); - - if (it2 != (*it).second.end()) { - // collection already exists! - return TRI_ERROR_ARANGO_DUPLICATE_NAME; - } + if (it2 != (*it).second.end()) { + // collection already exists! + return TRI_ERROR_ARANGO_DUPLICATE_NAME; } } - - if (! ac.exists("Plan/Databases/" + databaseName)) { - return setErrormsg(TRI_ERROR_ARANGO_DATABASE_NOT_FOUND, errorMsg); - } - - if (ac.exists("Plan/Collections/" + databaseName + "/" + collectionID)) { - return setErrormsg(TRI_ERROR_CLUSTER_COLLECTION_ID_EXISTS, errorMsg); - } - - AgencyCommResult result - = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, - json, 0.0); - if (!result.successful()) { - return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN, - errorMsg); - } } + if (! 
ac.exists("Plan/Databases/" + databaseName)) { + return setErrormsg(TRI_ERROR_ARANGO_DATABASE_NOT_FOUND, errorMsg); + } + + if (ac.exists("Plan/Collections/" + databaseName + "/" + collectionID)) { + return setErrormsg(TRI_ERROR_CLUSTER_COLLECTION_ID_EXISTS, errorMsg); + } + + AgencyCommResult result + = ac.casValue("Plan/Collections/" + databaseName + "/" + collectionID, + json, false, 0.0, 0.0); + if (!result.successful()) { + return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN, + errorMsg); + } + + ac.increaseVersionRepeated("Plan/Version"); + // Update our cache: - loadPlannedCollections(true); + loadPlannedCollections(); // Now wait for it to appear and be complete: AgencyCommResult res = ac.getValues("Current/Version", false); @@ -1329,7 +1317,7 @@ int ClusterInfo::createCollectionCoordinator (string const& databaseName, errorMsg = "Error in creation of collection:" + tmpMsg; return TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION; } - loadPlannedCollections(true); + loadPlannedCollections(); return setErrormsg(TRI_ERROR_NO_ERROR, errorMsg); } } @@ -1383,7 +1371,7 @@ int ClusterInfo::dropCollectionCoordinator (string const& databaseName, } // Update our own cache: - loadPlannedCollections(true); + loadPlannedCollections(); // Now wait for it to appear and be complete: res.clear(); @@ -1436,56 +1424,58 @@ int ClusterInfo::setCollectionPropertiesCoordinator (string const& databaseName, AgencyComm ac; AgencyCommResult res; - AgencyCommLocker locker("Plan", "WRITE"); + { + AgencyCommLocker locker("Plan", "WRITE"); - if (! locker.successful()) { - return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN; + if (! locker.successful()) { + return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN; + } + + if (! ac.exists("Plan/Databases/" + databaseName)) { + return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND; + } + + res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false); + + if (! res.successful()) { + return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; + } + + res.parse("", false); + std::map::const_iterator it = res._values.begin(); + + if (it == res._values.end()) { + return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; + } + + TRI_json_t* json = (*it).second._json; + if (json == nullptr) { + return TRI_ERROR_OUT_OF_MEMORY; + } + + TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json); + if (copy == nullptr) { + return TRI_ERROR_OUT_OF_MEMORY; + } + + TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact"); + TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize"); + TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync"); + TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets"); + + TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_doCompact)); + TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_maximalSize)); + TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_waitForSync)); + TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_indexBuckets)); + + res.clear(); + res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0); + + TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy); } - if (! ac.exists("Plan/Databases/" + databaseName)) { - return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND; - } - - res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false); - - if (! 
res.successful()) { - return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; - } - - res.parse("", false); - std::map::const_iterator it = res._values.begin(); - - if (it == res._values.end()) { - return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; - } - - TRI_json_t* json = (*it).second._json; - if (json == nullptr) { - return TRI_ERROR_OUT_OF_MEMORY; - } - - TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json); - if (copy == nullptr) { - return TRI_ERROR_OUT_OF_MEMORY; - } - - TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact"); - TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize"); - TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync"); - TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets"); - - TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "doCompact", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_doCompact)); - TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "journalSize", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_maximalSize)); - TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "waitForSync", TRI_CreateBooleanJson(TRI_UNKNOWN_MEM_ZONE, info->_waitForSync)); - TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "indexBuckets", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, info->_indexBuckets)); - - res.clear(); - res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0); - - TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy); - if (res.successful()) { - loadPlannedCollections(false); + loadPlannedCollections(); return TRI_ERROR_NO_ERROR; } @@ -1502,56 +1492,58 @@ int ClusterInfo::setCollectionStatusCoordinator (string const& databaseName, AgencyComm ac; AgencyCommResult res; - AgencyCommLocker locker("Plan", "WRITE"); + { + AgencyCommLocker locker("Plan", "WRITE"); - if (! locker.successful()) { - return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN; + if (! locker.successful()) { + return TRI_ERROR_CLUSTER_COULD_NOT_LOCK_PLAN; + } + + if (! ac.exists("Plan/Databases/" + databaseName)) { + return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND; + } + + res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false); + + if (! res.successful()) { + return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; + } + + res.parse("", false); + std::map::const_iterator it = res._values.begin(); + + if (it == res._values.end()) { + return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; + } + + TRI_json_t* json = (*it).second._json; + if (json == nullptr) { + return TRI_ERROR_OUT_OF_MEMORY; + } + + TRI_vocbase_col_status_e old = (TRI_vocbase_col_status_e) triagens::basics::JsonHelper::getNumericValue(json, "status", (int) TRI_VOC_COL_STATUS_CORRUPTED); + + if (old == status) { + // no status change + return TRI_ERROR_NO_ERROR; + } + + TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json); + if (copy == nullptr) { + return TRI_ERROR_OUT_OF_MEMORY; + } + + TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status"); + TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, status)); + + res.clear(); + res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0); + + TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy); } - if (! ac.exists("Plan/Databases/" + databaseName)) { - return TRI_ERROR_ARANGO_DATABASE_NOT_FOUND; - } - - res = ac.getValues("Plan/Collections/" + databaseName + "/" + collectionID, false); - - if (! 
res.successful()) { - return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; - } - - res.parse("", false); - std::map::const_iterator it = res._values.begin(); - - if (it == res._values.end()) { - return TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND; - } - - TRI_json_t* json = (*it).second._json; - if (json == nullptr) { - return TRI_ERROR_OUT_OF_MEMORY; - } - - TRI_vocbase_col_status_e old = (TRI_vocbase_col_status_e) triagens::basics::JsonHelper::getNumericValue(json, "status", (int) TRI_VOC_COL_STATUS_CORRUPTED); - - if (old == status) { - // no status change - return TRI_ERROR_NO_ERROR; - } - - TRI_json_t* copy = TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, json); - if (copy == nullptr) { - return TRI_ERROR_OUT_OF_MEMORY; - } - - TRI_DeleteObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status"); - TRI_Insert3ObjectJson(TRI_UNKNOWN_MEM_ZONE, copy, "status", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, status)); - - res.clear(); - res = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, copy, 0.0); - - TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, copy); - if (res.successful()) { - loadPlannedCollections(false); + loadPlannedCollections(); return TRI_ERROR_NO_ERROR; } @@ -1596,6 +1588,17 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, string const idString = triagens::basics::StringUtils::itoa(iid); + string const key = "Plan/Collections/" + databaseName + "/" + collectionID; + AgencyCommResult previous = ac.getValues(key, false); + previous.parse("", false); + auto it = previous._values.begin(); + TRI_ASSERT(it != previous._values.end()); + TRI_json_t const* previousVal = it->second._json; + + loadPlannedCollections(); + // It is possible that between the fetching of the planned collections + // and the write lock we acquire below something has changed. Therefore + // we first get the previous value and then do a compare and swap operation. { TRI_json_t* collectionJson = nullptr; AgencyCommLocker locker("Plan", "WRITE"); @@ -1605,7 +1608,6 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, } { - loadPlannedCollections(false); shared_ptr c = getCollection(databaseName, collectionID); @@ -1704,9 +1706,8 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, TRI_PushBack3ArrayJson(TRI_UNKNOWN_MEM_ZONE, idx, TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, newIndex)); - AgencyCommResult result = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, - collectionJson, - 0.0); + AgencyCommResult result = ac.casValue(key, previousVal, collectionJson, + 0.0, 0.0); TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, collectionJson); @@ -1718,7 +1719,7 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, } // reload our own cache: - loadPlannedCollections(true); + loadPlannedCollections(); TRI_ASSERT(numberOfShards > 0); @@ -1819,6 +1820,17 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName, int numberOfShards = 0; string const idString = triagens::basics::StringUtils::itoa(iid); + string const key = "Plan/Collections/" + databaseName + "/" + collectionID; + AgencyCommResult previous = ac.getValues(key, false); + previous.parse("", false); + auto it = previous._values.begin(); + TRI_ASSERT(it != previous._values.end()); + TRI_json_t const* previousVal = it->second._json; + + loadPlannedCollections(); + // It is possible that between the fetching of the planned collections + // and the write lock we acquire below something has changed. Therefore + // we first get the previous value and then do a compare and swap operation. 
{ AgencyCommLocker locker("Plan", "WRITE"); @@ -1830,8 +1842,6 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName, TRI_json_t const* indexes = nullptr; { - loadPlannedCollections(false); - shared_ptr c = getCollection(databaseName, collectionID); READ_LOCKER(_plannedCollectionsProt.lock); @@ -1907,9 +1917,8 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName, return setErrormsg(TRI_ERROR_ARANGO_INDEX_NOT_FOUND, errorMsg); } - AgencyCommResult result = ac.setValue("Plan/Collections/" + databaseName + "/" + collectionID, - collectionJson, - 0.0); + AgencyCommResult result = ac.casValue(key, previousVal, collectionJson, + 0.0, 0.0); TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, collectionJson); @@ -1920,7 +1929,7 @@ int ClusterInfo::dropIndexCoordinator (string const& databaseName, } // load our own cache: - loadPlannedCollections(true); + loadPlannedCollections(); TRI_ASSERT(numberOfShards > 0); @@ -2358,7 +2367,7 @@ int ClusterInfo::getResponsibleShard (CollectionID const& collectionID, // from Plan, since they are immutable. Later we will have to switch // this to Current, when we allow to add and remove shards. if (! _plannedCollectionsProt.isValid) { - loadPlannedCollections(true); + loadPlannedCollections(); } int tries = 0; @@ -2397,7 +2406,7 @@ int ClusterInfo::getResponsibleShard (CollectionID const& collectionID, if (++tries >= 2) { break; } - loadPlannedCollections(true); + loadPlannedCollections(); } if (! found) { diff --git a/arangod/Cluster/ClusterInfo.h b/arangod/Cluster/ClusterInfo.h index e15d148723..a1b86c3db0 100644 --- a/arangod/Cluster/ClusterInfo.h +++ b/arangod/Cluster/ClusterInfo.h @@ -808,7 +808,7 @@ namespace triagens { /// Usually one does not have to call this directly. //////////////////////////////////////////////////////////////////////////////// - void loadPlannedCollections (bool); + void loadPlannedCollections (); //////////////////////////////////////////////////////////////////////////////// /// @brief (re-)load the information about planned databases diff --git a/arangod/V8Server/v8-vocindex.cpp b/arangod/V8Server/v8-vocindex.cpp index 90698c5fa7..ad1fc52e53 100644 --- a/arangod/V8Server/v8-vocindex.cpp +++ b/arangod/V8Server/v8-vocindex.cpp @@ -1124,7 +1124,7 @@ static void CreateCollectionCoordinator (const v8::FunctionCallbackInfoloadPlannedCollections(true); + ci->loadPlannedCollections(); shared_ptr const& c = ci->getCollection(databaseName, cid); TRI_vocbase_col_t* newcoll = CoordinatorCollection(vocbase, *c); From 1e54b063eead0fdaab68fae36d92115cf310c650 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Tue, 8 Dec 2015 19:32:29 +0100 Subject: [PATCH 05/22] Add a subquery example that combines geo indices with graph traversals. --- .../AqlExamples/CombiningGraphTraversals.mdpp | 78 +++++++++++++++++++ Documentation/Books/Users/SUMMARY.md | 1 + 2 files changed, 79 insertions(+) create mode 100644 Documentation/Books/Users/AqlExamples/CombiningGraphTraversals.mdpp diff --git a/Documentation/Books/Users/AqlExamples/CombiningGraphTraversals.mdpp b/Documentation/Books/Users/AqlExamples/CombiningGraphTraversals.mdpp new file mode 100644 index 0000000000..caff0153da --- /dev/null +++ b/Documentation/Books/Users/AqlExamples/CombiningGraphTraversals.mdpp @@ -0,0 +1,78 @@ +!CHAPTER Combining Graph Traversals +!SUBSECTION Finding the start vertex via a geo query +Our first example will locate the start vertex for a graph traversal via [a geo index](../IndexHandling/Geo.md). 
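+The city collections of this example graph store each city's latitude and longitude in a `loc` attribute, and the example loader in `example-graph.js` creates a geo index on that attribute for both city collections. A sketch of the relevant part of the loader (only one city shown):
+
+```js
+// every city document stores its coordinates in the "loc" attribute ...
+var berlin = g.germanCity.save({_key: "Berlin", population : 3000000, isCapital : true, loc: [52.5167, 13.3833]});
+// ... and both city collections get a geo index on that attribute
+g.germanCity.ensureGeoIndex("loc");
+g.frenchCity.ensureGeoIndex("loc");
+```
+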
+We use [the city graph](../Graphs/README.md#the-city-graph) and its geo indices: + +![Cities Example Graph](../Graphs/cities_graph.png) + + + @startDocuBlockInline COMBINING_GRAPH_01_create_graph + @EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_01_create_graph} + ~addIgnoreCollection("germanHighway"); + ~addIgnoreCollection("germanCity"); + ~addIgnoreCollection("frenchHighway"); + ~addIgnoreCollection("frenchCity"); + ~addIgnoreCollection("internationalHighway"); + var examples = require("org/arangodb/graph-examples/example-graph.js"); + var g = examples.loadGraph("routeplanner"); + var bonn=[50.7340, 7.0998]; + |db._query(`FOR startCity IN + | WITHIN(germanCity, @lat, @long, @radius) + | RETURN startCity`, + | {lat: bonn[0], long: bonn[1], radius: 400000} + ).toArray() + @END_EXAMPLE_ARANGOSH_OUTPUT + @endDocuBlock COMBINING_GRAPH_01_create_graph + +We search for all German cities within a range of 400 km around the former capital **Bonn** and find **Hamburg** and **Cologne**. +We won't find **Paris** since it is stored in the `frenchCity` collection. + + @startDocuBlockInline COMBINING_GRAPH_02_combine + @EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_02_combine} + ~var bonn=[50.7340, 7.0998] + |db._query(`FOR startCity IN + | WITHIN(germanCity, @lat, @long, @radius) + | FOR v, e, p IN 1..1 OUTBOUND startCity + | GRAPH 'routeplanner' + | RETURN {startcity: startCity._key, traversedCity: v}`, + |{ + | lat: bonn[0], + | long: bonn[1], + | radius: 400000 + } ).toArray() + @END_EXAMPLE_ARANGOSH_OUTPUT + @endDocuBlock COMBINING_GRAPH_02_combine + +The geo index query returns the cities **Cologne** and **Hamburg** as `startCity`, which we then use as starting points for our graph traversal. For simplicity we only return their direct neighbours. We format the result so that we can see from which `startCity` each traversal came.
+Alternatively, we could use a `LET` statement with a subquery to group the traversals by their `startCity` efficiently: + + @startDocuBlockInline COMBINING_GRAPH_03_combine_let + @EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_03_combine_let} + ~var bonn=[50.7340, 7.0998]; + |db._query(`FOR startCity IN + | WITHIN(germanCity, @lat, @long, @radius) + | LET oneCity = (FOR v, e, p IN 1..1 OUTBOUND startCity + | GRAPH 'routeplanner' RETURN v) + | return {startCity: startCity._key, connectedCities: oneCity}`, + |{ + | lat: bonn[0], + | long: bonn[1], + | radius: 400000 + } ).toArray(); + @END_EXAMPLE_ARANGOSH_OUTPUT + @endDocuBlock COMBINING_GRAPH_03_combine_let + +Finally, we clean up again: + + @startDocuBlockInline COMBINING_GRAPH_04_cleanup + @EXAMPLE_ARANGOSH_OUTPUT{COMBINING_GRAPH_04_cleanup} + ~var examples = require("org/arangodb/graph-examples/example-graph.js"); + examples.dropGraph("routeplanner"); + ~removeIgnoreCollection("germanHighway"); + ~removeIgnoreCollection("germanCity"); + ~removeIgnoreCollection("frenchHighway"); + ~removeIgnoreCollection("frenchCity"); + ~removeIgnoreCollection("internationalHighway"); + @END_EXAMPLE_ARANGOSH_OUTPUT + @endDocuBlock COMBINING_GRAPH_04_cleanup diff --git a/Documentation/Books/Users/SUMMARY.md b/Documentation/Books/Users/SUMMARY.md index 23531231d5..222eba8fa7 100644 --- a/Documentation/Books/Users/SUMMARY.md +++ b/Documentation/Books/Users/SUMMARY.md @@ -102,6 +102,7 @@ * [Projections and filters](AqlExamples/ProjectionsAndFilters.md) * [Joins](AqlExamples/Join.md) * [Grouping](AqlExamples/Grouping.md) + * [Traversals](AqlExamples/CombiningGraphTraversals.md) * [Graphs](Graphs/README.md) * [General Graphs](GeneralGraphs/README.md) * [Graph Management](GeneralGraphs/Management.md) From 9d072c195d693cbca6378aafdff0419cfc241d52 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Tue, 8 Dec 2015 19:34:17 +0100 Subject: [PATCH 06/22] New samples --- .../COMBINING_GRAPH_01_create_graph.generated | 32 ++++++++ .../COMBINING_GRAPH_02_combine.generated | 82 +++++++++++++++++++ .../COMBINING_GRAPH_03_combine_let.generated | 77 +++++++++++++++++ .../COMBINING_GRAPH_04_cleanup.generated | 2 + 4 files changed, 193 insertions(+) create mode 100644 Documentation/Examples/COMBINING_GRAPH_01_create_graph.generated create mode 100644 Documentation/Examples/COMBINING_GRAPH_02_combine.generated create mode 100644 Documentation/Examples/COMBINING_GRAPH_03_combine_let.generated create mode 100644 Documentation/Examples/COMBINING_GRAPH_04_cleanup.generated diff --git a/Documentation/Examples/COMBINING_GRAPH_01_create_graph.generated b/Documentation/Examples/COMBINING_GRAPH_01_create_graph.generated new file mode 100644 index 0000000000..77b4973f22 --- /dev/null +++ b/Documentation/Examples/COMBINING_GRAPH_01_create_graph.generated @@ -0,0 +1,32 @@ +arangosh> var examples = require("org/arangodb/graph-examples/example-graph.js"); +arangosh> var g = examples.loadGraph("routeplanner"); +arangosh> var bonn=[50.7340, 7.0998]; +arangosh> db._query(`FOR startCity IN +........> WITHIN(germanCity, @lat, @long, @radius) +........> RETURN startCity`, +........> {lat: bonn[0], long: bonn[1], radius: 400000} +........> ).toArray() +[ + { + "isCapital" : false, + "population" : 1000000, + "loc" : [ + 50.9364, + 6.9528 + ], + "_id" : "germanCity/Cologne", + "_rev" : "24341039", + "_key" : "Cologne" + }, + { + "isCapital" : false, + "population" : 1000000, + "loc" : [ + 53.5653, + 10.0014 + ], + "_id" : "germanCity/Hamburg", + "_rev" : "24537647", + "_key" : "Hamburg" + } +] diff
--git a/Documentation/Examples/COMBINING_GRAPH_02_combine.generated b/Documentation/Examples/COMBINING_GRAPH_02_combine.generated new file mode 100644 index 0000000000..03263e32eb --- /dev/null +++ b/Documentation/Examples/COMBINING_GRAPH_02_combine.generated @@ -0,0 +1,82 @@ +arangosh> db._query(`FOR startCity IN +........> WITHIN(germanCity, @lat, @long, @radius) +........> FOR v, e, p IN 1..1 OUTBOUND startCity +........> GRAPH 'routeplanner' +........> RETURN {startcity: startCity._key, traversedCity: v}`, +........> { +........> lat: bonn[0], +........> long: bonn[1], +........> radius: 400000 +........> } ).toArray() +[ + { + "startcity" : "Cologne", + "traversedCity" : { + "isCapital" : false, + "population" : 80000, + "loc" : [ + 45.76, + 4.84 + ], + "_id" : "frenchCity/Lyon", + "_rev" : "25061935", + "_key" : "Lyon" + } + }, + { + "startcity" : "Cologne", + "traversedCity" : { + "isCapital" : true, + "population" : 4000000, + "loc" : [ + 48.8567, + 2.3508 + ], + "_id" : "frenchCity/Paris", + "_rev" : "25258543", + "_key" : "Paris" + } + }, + { + "startcity" : "Hamburg", + "traversedCity" : { + "isCapital" : true, + "population" : 4000000, + "loc" : [ + 48.8567, + 2.3508 + ], + "_id" : "frenchCity/Paris", + "_rev" : "25258543", + "_key" : "Paris" + } + }, + { + "startcity" : "Hamburg", + "traversedCity" : { + "isCapital" : false, + "population" : 80000, + "loc" : [ + 45.76, + 4.84 + ], + "_id" : "frenchCity/Lyon", + "_rev" : "25061935", + "_key" : "Lyon" + } + }, + { + "startcity" : "Hamburg", + "traversedCity" : { + "isCapital" : false, + "population" : 1000000, + "loc" : [ + 50.9364, + 6.9528 + ], + "_id" : "germanCity/Cologne", + "_rev" : "24341039", + "_key" : "Cologne" + } + } +] diff --git a/Documentation/Examples/COMBINING_GRAPH_03_combine_let.generated b/Documentation/Examples/COMBINING_GRAPH_03_combine_let.generated new file mode 100644 index 0000000000..3b5245d509 --- /dev/null +++ b/Documentation/Examples/COMBINING_GRAPH_03_combine_let.generated @@ -0,0 +1,77 @@ +arangosh> db._query(`FOR startCity IN +........> WITHIN(germanCity, @lat, @long, @radius) +........> LET oneCity = (FOR v, e, p IN 1..1 OUTBOUND startCity +........> GRAPH 'routeplanner' RETURN v) +........> return {startCity: startCity._key, connectedCities: oneCity}`, +........> { +........> lat: bonn[0], +........> long: bonn[1], +........> radius: 400000 +........> } ).toArray(); +[ + { + "startCity" : "Cologne", + "connectedCities" : [ + { + "isCapital" : false, + "population" : 80000, + "loc" : [ + 45.76, + 4.84 + ], + "_id" : "frenchCity/Lyon", + "_rev" : "25061935", + "_key" : "Lyon" + }, + { + "isCapital" : true, + "population" : 4000000, + "loc" : [ + 48.8567, + 2.3508 + ], + "_id" : "frenchCity/Paris", + "_rev" : "25258543", + "_key" : "Paris" + } + ] + }, + { + "startCity" : "Hamburg", + "connectedCities" : [ + { + "isCapital" : true, + "population" : 4000000, + "loc" : [ + 48.8567, + 2.3508 + ], + "_id" : "frenchCity/Paris", + "_rev" : "25258543", + "_key" : "Paris" + }, + { + "isCapital" : false, + "population" : 80000, + "loc" : [ + 45.76, + 4.84 + ], + "_id" : "frenchCity/Lyon", + "_rev" : "25061935", + "_key" : "Lyon" + }, + { + "isCapital" : false, + "population" : 1000000, + "loc" : [ + 50.9364, + 6.9528 + ], + "_id" : "germanCity/Cologne", + "_rev" : "24341039", + "_key" : "Cologne" + } + ] + } +] diff --git a/Documentation/Examples/COMBINING_GRAPH_04_cleanup.generated b/Documentation/Examples/COMBINING_GRAPH_04_cleanup.generated new file mode 100644 index 0000000000..5b45e41fc5 --- /dev/null 
+++ b/Documentation/Examples/COMBINING_GRAPH_04_cleanup.generated @@ -0,0 +1,2 @@ +arangosh> examples.dropGraph("routeplanner"); +true From 4cd2621d06c963f86e1bd727db9c905e331250a4 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 8 Dec 2015 19:45:32 +0100 Subject: [PATCH 07/22] adjusted AQL function return values --- CHANGELOG | 5 ++ arangod/Aql/Functions.cpp | 42 +++++++-- js/server/modules/org/arangodb/aql.js | 118 +++++++++++++++----------- js/server/tests/aql-queries-geo.js | 18 ++-- 4 files changed, 120 insertions(+), 63 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 68232054ed..9ec9c2f23d 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -9,6 +9,11 @@ v2.8.0 (XXXX-XX-XX) * fixed cluster upgrade procedure +* the AQL functions `NEAR` and `WITHIN` now perform stricter validation + of their input parameters `radius` and `distance`. They may now throw + exceptions when invalid parameters are passed that may not have led + to exceptions in previous versions. + v2.8.0-beta1 (2015-12-06) ------------------------- diff --git a/arangod/Aql/Functions.cpp b/arangod/Aql/Functions.cpp index 9d59ece9b9..2996e6a563 100644 --- a/arangod/Aql/Functions.cpp +++ b/arangod/Aql/Functions.cpp @@ -3029,6 +3029,7 @@ static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx, std::string const& collectionName, std::string const& identifier) { std::vector parts = triagens::basics::StringUtils::split(identifier, "/"); + TRI_doc_mptr_copy_t mptr; if (parts.size() == 1) { int res = trx->readSingle(collection, &mptr, parts[0]); @@ -3059,24 +3060,37 @@ } //////////////////////////////////////////////////////////////////////////////// -/// @brief Helper function to get a document by it's _id +/// @brief Helper function to get a document by its _id /// This function will lazy read-lock the collection.
+/// this function will not throw if the document or the collection cannot be +/// found //////////////////////////////////////////////////////////////////////////////// static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx, CollectionNameResolver const* resolver, - std::string& identifier) { + std::string const& identifier) { std::vector parts = triagens::basics::StringUtils::split(identifier, "/"); + if (parts.size() != 2) { return Json(Json::Null); } std::string collectionName = parts[0]; TRI_transaction_collection_t* collection = nullptr; TRI_voc_cid_t cid = 0; - RegisterCollectionInTransaction(trx, collectionName, cid, collection); + try { + RegisterCollectionInTransaction(trx, collectionName, cid, collection); + } + catch (triagens::basics::Exception const& ex) { + // don't throw if collection is not found + if (ex.code() == TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) { + return Json(Json::Null); + } + throw ex; + } TRI_doc_mptr_copy_t mptr; int res = trx->readSingle(collection, &mptr, parts[1]); + if (res != TRI_ERROR_NO_ERROR) { return Json(Json::Null); } @@ -3087,8 +3101,7 @@ static Json getDocumentByIdentifier (triagens::arango::AqlTransaction* trx, cid, &mptr ); -}; - +} //////////////////////////////////////////////////////////////////////////////// /// @brief function Document @@ -3144,15 +3157,32 @@ AqlValue Functions::Document (triagens::aql::Query* query, TRI_transaction_collection_t* collection = nullptr; TRI_voc_cid_t cid; - RegisterCollectionInTransaction(trx, collectionName, cid, collection); + bool notFound = false; + + try { + RegisterCollectionInTransaction(trx, collectionName, cid, collection); + } + catch (triagens::basics::Exception const& ex) { + // don't throw if collection is not found + if (ex.code() != TRI_ERROR_ARANGO_COLLECTION_NOT_FOUND) { + throw ex; + } + notFound = true; + } Json id = ExtractFunctionParameter(trx, parameters, 1, false); if (id.isString()) { + if (notFound) { + return AqlValue(new Json(Json::Null)); + } std::string identifier = triagens::basics::JsonHelper::getStringValue(id.json(), ""); Json result = getDocumentByIdentifier(trx, resolver, collection, cid, collectionName, identifier); return AqlValue(new Json(TRI_UNKNOWN_MEM_ZONE, result.steal())); } else if (id.isArray()) { + if (notFound) { + return AqlValue(new Json(Json::Array)); + } size_t const n = id.size(); Json result(Json::Array, n); for (size_t i = 0; i < n; ++i) { diff --git a/js/server/modules/org/arangodb/aql.js b/js/server/modules/org/arangodb/aql.js index 300ccddccf..229e87ea61 100644 --- a/js/server/modules/org/arangodb/aql.js +++ b/js/server/modules/org/arangodb/aql.js @@ -693,20 +693,27 @@ function INDEX (collection, indexTypes) { /// @brief get access to a collection //////////////////////////////////////////////////////////////////////////////// -function COLLECTION (name) { +function COLLECTION (name, func) { 'use strict'; if (typeof name !== 'string') { - THROW(null, INTERNAL.errors.ERROR_INTERNAL); + THROW(func, INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, func); } + var c; if (name.substring(0, 1) === '_') { // system collections need to be accessed slightly differently as they // are not returned by the propertyGetter of db - return INTERNAL.db._collection(name); + c = INTERNAL.db._collection(name); + } + else { + c = INTERNAL.db[name]; } - return INTERNAL.db[name]; + if (c === null || c === undefined) { + THROW(func, INTERNAL.errors.ERROR_ARANGO_COLLECTION_NOT_FOUND, String(name)); + } + return c; } 
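+
+// Sketch of the effect of this change (the collection name below is made up):
+// a query like
+//   db._query('RETURN WITHIN("nosuchcollection", 0, 0, 10)');
+// now fails with ERROR_ARANGO_COLLECTION_NOT_FOUND on single servers and
+// coordinators alike, where the coordinator code path previously surfaced a
+// generic type-mismatch error instead (see the adjusted assertions in
+// js/server/tests/aql-queries-geo.js below).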
//////////////////////////////////////////////////////////////////////////////// @@ -1311,7 +1318,7 @@ function AQL_DOCUMENT (collection, id) { } if (TYPEWEIGHT(id) === TYPEWEIGHT_ARRAY) { - var c = COLLECTION(collection); + var c = COLLECTION(collection, "DOCUMENT"); var result = [ ], i; for (i = 0; i < id.length; ++i) { @@ -1325,7 +1332,7 @@ function AQL_DOCUMENT (collection, id) { } try { - return COLLECTION(collection).document(id); + return COLLECTION(collection, "DOCUMENT").document(id); } catch (e2) { return null; @@ -1336,16 +1343,16 @@ function AQL_DOCUMENT (collection, id) { /// @brief get all documents from the specified collection //////////////////////////////////////////////////////////////////////////////// -function GET_DOCUMENTS (collection) { +function GET_DOCUMENTS (collection, func) { 'use strict'; WARN(null, INTERNAL.errors.ERROR_QUERY_COLLECTION_USED_IN_EXPRESSION, AQL_TO_STRING(collection)); if (isCoordinator) { - return COLLECTION(collection).all().toArray(); + return COLLECTION(collection, func).all().toArray(); } - return COLLECTION(collection).ALL(0, null).documents; + return COLLECTION(collection, func).ALL(0, null).documents; } //////////////////////////////////////////////////////////////////////////////// @@ -3928,27 +3935,30 @@ function AQL_NEAR (collection, latitude, longitude, limit, distanceAttribute) { limit = 100; } else { + if (TYPEWEIGHT(limit) !== TYPEWEIGHT_NUMBER) { + THROW("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); + } limit = AQL_TO_NUMBER(limit); } var weight = TYPEWEIGHT(distanceAttribute); if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_STRING) { - WARN("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); + THROW("NEAR", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); } if (isCoordinator) { - var query = COLLECTION(collection).near(latitude, longitude); + var query = COLLECTION(collection, "NEAR").near(latitude, longitude); query._distance = distanceAttribute; return query.limit(limit).toArray(); } - var idx = INDEX(COLLECTION(collection), [ "geo1", "geo2" ]); + var idx = INDEX(COLLECTION(collection, "NEAR"), [ "geo1", "geo2" ]); if (idx === null) { THROW("NEAR", INTERNAL.errors.ERROR_QUERY_GEO_INDEX_MISSING, collection); } - var result = COLLECTION(collection).NEAR(idx.id, latitude, longitude, limit); + var result = COLLECTION(collection, "NEAR").NEAR(idx.id, latitude, longitude, limit); if (distanceAttribute === null || distanceAttribute === undefined) { return result.documents; @@ -3976,22 +3986,28 @@ function AQL_WITHIN (collection, latitude, longitude, radius, distanceAttribute) var weight = TYPEWEIGHT(distanceAttribute); if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_STRING) { - WARN("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); + THROW("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); } + + weight = TYPEWEIGHT(radius); + if (weight !== TYPEWEIGHT_NULL && weight !== TYPEWEIGHT_NUMBER) { + THROW("WITHIN", INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH); + } + radius = AQL_TO_NUMBER(radius); if (isCoordinator) { - var query = COLLECTION(collection).within(latitude, longitude, radius); + var query = COLLECTION(collection, "WITHIN").within(latitude, longitude, radius); query._distance = distanceAttribute; return query.toArray(); } - var idx = INDEX(COLLECTION(collection), [ "geo1", "geo2" ]); + var idx = INDEX(COLLECTION(collection, "WITHIN"), [ "geo1", "geo2" ]); if (idx === null) { THROW("WITHIN", 
INTERNAL.errors.ERROR_QUERY_GEO_INDEX_MISSING, collection); } - var result = COLLECTION(collection).WITHIN(idx.id, latitude, longitude, radius); + var result = COLLECTION(collection, "WITHIN").WITHIN(idx.id, latitude, longitude, radius); if (distanceAttribute === null || distanceAttribute === undefined) { return result.documents; @@ -4023,7 +4039,7 @@ function AQL_WITHIN_RECTANGLE (collection, latitude1, longitude1, latitude2, lon return null; } - return COLLECTION(collection).withinRectangle(latitude1, longitude1, latitude2, longitude2).toArray(); + return COLLECTION(collection, "WITHIN_RECTANGLE").withinRectangle(latitude1, longitude1, latitude2, longitude2).toArray(); } //////////////////////////////////////////////////////////////////////////////// @@ -4105,7 +4121,7 @@ function AQL_IS_IN_POLYGON (points, latitude, longitude) { function AQL_FULLTEXT (collection, attribute, query, limit) { 'use strict'; - var idx = INDEX_FULLTEXT(COLLECTION(collection), attribute); + var idx = INDEX_FULLTEXT(COLLECTION(collection, "FULLTEXT"), attribute); if (idx === null) { THROW("FULLTEXT", INTERNAL.errors.ERROR_QUERY_FULLTEXT_INDEX_MISSING, collection); @@ -4113,12 +4129,12 @@ function AQL_FULLTEXT (collection, attribute, query, limit) { if (isCoordinator) { if (limit !== undefined && limit !== null && limit > 0) { - return COLLECTION(collection).fulltext(attribute, query, idx).limit(limit).toArray(); + return COLLECTION(collection, "FULLTEXT").fulltext(attribute, query, idx).limit(limit).toArray(); } - return COLLECTION(collection).fulltext(attribute, query, idx).toArray(); + return COLLECTION(collection, "FULLTEXT").fulltext(attribute, query, idx).toArray(); } - return COLLECTION(collection).FULLTEXT(idx, query, limit).documents; + return COLLECTION(collection, "FULLTEXT").FULLTEXT(idx, query, limit).documents; } // ----------------------------------------------------------------------------- @@ -5486,7 +5502,7 @@ function AQL_PATHS (vertices, edgeCollection, direction, options) { } var searchAttributes = { - edgeCollection : COLLECTION(edgeCollection), + edgeCollection : COLLECTION(edgeCollection, "PATHS"), minLength : minLength, maxLength : maxLength, direction : searchDirection, @@ -5612,7 +5628,7 @@ function AQL_GRAPH_PATHS (graphName, options) { return null; } if (edgeCollections.indexOf(def.collection) === -1) { - edgeCollections.push(COLLECTION(def.collection)); + edgeCollections.push(COLLECTION(def.collection, "GRAPH_PATHS")); } }); @@ -5633,7 +5649,7 @@ function AQL_GRAPH_PATHS (graphName, options) { followCycles : followCycles }; - var vertices = GET_DOCUMENTS(startCollection); + var vertices = GET_DOCUMENTS(startCollection, "GRAPH_PATHS"); var n = vertices.length, i, j; for (i = 0; i < n; ++i) { var vertex = vertices[i]; @@ -6018,11 +6034,11 @@ function FILTER_RESTRICTION (list, restrictionList) { /// @brief get all document _ids matching the given examples //////////////////////////////////////////////////////////////////////////////// -function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) { +function DOCUMENT_IDS_BY_EXAMPLE (func, collectionList, example) { var res = [ ]; if (example === "null" || example === null || ! 
example) { collectionList.forEach(function (c) { - res = res.concat(COLLECTION(c).toArray().map(function(t) { return t._id; })); + res = res.concat(COLLECTION(c, func).toArray().map(function(t) { return t._id; })); }); return res; } @@ -6045,7 +6061,7 @@ function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) { }); collectionList.forEach(function (c) { tmp.forEach(function (e) { - res = res.concat(COLLECTION(c).byExample(e).toArray().map(function(t) { + res = res.concat(COLLECTION(c, func).byExample(e).toArray().map(function(t) { return t._id; })); }); @@ -6057,11 +6073,11 @@ function DOCUMENT_IDS_BY_EXAMPLE (collectionList, example) { /// @brief getAllDocsByExample //////////////////////////////////////////////////////////////////////////////// -function DOCUMENTS_BY_EXAMPLE (collectionList, example) { +function DOCUMENTS_BY_EXAMPLE (func, collectionList, example) { var res = [ ]; if (example === "null" || example === null || ! example) { collectionList.forEach(function (c) { - res = res.concat(COLLECTION(c).toArray()); + res = res.concat(COLLECTION(c, func).toArray()); }); return res; } @@ -6082,7 +6098,7 @@ function DOCUMENTS_BY_EXAMPLE (collectionList, example) { }); collectionList.forEach(function (c) { tmp.forEach(function (e) { - res = res.concat(COLLECTION(c).byExample(e).toArray()); + res = res.concat(COLLECTION(c, func).byExample(e).toArray()); }); }); return res; @@ -6152,7 +6168,7 @@ function RESOLVE_GRAPH_TO_FROM_VERTICES (graphname, options, funcname) { if (options.includeOrphans) { collections.fromCollections = collections.fromCollections.concat(collections.orphanCollections); } - return DOCUMENTS_BY_EXAMPLE( + return DOCUMENTS_BY_EXAMPLE(funcname, collections.fromCollections.filter(removeDuplicates), options.fromVertexExample ); } @@ -6168,7 +6184,7 @@ function RESOLVE_GRAPH_TO_TO_VERTICES (graphname, options, funcname) { return self.indexOf(elem) === pos; }; - return DOCUMENTS_BY_EXAMPLE( + return DOCUMENTS_BY_EXAMPLE(funcname, collections.toCollection.filter(removeDuplicates), options.toVertexExample ); } @@ -6188,7 +6204,7 @@ function RESOLVE_GRAPH_START_VERTICES (graphName, options, funcname) { var removeDuplicates = function(elem, pos, self) { return self.indexOf(elem) === pos; }; - return DOCUMENTS_BY_EXAMPLE( + return DOCUMENTS_BY_EXAMPLE(funcname, collections.fromCollections.filter(removeDuplicates), options.fromVertexExample ); } @@ -6211,13 +6227,13 @@ function RESOLVE_GRAPH_TO_DOCUMENTS (graphname, options, funcname) { }; var result = { - fromVertices : DOCUMENTS_BY_EXAMPLE( + fromVertices : DOCUMENTS_BY_EXAMPLE(funcname, collections.fromCollections.filter(removeDuplicates), options.fromVertexExample ), - toVertices : DOCUMENTS_BY_EXAMPLE( + toVertices : DOCUMENTS_BY_EXAMPLE(funcname, collections.toCollection.filter(removeDuplicates), options.toVertexExample ), - edges : DOCUMENTS_BY_EXAMPLE( + edges : DOCUMENTS_BY_EXAMPLE(funcname, collections.edgeCollections.filter(removeDuplicates), options.edgeExamples ), edgeCollections : collections.edgeCollections, @@ -6367,7 +6383,7 @@ function AQL_SHORTEST_PATH (vertexCollection, ) { params = SHORTEST_PATH_PARAMS(params); var a = TRAVERSAL_FUNC("SHORTEST_PATH", - TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)), + TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "SHORTEST_PATH")), TO_ID(startVertex, vertexCollection), TO_ID(endVertex, vertexCollection), direction, @@ -6900,14 +6916,14 @@ function AQL_GRAPH_SHORTEST_PATH (graphName, let startVertices; if 
(options.hasOwnProperty("startVertexCollectionRestriction") && Array.isArray(options.startVertexCollectionRestriction)) { - startVertices = DOCUMENT_IDS_BY_EXAMPLE(options.startVertexCollectionRestriction, startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", options.startVertexCollectionRestriction, startVertexExample); } else if (options.hasOwnProperty("startVertexCollectionRestriction") && typeof options.startVertexCollectionRestriction === 'string') { - startVertices = DOCUMENT_IDS_BY_EXAMPLE([ options.startVertexCollectionRestriction ], startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", [ options.startVertexCollectionRestriction ], startVertexExample); } else { - startVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", vertexCollections, startVertexExample); } if (startVertices.length === 0) { return []; @@ -6916,14 +6932,14 @@ function AQL_GRAPH_SHORTEST_PATH (graphName, let endVertices; if (options.hasOwnProperty("endVertexCollectionRestriction") && Array.isArray(options.endVertexCollectionRestriction)) { - endVertices = DOCUMENT_IDS_BY_EXAMPLE(options.endVertexCollectionRestriction, endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", options.endVertexCollectionRestriction, endVertexExample); } else if (options.hasOwnProperty("endVertexCollectionRestriction") && typeof options.endVertexCollectionRestriction === 'string') { - endVertices = DOCUMENT_IDS_BY_EXAMPLE([ options.endVertexCollectionRestriction ], endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", [ options.endVertexCollectionRestriction ], endVertexExample); } else { - endVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", vertexCollections, endVertexExample); } if (endVertices.length === 0) { return []; @@ -6971,7 +6987,7 @@ function AQL_TRAVERSAL (vertexCollection, params = TRAVERSAL_PARAMS(params); return TRAVERSAL_FUNC("TRAVERSAL", - TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)), + TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "TRAVERSAL")), TO_ID(startVertex, vertexCollection), undefined, direction, @@ -7148,7 +7164,7 @@ function AQL_TRAVERSAL_TREE (vertexCollection, } var result = TRAVERSAL_FUNC("TRAVERSAL_TREE", - TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection)), + TRAVERSAL.collectionDatasourceFactory(COLLECTION(edgeCollection, "TRAVERSAL_TREE")), TO_ID(startVertex, vertexCollection), undefined, direction, @@ -7327,7 +7343,7 @@ function AQL_EDGES (edgeCollection, options) { 'use strict'; - var c = COLLECTION(edgeCollection), result; + var c = COLLECTION(edgeCollection, "EDGES"), result; // validate arguments if (direction === "outbound") { @@ -7704,7 +7720,7 @@ function AQL_GRAPH_NEIGHBORS (graphName, } } let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();}); - let startVertices = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertexExample); + let startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_NEIGHBORS", vertexCollections, vertexExample); if (startVertices.length === 0) { return []; } @@ -8044,13 +8060,13 @@ function AQL_GRAPH_COMMON_NEIGHBORS (graphName, let graph = graphModule._graph(graphName); let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();}); - let vertices1 = 
DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertex1Examples); + let vertices1 = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_COMMON_NEIGHBORS", vertexCollections, vertex1Examples); let vertices2; if (vertex1Examples === vertex2Examples) { vertices2 = vertices1; } else { - vertices2 = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, vertex2Examples); + vertices2 = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_COMMON_NEIGHBORS", vertexCollections, vertex2Examples); } // Use ES6 Map. Higher performance then Object. let tmpNeighborsLeft = new Map(); @@ -8846,7 +8862,7 @@ function AQL_GRAPH_ABSOLUTE_BETWEENNESS (graphName, options) { options.includeData = false; let graph = graphModule._graph(graphName); let vertexCollections = graph._vertexCollections().map(function (c) { return c.name();}); - let vertexIds = DOCUMENT_IDS_BY_EXAMPLE(vertexCollections, {}); + let vertexIds = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_ABSOLUTE_BETWEENNESS", vertexCollections, {}); let result = {}; let distanceMap = AQL_GRAPH_SHORTEST_PATH(graphName, vertexIds , vertexIds, options); for (let k = 0; k < vertexIds.length; k++) { diff --git a/js/server/tests/aql-queries-geo.js b/js/server/tests/aql-queries-geo.js index 985125e2ad..4880206db6 100644 --- a/js/server/tests/aql-queries-geo.js +++ b/js/server/tests/aql-queries-geo.js @@ -229,6 +229,17 @@ function ahuacatlGeoTestSuite () { assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN NEAR(\"" + locationsNon.name() + "\", 0, 0, 10, true)"); }, +//////////////////////////////////////////////////////////////////////////////// +/// @brief test invalid WITHIN arguments count +//////////////////////////////////////////////////////////////////////////////// + + testInvalidWithinArgument : function () { + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, \"foo\")"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, true)"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, 0, true)"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(\"" + locationsNon.name() + "\", 0, 0, 0, [ ])"); + }, + //////////////////////////////////////////////////////////////////////////////// /// @brief test invalid collection parameter //////////////////////////////////////////////////////////////////////////////// @@ -241,12 +252,7 @@ function ahuacatlGeoTestSuite () { assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(true, 0, 0, 10)"); assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN([ ], 0, 0, 10)"); assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN({ }, 0, 0, 10)"); - if (cluster.isCluster()) { - assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" }); - } - else { - assertQueryError(errors.ERROR_ARANGO_COLLECTION_NOT_FOUND.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" }); - } + assertQueryError(errors.ERROR_ARANGO_COLLECTION_NOT_FOUND.code, "RETURN WITHIN(@name, 0, 0, 10)", { name: "foobarbazcoll" }); assertQueryError(errors.ERROR_QUERY_BIND_PARAMETER_MISSING.code, "RETURN WITHIN(@name, 0, 0, 10)"); } From 62959b2d04da9d0be712834ee32f280794f4eab2 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 8 Dec 2015 20:07:44 
+0100 Subject: [PATCH 08/22] updated documentation --- CHANGELOG | 2 +- .../Books/Users/Upgrading/UpgradingChanges28.mdpp | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index 9ec9c2f23d..0b47a8f9ec 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -10,7 +10,7 @@ v2.8.0 (XXXX-XX-XX) * fixed cluster upgrade procedure * the AQL functions `NEAR` and `WITHIN` now have stricter validations - for their input parameters `radius` and `distance`. They may now throw + for their input parameters `limit`, `radius` and `distance`. They may now throw exceptions when invalid parameters are passed that may have not led to exceptions in previous versions. diff --git a/Documentation/Books/Users/Upgrading/UpgradingChanges28.mdpp b/Documentation/Books/Users/Upgrading/UpgradingChanges28.mdpp index 406d915691..64ee4dda5b 100644 --- a/Documentation/Books/Users/Upgrading/UpgradingChanges28.mdpp +++ b/Documentation/Books/Users/Upgrading/UpgradingChanges28.mdpp @@ -26,6 +26,13 @@ FOR doc IN `OUTBOUND` RETURN doc.`any` ``` +!SUBSECTION Changed behavior + +The AQL functions `NEAR` and `WITHIN` now have stricter validations +for their input parameters `limit`, `radius` and `distance`. They may now throw +exceptions when invalid parameters are passed that may have not led +to exceptions in previous versions. + !SUBSECTION Deadlock handling @@ -123,3 +130,10 @@ let response = request({ * the built-in support for CoffeeScript source files is deprecated, it will raise a warning if you use it. Please pre-compile CoffeeScript source files. + + +!SECTION Client tools + +arangodump will now fail by default when trying to dump edges that +refer to already dropped collections. This can be circumvented by +specifying the option `--force true` when invoking arangodump From adde8c2867fd0b5f07367c9e0ddcd68050eebe59 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Tue, 8 Dec 2015 19:38:54 +0100 Subject: [PATCH 09/22] Yak shaving --- js/common/bootstrap/modules/fs.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/js/common/bootstrap/modules/fs.js b/js/common/bootstrap/modules/fs.js index 834879abcb..d205fb1aa3 100644 --- a/js/common/bootstrap/modules/fs.js +++ b/js/common/bootstrap/modules/fs.js @@ -350,11 +350,9 @@ else { exports.safeJoin = function () { var args = Array.prototype.slice.call(arguments); - var path = safeJoin(args.shift(), args.shift()); - while (args.length) { - path = safeJoin(path, args.shift()); - } - return path; + return args.reduce(function (base, relative) { + return safeJoin(base, relative); + }, args.shift()); }; //////////////////////////////////////////////////////////////////////////////// From 588fddb53c97cb62bd14f8657d91e2a0795718d6 Mon Sep 17 00:00:00 2001 From: Max Neunhoeffer Date: Tue, 8 Dec 2015 20:50:35 +0100 Subject: [PATCH 10/22] Better error message instead of failed assertion. 
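A note on the `fs.safeJoin` rewrite in the patch above: folding the argument list with `reduce` is behaviorally identical to the `while` loop it replaces, as both thread the accumulated path through the two-argument join one segment at a time. A minimal self-contained sketch (the pair-joining helper below is an assumption for illustration, not ArangoDB's native `safeJoin`):

```js
// Hypothetical stand-in for the native two-argument safeJoin:
// joins exactly one relative segment onto a base path.
function safeJoinPair (base, relative) {
  return base.replace(/\/+$/, '') + '/' + String(relative).replace(/^\/+/, '');
}

// The reduce-based variadic join from the patch: args.shift() pulls the
// first segment out as the seed, and reduce folds the rest onto it in order.
function safeJoin () {
  var args = Array.prototype.slice.call(arguments);
  return args.reduce(function (base, relative) {
    return safeJoinPair(base, relative);
  }, args.shift());
}

console.log(safeJoin('js', 'common', 'bootstrap')); // => "js/common/bootstrap"
```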
--- arangod/Cluster/ClusterInfo.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/arangod/Cluster/ClusterInfo.cpp b/arangod/Cluster/ClusterInfo.cpp index e0badaefc0..758f8a4b61 100644 --- a/arangod/Cluster/ClusterInfo.cpp +++ b/arangod/Cluster/ClusterInfo.cpp @@ -1592,7 +1592,11 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, AgencyCommResult previous = ac.getValues(key, false); previous.parse("", false); auto it = previous._values.begin(); - TRI_ASSERT(it != previous._values.end()); + if (it == previous._values.end()) { + LOG_ERROR("Entry for collection in Plan does not exist!"); + return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION, errorMsg); + } + TRI_json_t const* previousVal = it->second._json; loadPlannedCollections(); From 06a719692421cbd76ddebe9291197239d9523c8a Mon Sep 17 00:00:00 2001 From: Max Neunhoeffer Date: Tue, 8 Dec 2015 21:31:46 +0100 Subject: [PATCH 11/22] Fix test that checks etcd version numbers. --- js/server/tests/shell-cluster-agency.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/js/server/tests/shell-cluster-agency.js b/js/server/tests/shell-cluster-agency.js index d54d6e1bdf..e2125ac68b 100644 --- a/js/server/tests/shell-cluster-agency.js +++ b/js/server/tests/shell-cluster-agency.js @@ -83,7 +83,8 @@ function AgencySuite () { testVersion : function () { var agencyVersion = JSON.parse(agency.version()); - assertEqual(agencyVersion.internalVersion, "2"); + assertEqual(agencyVersion.etcdserver, "2.2.2"); + assertEqual(agencyVersion.etcdcluster, "2.2.0"); }, //////////////////////////////////////////////////////////////////////////////// From f38d70844f94c123519284a16d955a9bf3e5d886 Mon Sep 17 00:00:00 2001 From: Max Neunhoeffer Date: Tue, 8 Dec 2015 21:32:06 +0100 Subject: [PATCH 12/22] Fix a corner case in index generation. 
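The corner case addressed in the patch below: when the agency holds no previous value for the Plan entry, a compare-and-swap has nothing to compare against, so the code falls back to an unconditional set. The same decision in a hedged JavaScript sketch (the in-memory `store` is hypothetical, standing in for the C++ `AgencyComm` calls):

```js
// Hypothetical in-memory key-value store used only for illustration.
var store = {
  values: {},
  // Compare-and-swap: write only if the current value matches `expected`.
  cas: function (key, expected, value) {
    if (this.values[key] !== expected) { return false; }
    this.values[key] = value;
    return true;
  },
  // Unconditional write.
  set: function (key, value) {
    this.values[key] = value;
    return true;
  }
};

// The decision the patch encodes: CAS when a previous value was found in the
// Plan, plain set when there was none (only then is an overwrite safe).
function writePlanEntry (key, previousVal, newVal) {
  if (previousVal !== null) {
    return store.cas(key, previousVal, newVal);
  }
  return store.set(key, newVal); // only when there is no previous value
}
```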
--- arangod/Cluster/ClusterInfo.cpp | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/arangod/Cluster/ClusterInfo.cpp b/arangod/Cluster/ClusterInfo.cpp index 758f8a4b61..42872db642 100644 --- a/arangod/Cluster/ClusterInfo.cpp +++ b/arangod/Cluster/ClusterInfo.cpp @@ -1592,12 +1592,14 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, AgencyCommResult previous = ac.getValues(key, false); previous.parse("", false); auto it = previous._values.begin(); + TRI_json_t const* previousVal; if (it == previous._values.end()) { - LOG_ERROR("Entry for collection in Plan does not exist!"); - return setErrormsg(TRI_ERROR_CLUSTER_COULD_NOT_CREATE_COLLECTION, errorMsg); + LOG_INFO("Entry for collection in Plan does not exist!"); + previousVal = nullptr; + } + else { + previousVal = it->second._json; } - - TRI_json_t const* previousVal = it->second._json; loadPlannedCollections(); // It is possible that between the fetching of the planned collections @@ -1710,8 +1712,13 @@ int ClusterInfo::ensureIndexCoordinator (string const& databaseName, TRI_PushBack3ArrayJson(TRI_UNKNOWN_MEM_ZONE, idx, TRI_CopyJson(TRI_UNKNOWN_MEM_ZONE, newIndex)); - AgencyCommResult result = ac.casValue(key, previousVal, collectionJson, - 0.0, 0.0); + AgencyCommResult result; + if (previousVal != nullptr) { + result = ac.casValue(key, previousVal, collectionJson, 0.0, 0.0); + } + else { // only when there is no previous value + result = ac.setValue(key, collectionJson, 0.0); + } TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, collectionJson); From 18da21200315bfa00578644aa3a7559d16fe5f19 Mon Sep 17 00:00:00 2001 From: jsteemann Date: Tue, 8 Dec 2015 22:57:46 +0100 Subject: [PATCH 13/22] fixed edge handler response --- arangod/RestHandler/RestEdgesHandler.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/arangod/RestHandler/RestEdgesHandler.cpp b/arangod/RestHandler/RestEdgesHandler.cpp index 55f677a474..addb6375d7 100644 --- a/arangod/RestHandler/RestEdgesHandler.cpp +++ b/arangod/RestHandler/RestEdgesHandler.cpp @@ -47,7 +47,6 @@ RestEdgesHandler::RestEdgesHandler (HttpRequest* request) : RestVocbaseBaseHandler(request) { } - // ----------------------------------------------------------------------------- // --SECTION-- Handler methods // ----------------------------------------------------------------------------- @@ -274,8 +273,10 @@ bool RestEdgesHandler::readEdges (std::vector c generateError(responseCode, res); return false; } + resultDocument.set("error", triagens::basics::Json(false)); resultDocument.set("code", triagens::basics::Json(200)); + generateResult(resultDocument.json()); return true; } @@ -375,7 +376,6 @@ bool RestEdgesHandler::readEdges (std::vector c return true; } - //////////////////////////////////////////////////////////////////////////////// /// Internal function for optimized edge retrieval. /// Allows to send an TraverserExpression for filtering in the body @@ -402,7 +402,7 @@ bool RestEdgesHandler::readFilteredEdges () { if (! 
TRI_IsArrayJson(json.get())) { generateError(HttpResponse::BAD, TRI_ERROR_HTTP_BAD_PARAMETER, - "Expected a list of traverser expressions as body parameter"); + "Expected an array of traverser expressions as body parameter"); return false; } @@ -419,3 +419,4 @@ bool RestEdgesHandler::readFilteredEdges () { } return readEdges(expressions); } + From 2fe6df20e325ab956a410cab5bc1fdc22de9d230 Mon Sep 17 00:00:00 2001 From: jsteemann Date: Tue, 8 Dec 2015 22:58:10 +0100 Subject: [PATCH 14/22] added derived file --- .../_admin/aardvark/APP/frontend/js/bootstrap/modules/console.js | 1 + 1 file changed, 1 insertion(+) diff --git a/js/apps/system/_admin/aardvark/APP/frontend/js/bootstrap/modules/console.js b/js/apps/system/_admin/aardvark/APP/frontend/js/bootstrap/modules/console.js index 41b5251a6a..157c4323d8 100644 --- a/js/apps/system/_admin/aardvark/APP/frontend/js/bootstrap/modules/console.js +++ b/js/apps/system/_admin/aardvark/APP/frontend/js/bootstrap/modules/console.js @@ -337,6 +337,7 @@ exports.infoLines = function () { //////////////////////////////////////////////////////////////////////////////// exports.log = exports.info; +exports._log = log; //////////////////////////////////////////////////////////////////////////////// /// @brief logLines From 4f4087a944cbf8a0a2b01f91787f1ef9fcaa07ce Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Tue, 8 Dec 2015 23:45:36 +0100 Subject: [PATCH 15/22] Fix link to the error codes --- arangod/RestHandler/RestCursorHandler.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arangod/RestHandler/RestCursorHandler.cpp b/arangod/RestHandler/RestCursorHandler.cpp index 1ed3acc600..a961d34100 100644 --- a/arangod/RestHandler/RestCursorHandler.cpp +++ b/arangod/RestHandler/RestCursorHandler.cpp @@ -515,7 +515,7 @@ triagens::basics::Json RestCursorHandler::buildExtra (triagens::aql::QueryResult /// error occurs during query processing, the server will respond with *HTTP 400*. /// Again, the body of the response will contain details about the error. /// -/// A list of query errors can be found (../ArangoErrors/README.md) here. +/// A [list of query errors can be found here](../ErrorCodes/README.md). /// /// /// @RESTRETURNCODE{404} From 8f10c2b8a10950b5e1c27fe8657ed03a019e226a Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Tue, 8 Dec 2015 23:47:18 +0100 Subject: [PATCH 16/22] Generate markdown from the restdescritpion instead of html. --- Documentation/Books/generateMdFiles.py | 40 +++++----- Documentation/Scripts/generateSwagger.py | 95 ++++++++++-------------- 2 files changed, 63 insertions(+), 72 deletions(-) diff --git a/Documentation/Books/generateMdFiles.py b/Documentation/Books/generateMdFiles.py index 508964d451..64be174dd7 100644 --- a/Documentation/Books/generateMdFiles.py +++ b/Documentation/Books/generateMdFiles.py @@ -43,6 +43,16 @@ def getReference(name, source, verb): raise Exception("invalid reference: " + ref + " in " + fn) return ref +removeDoubleLF = re.compile("\n\n") +removeLF = re.compile("\n") + +def TrimThisParam(text, indent): + text = text.rstrip('\n').lstrip('\n') + text = removeDoubleLF.sub("\n", text) + if (indent > 0): + indent = (indent + 2) # align the text right of the list... + return removeLF.sub("\n" + ' ' * indent, text) + def unwrapPostJson(reference, layer): global swagger rc = '' @@ -54,35 +64,29 @@ def unwrapPostJson(reference, layer): if '$ref' in thisParam: subStructRef = getReference(thisParam, reference, None) - rc += "
  • " + param + ": " - rc += swagger['definitions'][subStructRef]['description'] + "
      " + rc += ' ' * layer + " - **" + param + "**:\n" rc += unwrapPostJson(subStructRef, layer + 1) - rc += "
    " - + elif thisParam['type'] == 'object': - rc += ' ' * layer + "
  • " + param + ": " + brTrim(thisParam['description']) + "
  • " + rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) + "\n" elif swagger['definitions'][reference]['properties'][param]['type'] == 'array': - rc += ' ' * layer + "
  • " + param + ": " + brTrim(thisParam['description']) + rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) if 'type' in thisParam['items']: - rc += " of type " + thisParam['items']['type']# + rc += " of type " + thisParam['items']['type'] + "\n" else: if len(thisParam['items']) == 0: - rc += "anonymous json object" + rc += "anonymous json object\n" else: try: subStructRef = getReference(thisParam['items'], reference, None) except: print >>sys.stderr, "while analyzing: " + param print >>sys.stderr, thisParam - rc += "\n
      " - rc += unwrapPostJson(subStructRef, layer + 1) - rc += "
    " - rc += '
</li>'
+                    rc += "\n" + unwrapPostJson(subStructRef, layer + 1)
         else:
-            rc += ' ' * layer + "
  • " + param + ": " + thisParam['description'] + '
</li>'
+            rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(thisParam['description'], layer) + '\n'
     return rc
 
-
 def getRestBodyParam():
     rc = "\n**Body Parameters**\n"
     addText = ''
@@ -93,13 +97,13 @@
             if 'additionalProperties' in thisVerb['parameters'][nParam]['schema']:
                 addText = "free style json body"
             else:
-                addText = "
      " + unwrapPostJson( - getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0) + "
    " + addText = unwrapPostJson( + getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0) rc += addText return rc def getRestReplyBodyParam(param): - rc = "\n**Reply Body**\n
      " + rc = "\n**Reply Body**\n" try: rc += unwrapPostJson(getReference(thisVerb['responses'][param]['schema'], route, verb), 0) @@ -107,7 +111,7 @@ def getRestReplyBodyParam(param): print >>sys.stderr,"failed to search " + param + " in: " print >>sys.stderr,json.dumps(thisVerb, indent=4, separators=(', ',': '), sort_keys=True) raise - return rc + "
    \n" + return rc + "\n" SIMPL_REPL_DICT = { diff --git a/Documentation/Scripts/generateSwagger.py b/Documentation/Scripts/generateSwagger.py index e84de71240..a838361e14 100755 --- a/Documentation/Scripts/generateSwagger.py +++ b/Documentation/Scripts/generateSwagger.py @@ -289,10 +289,10 @@ def Typography(txt): txt = txt[0:-1] # txt = BackTicks(txt) - txt = AsteriskBold(txt) - txt = AsteriskItalic(txt) +# txt = AsteriskBold(txt) +# txt = AsteriskItalic(txt) # txt = FN(txt) - txt = LIT(txt) +# txt = LIT(txt) # txt = FA(txt) # # no way to find out the correct link for Swagger, @@ -487,32 +487,8 @@ def generic_handler_desc(cargo, r, message, op, para, name): continue line = Typography(line) + para[name] += line + '\n' - if r.DESCRIPTION_LI.match(line): - line = "
  • " + line[2:] - inLI = True - elif inLI and r.DESCRIPTION_SP.match(line): - line = line[2:] - elif inLI and r.DESCRIPTION_BL.match(line): - line = "" - else: - inLI = False - - if not inUL and inLI: - line = "
      " + line - inUL = True - elif inUL and r.EMPTY_LINE.match(line): - line = "
    " + line - inUL = False - - elif inLI and r.EMPTY_LINE.match(line): - line = "
  • " + line - inUL = False - - if not inLI and r.EMPTY_LINE.match(line): - line = "
    " - - para[name] += line + ' ' para[name] = removeTrailingBR.sub("", para[name]) def start_docublock(cargo, r=Regexen()): @@ -675,7 +651,7 @@ def restbodyparam(cargo, r=Regexen()): if restBodyParam == None: # https://github.com/swagger-api/swagger-ui/issues/1430 # once this is solved we can skip this: - operation['description'] += "**A json post document with these Properties is required:**" + operation['description'] += "**A json post document with these Properties is required:**\n" restBodyParam = { 'name': 'Json Post Body', 'x-description-offset': len(swagger['paths'][httpPath][method]['description']), @@ -913,7 +889,7 @@ def restreplybody(cargo, r=Regexen()): if restReplyBodyParam == None: # https://github.com/swagger-api/swagger-ui/issues/1430 # once this is solved we can skip this: - operation['description'] += "**A json document with these Properties is returned:**" + operation['description'] += "**A json document with these Properties is returned:**\n" swagger['paths'][httpPath][method]['responses'][currentReturnCode][ 'x-description-offset'] = len(swagger['paths'][httpPath][method]['description']) swagger['paths'][httpPath][method]['responses'][currentReturnCode]['schema'] = { @@ -1169,6 +1145,16 @@ def getReference(name, source, verb): raise Exception("invalid reference: " + ref + " in " + fn) return ref +removeDoubleLF = re.compile("\n\n") +removeLF = re.compile("\n") + +def TrimThisParam(text, indent): + text = text.rstrip('\n').lstrip('\n') + text = removeDoubleLF.sub("\n", text) + if (indent > 0): + indent = (indent + 2) # align the text right of the list... + return removeLF.sub("\n" + ' ' * indent, text) + def unwrapPostJson(reference, layer): global swagger rc = '' @@ -1180,32 +1166,27 @@ def unwrapPostJson(reference, layer): if '$ref' in thisParam: subStructRef = getReference(thisParam, reference, None) - rc += "
  • " + param + ": " - rc += swagger['definitions'][subStructRef]['description'] + "
      " + rc += ' ' * layer + " - **" + param + "**:\n" rc += unwrapPostJson(subStructRef, layer + 1) - rc += "
    " - + elif thisParam['type'] == 'object': - rc += ' ' * layer + "
  • " + param + ": " + brTrim(thisParam['description']) + "
  • " + rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) + "\n" elif swagger['definitions'][reference]['properties'][param]['type'] == 'array': - rc += ' ' * layer + "
  • " + param + ": " + brTrim(thisParam['description']) + rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(brTrim(thisParam['description']), layer) if 'type' in thisParam['items']: - rc += " of type " + thisParam['items']['type']# + rc += " of type " + thisParam['items']['type'] + "\n" else: if len(thisParam['items']) == 0: - rc += "anonymous json object" + rc += "anonymous json object\n" else: try: subStructRef = getReference(thisParam['items'], reference, None) except: print >>sys.stderr, "while analyzing: " + param print >>sys.stderr, thisParam - rc += "\n
      " - rc += unwrapPostJson(subStructRef, layer + 1) - rc += "
    " - rc += '
</li>'
+                    rc += "\n" + unwrapPostJson(subStructRef, layer + 1)
         else:
-            rc += ' ' * layer + "
  • " + param + ": " + thisParam['description'] + '
</li>'
+            rc += ' ' * layer + " - **" + param + "**: " + TrimThisParam(thisParam['description'], layer) + '\n'
     return rc
 
@@ -1299,12 +1280,14 @@ for route in swagger['paths'].keys():
         if thisVerb['parameters'][nParam]['in'] == 'body':
             descOffset = thisVerb['parameters'][nParam]['x-description-offset']
             addText = ''
-            postText = thisVerb['description'][:descOffset]
+            postText = ''
+            paramDesc = thisVerb['description'][:descOffset]
+            if len(paramDesc) > 0:
+                postText += paramDesc
             if 'additionalProperties' in thisVerb['parameters'][nParam]['schema']:
-                addText = "free style json body"
+                addText = "\nfree style json body\n\n"
             else:
-                addText = "
      " + unwrapPostJson( - getReference(thisVerb['parameters'][nParam]['schema'], route, verb),0) + "
    " + addText = "\n" + unwrapPostJson(getReference(thisVerb['parameters'][nParam]['schema'], route, verb),1) + "\n\n" postText += addText postText += thisVerb['description'][descOffset:] @@ -1315,30 +1298,34 @@ for route in swagger['paths'].keys(): if 'responses' in thisVerb: for nRC in thisVerb['responses']: if 'x-description-offset' in thisVerb['responses'][nRC]: - descOffset = thisVerb['responses'][nRC]['x-description-offset'] #print descOffset #print offsetPlus descOffset += offsetPlus - addText = '' + addText = '\n##HTTP ' + nRC #print thisVerb['responses'][nRC]['description'] postText = thisVerb['description'][:descOffset] #print postText + replyDescription = TrimThisParam(thisVerb['responses'][nRC]['description'], 0) + if (len(replyDescription) > 0): + addText += '\n' + replyDescription + '\n' if 'additionalProperties' in thisVerb['responses'][nRC]['schema']: - addText = "free style json body" + addText += "\n free style json body\n" else: - addText = "
      " + unwrapPostJson( - getReference(thisVerb['responses'][nRC]['schema'], route, verb),0) + "
    " + addText += "\n" + unwrapPostJson( + getReference(thisVerb['responses'][nRC]['schema'], route, verb),0) + '\n' #print addText postText += addText - postText += thisVerb['responses'][nRC]['description'][descOffset:] + postText += thisVerb['description'][descOffset:] offsetPlus += len(addText) thisVerb['description'] = postText + #print '-'*80 + #print thisVerb['description'] # Append the examples to the description: if 'x-examples' in thisVerb and len(thisVerb['x-examples']) > 0: - thisVerb['description'] += '
    ' + thisVerb['description'] += '\n' for nExample in range(0, len(thisVerb['x-examples'])): thisVerb['description'] += thisVerb['x-examples'][nExample] thisVerb['x-examples'] = []# todo unset! From 8d31426eb3451a522bfa1613ca80c2919875f377 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Wed, 9 Dec 2015 00:00:49 +0100 Subject: [PATCH 17/22] Move python scripts into the scripts directory --- Documentation/Books/Makefile | 12 +++--------- Documentation/{Books => Scripts}/codeBlockReader.py | 0 Documentation/{Books => Scripts}/deprecated.py | 0 Documentation/{Books => Scripts}/generateMdFiles.py | 0 4 files changed, 3 insertions(+), 9 deletions(-) rename Documentation/{Books => Scripts}/codeBlockReader.py (100%) rename Documentation/{Books => Scripts}/deprecated.py (100%) rename Documentation/{Books => Scripts}/generateMdFiles.py (100%) diff --git a/Documentation/Books/Makefile b/Documentation/Books/Makefile index d7e0885b85..2ddec76eba 100644 --- a/Documentation/Books/Makefile +++ b/Documentation/Books/Makefile @@ -137,7 +137,7 @@ build-book: echo cp $${WD}/$${pic} $${pic}; \ cp $${WD}/$${pic} $${pic}; \ done - python generateMdFiles.py $(NAME) ppbooks/ ../../js/apps/system/_admin/aardvark/APP/api-docs.json $(FILTER) + python ../Scripts/generateMdFiles.py $(NAME) ppbooks/ ../../js/apps/system/_admin/aardvark/APP/api-docs.json $(FILTER) cd ppbooks/$(NAME) && sed -i -e 's/VERSION_NUMBER/v$(newVersionNumber)/g' styles/header.js cd ppbooks/$(NAME) && sed -i -e 's/VERSION_NUMBER/v$(newVersionNumber)/g' README.md @@ -154,7 +154,7 @@ build-book: cd ppbooks/$(NAME) && gitbook install cd ppbooks/$(NAME) && gitbook build ./ ./../../books/$(NAME) - python deprecated.py + python ../Scripts/deprecated.py make book-check-markdown-leftovers @@ -183,8 +183,6 @@ check-docublocks: grep -v ppbook |\ grep -v allComments.txt |\ grep -v Makefile |\ - grep -v codeBlockReader.py |\ - grep -v generateMdFiles.py |\ grep -v '.*~:.*' |\ grep -v '.*#.*:.*' \ > /tmp/rawindoc.txt @@ -192,8 +190,6 @@ check-docublocks: grep -v ppbook |\ grep -v allComments.txt |\ grep -v Makefile |\ - grep -v codeBlockReader.py |\ - grep -v generateMdFiles.py |\ grep -v '.*~:.*' |\ grep -v '.*#.*:.*' \ >> /tmp/rawindoc.txt @@ -204,8 +200,6 @@ check-docublocks: grep -v ppbook |\ grep -v allComments.txt |\ grep -v Makefile |\ - grep -v codeBlockReader.py |\ - grep -v generateMdFiles.py |\ grep -v '.*~:.*' |\ grep -v '.*#.*:.*' \ >> /tmp/rawinprog.txt @@ -248,7 +242,7 @@ clean: clean-intermediate build-books-keep-md: @test -d books || mkdir books - python codeBlockReader.py + python ../Scripts/codeBlockReader.py make build-book NAME=Users build-books: clean-intermediate build-books-keep-md check-docublocks diff --git a/Documentation/Books/codeBlockReader.py b/Documentation/Scripts/codeBlockReader.py similarity index 100% rename from Documentation/Books/codeBlockReader.py rename to Documentation/Scripts/codeBlockReader.py diff --git a/Documentation/Books/deprecated.py b/Documentation/Scripts/deprecated.py similarity index 100% rename from Documentation/Books/deprecated.py rename to Documentation/Scripts/deprecated.py diff --git a/Documentation/Books/generateMdFiles.py b/Documentation/Scripts/generateMdFiles.py similarity index 100% rename from Documentation/Books/generateMdFiles.py rename to Documentation/Scripts/generateMdFiles.py From f13ffb4dffcdfe9670a54107cf0c9fc889fc9452 Mon Sep 17 00:00:00 2001 From: jsteemann Date: Wed, 9 Dec 2015 00:21:55 +0100 Subject: [PATCH 18/22] jslint --- js/server/modules/org/arangodb/aql.js | 25 
++++++++++++++++++------- js/server/tests/aql-queries-geo.js | 2 -- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/js/server/modules/org/arangodb/aql.js b/js/server/modules/org/arangodb/aql.js index 229e87ea61..a17ff908b4 100644 --- a/js/server/modules/org/arangodb/aql.js +++ b/js/server/modules/org/arangodb/aql.js @@ -4039,7 +4039,12 @@ function AQL_WITHIN_RECTANGLE (collection, latitude1, longitude1, latitude2, lon return null; } - return COLLECTION(collection, "WITHIN_RECTANGLE").withinRectangle(latitude1, longitude1, latitude2, longitude2).toArray(); + return COLLECTION(collection, "WITHIN_RECTANGLE").withinRectangle( + latitude1, + longitude1, + latitude2, + longitude2 + ).toArray(); } //////////////////////////////////////////////////////////////////////////////// @@ -6916,14 +6921,17 @@ function AQL_GRAPH_SHORTEST_PATH (graphName, let startVertices; if (options.hasOwnProperty("startVertexCollectionRestriction") && Array.isArray(options.startVertexCollectionRestriction)) { - startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", options.startVertexCollectionRestriction, startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE( + "GRAPH_SHORTEST_PATH", options.startVertexCollectionRestriction, startVertexExample); } else if (options.hasOwnProperty("startVertexCollectionRestriction") && typeof options.startVertexCollectionRestriction === 'string') { - startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", [ options.startVertexCollectionRestriction ], startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", + [ options.startVertexCollectionRestriction ], startVertexExample); } else { - startVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", vertexCollections, startVertexExample); + startVertices = DOCUMENT_IDS_BY_EXAMPLE( + "GRAPH_SHORTEST_PATH", vertexCollections, startVertexExample); } if (startVertices.length === 0) { return []; @@ -6932,14 +6940,17 @@ function AQL_GRAPH_SHORTEST_PATH (graphName, let endVertices; if (options.hasOwnProperty("endVertexCollectionRestriction") && Array.isArray(options.endVertexCollectionRestriction)) { - endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", options.endVertexCollectionRestriction, endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE( + "GRAPH_SHORTEST_PATH", options.endVertexCollectionRestriction, endVertexExample); } else if (options.hasOwnProperty("endVertexCollectionRestriction") && typeof options.endVertexCollectionRestriction === 'string') { - endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", [ options.endVertexCollectionRestriction ], endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE( + "GRAPH_SHORTEST_PATH", [ options.endVertexCollectionRestriction ], endVertexExample); } else { - endVertices = DOCUMENT_IDS_BY_EXAMPLE("GRAPH_SHORTEST_PATH", vertexCollections, endVertexExample); + endVertices = DOCUMENT_IDS_BY_EXAMPLE( + "GRAPH_SHORTEST_PATH", vertexCollections, endVertexExample); } if (endVertices.length === 0) { return []; diff --git a/js/server/tests/aql-queries-geo.js b/js/server/tests/aql-queries-geo.js index 4880206db6..32960c8081 100644 --- a/js/server/tests/aql-queries-geo.js +++ b/js/server/tests/aql-queries-geo.js @@ -245,8 +245,6 @@ function ahuacatlGeoTestSuite () { //////////////////////////////////////////////////////////////////////////////// testInvalidCollectionArgument : function () { - var cluster = require("org/arangodb/cluster"); - assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, 
"RETURN WITHIN(1234, 0, 0, 10)"); assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(false, 0, 0, 10)"); assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN WITHIN(true, 0, 0, 10)"); From 209f4a6d3ccd98c53698be53f4bb61bb5337f4d0 Mon Sep 17 00:00:00 2001 From: jsteemann Date: Wed, 9 Dec 2015 00:25:04 +0100 Subject: [PATCH 19/22] activate travis tests for 2.8 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 334c18824c..70f0498852 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,7 @@ branches: - "2.5" - "2.6" - "2.7" + - "2.8" language: cpp compiler: g++ From c99f4244c70c2268f0ffbbcfc4e70d28fd7ecfb7 Mon Sep 17 00:00:00 2001 From: jsteemann Date: Wed, 9 Dec 2015 00:26:18 +0100 Subject: [PATCH 20/22] bugfix for cluster edges API --- arangod/Cluster/ClusterMethods.cpp | 42 +++++++++++++++++------------- lib/Rest/HttpResponse.cpp | 1 + 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/arangod/Cluster/ClusterMethods.cpp b/arangod/Cluster/ClusterMethods.cpp index e69af33c03..a5f87fe3b8 100644 --- a/arangod/Cluster/ClusterMethods.cpp +++ b/arangod/Cluster/ClusterMethods.cpp @@ -1230,13 +1230,13 @@ int getAllDocumentsOnCoordinator ( //////////////////////////////////////////////////////////////////////////////// int getAllEdgesOnCoordinator ( - string const& dbname, - string const& collname, - string const& vertex, + std::string const& dbname, + std::string const& collname, + std::string const& vertex, TRI_edge_direction_e const& direction, triagens::rest::HttpResponse::HttpResponseCode& responseCode, - string& contentType, - string& resultBody ) { + std::string& contentType, + std::string& resultBody) { triagens::basics::Json result(triagens::basics::Json::Object); std::vector expTmp; int res = getFilteredEdgesOnCoordinator(dbname, collname, vertex, direction, expTmp, responseCode, contentType, result); @@ -1245,14 +1245,14 @@ int getAllEdgesOnCoordinator ( } int getFilteredEdgesOnCoordinator ( - string const& dbname, - string const& collname, - string const& vertex, + std::string const& dbname, + std::string const& collname, + std::string const& vertex, TRI_edge_direction_e const& direction, std::vector const& expressions, triagens::rest::HttpResponse::HttpResponseCode& responseCode, - string& contentType, - triagens::basics::Json& result ) { + std::string& contentType, + triagens::basics::Json& result) { TRI_ASSERT(result.isObject()); TRI_ASSERT(result.members() == 0); @@ -1268,8 +1268,8 @@ int getFilteredEdgesOnCoordinator ( ClusterCommResult* res; - map shards = collinfo->shardIds(); - map::iterator it; + std::map shards = collinfo->shardIds(); + std::map::iterator it; CoordTransactionID coordTransactionID = TRI_NewTickServer(); std::string queryParameters = "?vertex=" + StringUtils::urlEncode(vertex); if (direction == TRI_EDGE_IN) { @@ -1289,7 +1289,7 @@ int getFilteredEdgesOnCoordinator ( reqBodyString->append(body.toString()); } for (it = shards.begin(); it != shards.end(); ++it) { - map* headers = new map; + std::map* headers = new std::map; res = cc->asyncRequest("", coordTransactionID, "shard:" + it->first, triagens::rest::HttpRequest::HTTP_REQUEST_PUT, "/_db/" + StringUtils::urlEncode(dbname) + "/_api/edges/" + it->first + queryParameters, @@ -1312,19 +1312,27 @@ int getFilteredEdgesOnCoordinator ( cc->drop( "", coordTransactionID, 0, ""); return TRI_ERROR_CLUSTER_TIMEOUT; } - if (res->status == CL_COMM_ERROR || res->status == CL_COMM_DROPPED || - 
res->answer_code == triagens::rest::HttpResponse::NOT_FOUND) { + if (res->status == CL_COMM_ERROR || res->status == CL_COMM_DROPPED) { delete res; cc->drop( "", coordTransactionID, 0, ""); return TRI_ERROR_INTERNAL; } - + if (res->status == CL_COMM_RECEIVED) { + } + std::unique_ptr shardResult(TRI_JsonString(TRI_UNKNOWN_MEM_ZONE, res->answer->body())); if (shardResult == nullptr || ! TRI_IsObjectJson(shardResult.get())) { delete res; return TRI_ERROR_INTERNAL; } + + bool const isError = triagens::basics::JsonHelper::checkAndGetBooleanValue(shardResult.get(), "error"); + if (isError) { + // shared returned an error + delete res; + return triagens::basics::JsonHelper::getNumericValue(shardResult.get(), "errorNum", TRI_ERROR_INTERNAL); + } auto docs = TRI_LookupObjectJson(shardResult.get(), "edges"); @@ -1364,8 +1372,6 @@ int getFilteredEdgesOnCoordinator ( return TRI_ERROR_NO_ERROR; } - - //////////////////////////////////////////////////////////////////////////////// /// @brief modify a document in a coordinator //////////////////////////////////////////////////////////////////////////////// diff --git a/lib/Rest/HttpResponse.cpp b/lib/Rest/HttpResponse.cpp index 5658d25924..1c47ac71d6 100644 --- a/lib/Rest/HttpResponse.cpp +++ b/lib/Rest/HttpResponse.cpp @@ -213,6 +213,7 @@ HttpResponse::HttpResponseCode HttpResponse::responseCode (int code) { case TRI_ERROR_ARANGO_DOCUMENT_KEY_BAD: case TRI_ERROR_ARANGO_DOCUMENT_KEY_UNEXPECTED: case TRI_ERROR_ARANGO_DOCUMENT_TYPE_INVALID: + case TRI_ERROR_ARANGO_DOCUMENT_HANDLE_BAD: case TRI_ERROR_CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES: case TRI_ERROR_CLUSTER_MUST_NOT_SPECIFY_KEY: case TRI_ERROR_TYPE_ERROR: From 7042542548aadd538436c40d8203dac38ed40643 Mon Sep 17 00:00:00 2001 From: Michael Hackstein Date: Wed, 9 Dec 2015 09:52:01 +0100 Subject: [PATCH 21/22] Fixed memleak for cluster Traverser edge data --- arangod/Cluster/ClusterTraverser.h | 3 +++ 1 file changed, 3 insertions(+) diff --git a/arangod/Cluster/ClusterTraverser.h b/arangod/Cluster/ClusterTraverser.h index 2d394bf32b..f59a318634 100644 --- a/arangod/Cluster/ClusterTraverser.h +++ b/arangod/Cluster/ClusterTraverser.h @@ -66,6 +66,9 @@ namespace triagens { for (auto& it : _vertices) { TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, it.second); } + for (auto& it : _edges) { + TRI_FreeJson(TRI_UNKNOWN_MEM_ZONE, it.second); + } } void setStartVertex (VertexId const& v) override; From aca6c2421b5e58a3e5e98ec765656c4cbb40df81 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Wed, 9 Dec 2015 10:17:08 +0100 Subject: [PATCH 22/22] Add crosslink to CombiningGraphTraversals.mdpp --- Documentation/Books/Users/Aql/GraphTraversals.mdpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Documentation/Books/Users/Aql/GraphTraversals.mdpp b/Documentation/Books/Users/Aql/GraphTraversals.mdpp index 27409866fe..7916bbf880 100644 --- a/Documentation/Books/Users/Aql/GraphTraversals.mdpp +++ b/Documentation/Books/Users/Aql/GraphTraversals.mdpp @@ -273,3 +273,5 @@ And finally clean it up again: If this traversal is not powerful enough for your needs, so you cannot describe your conditions as AQL filter statements you might want to look at [manually crafted traverser](../Traversals/README.md). + +[See here for more traversal examples](../AqlExamples/CombiningGraphTraversals.md).
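As a closing illustration of the native AQL traversals that the final patch cross-links, a short arangosh sketch (the graph name `myGraph` and start vertex `persons/alice` are placeholders, not part of this patch series):

```js
// Hypothetical arangosh session: walk up to two edges outbound from a start
// vertex using the named-graph traversal syntax documented in GraphTraversals.mdpp.
var db = require("org/arangodb").db;

var keys = db._query(
  "FOR v, e, p IN 1..2 OUTBOUND @start GRAPH 'myGraph' RETURN DISTINCT v._key",
  { start: "persons/alice" }
).toArray();

keys.forEach(function (key) {
  require("console").log("reached vertex: %s", key);
});
```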