From c2c574d828dbc20ba3502ea3e555952b8313b9e9 Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Thu, 16 Jan 2014 14:08:28 +0100 Subject: [PATCH 01/21] added missing encoding --- js/server/version-check.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 js/server/version-check.js diff --git a/js/server/version-check.js b/js/server/version-check.js old mode 100644 new mode 100755 index 01680b2be3..e9b01fcf55 --- a/js/server/version-check.js +++ b/js/server/version-check.js @@ -416,7 +416,7 @@ "do": "org/arangodb/actions/redirectRequest", options: { permanently: true, - destination: "/_db/" + db._name() + "/_admin/aardvark/index.html" + destination: "/_db/" + encodeURIComponent(db._name()) + "/_admin/aardvark/index.html" } }, priority: -1000000 From 87009c7e6e014ae89340bee085d18b3a89c65634 Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Thu, 16 Jan 2014 14:08:45 +0100 Subject: [PATCH 02/21] added missing db prefix --- js/server/modules/org/arangodb/actions.js | 74 +++++++++++-------- .../modules/org/arangodb/foxx/manager.js | 3 +- 2 files changed, 47 insertions(+), 30 deletions(-) mode change 100644 => 100755 js/server/modules/org/arangodb/actions.js mode change 100644 => 100755 js/server/modules/org/arangodb/foxx/manager.js diff --git a/js/server/modules/org/arangodb/actions.js b/js/server/modules/org/arangodb/actions.js old mode 100644 new mode 100755 index b0994a5546..420e9201bf --- a/js/server/modules/org/arangodb/actions.js +++ b/js/server/modules/org/arangodb/actions.js @@ -1487,13 +1487,46 @@ function resultUnsupported (req, res, headers) { /// @brief internal function for handling redirects //////////////////////////////////////////////////////////////////////////////// -function handleRedirect (req, res, destination, headers) { +function handleRedirect (req, res, options, headers) { + var destination; + var url; + + destination = options.destination; + + if (destination.substr(0,5) !== "http:" && destination.substr(0,6) !== "https:") { + url = req.protocol + "://"; + + if (req.headers.hasOwnProperty('host')) { + url += req.headers.host; + } + else { + url += req.server.address + ":" + req.server.port; + } + + if (options.relative) { + var u = req.url; + + if (0 < u.length && u[u.length - 1] === '/') { + url += "/_db/" + encodeURIComponent(req.database) + u + destination; + } + else { + url += "/_db/" + encodeURIComponent(req.database) + u + "/" + destination; + } + } + else { + url += destination; + } + } + else { + url = destination; + } + res.contentType = "text/html"; res.body = "Moved" + "

<h1>Moved</h1>"
      + "<p>This page has moved to <a href=\""
-     + destination
+     + url
      + "\"></a>.</p></body></html>

"; if (headers !== undefined) { @@ -1503,56 +1536,39 @@ function handleRedirect (req, res, destination, headers) { res.headers = {}; } - res.headers.location = destination; + res.headers.location = url; } //////////////////////////////////////////////////////////////////////////////// /// @brief generates a permanently redirect /// -/// @FUN{actions.resultPermanentRedirect(@FA{req}, @FA{res}, @FA{destination}, @FA{headers})} +/// @FUN{actions.resultPermanentRedirect(@FA{req}, @FA{res}, @FA{options}, @FA{headers})} /// /// The function generates a redirect response. //////////////////////////////////////////////////////////////////////////////// -function resultPermanentRedirect (req, res, destination, headers) { +function resultPermanentRedirect (req, res, options, headers) { 'use strict'; res.responseCode = exports.HTTP_MOVED_PERMANENTLY; - if (destination.substr(0,5) !== "http:" && destination.substr(0,6) !== "https:") { - if (req.headers.hasOwnProperty('host')) { - destination = req.protocol - + "://" - + req.headers.host - + destination; - } - else { - destination = req.protocol - + "://" - + req.server.address - + ":" - + req.server.port - + destination; - } - } - - handleRedirect(req, res, destination, headers); + handleRedirect(req, res, options, headers); } //////////////////////////////////////////////////////////////////////////////// /// @brief generates a temporary redirect /// -/// @FUN{actions.resultTemporaryRedirect(@FA{req}, @FA{res}, @FA{destination}, @FA{headers})} +/// @FUN{actions.resultTemporaryRedirect(@FA{req}, @FA{res}, @FA{options}, @FA{headers})} /// /// The function generates a redirect response. //////////////////////////////////////////////////////////////////////////////// -function resultTemporaryRedirect (req, res, destination, headers) { +function resultTemporaryRedirect (req, res, options, headers) { 'use strict'; res.responseCode = exports.HTTP_TEMPORARY_REDIRECT; - handleRedirect(req, res, destination, headers); + handleRedirect(req, res, options, headers); } // ----------------------------------------------------------------------------- @@ -1867,10 +1883,10 @@ function redirectRequest (req, res, options, next) { 'use strict'; if (options.permanently) { - resultPermanentRedirect(req, res, options.destination); + resultPermanentRedirect(req, res, options); } else { - resultTemporaryRedirect(req, res, options.destination); + resultTemporaryRedirect(req, res, options); } } diff --git a/js/server/modules/org/arangodb/foxx/manager.js b/js/server/modules/org/arangodb/foxx/manager.js old mode 100644 new mode 100755 index 14f208c226..5a420997bf --- a/js/server/modules/org/arangodb/foxx/manager.js +++ b/js/server/modules/org/arangodb/foxx/manager.js @@ -642,7 +642,8 @@ function routingAalApp (app, mount, options) { "do" : "org/arangodb/actions/redirectRequest", "options" : { "permanently" : (app._id.substr(0,4) !== 'dev'), - "destination" : p + defaultDocument + "destination" : defaultDocument, + "relative" : true } } }); From d0c05f662838722579f9e5c10327e9b1ea52785d Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Thu, 16 Jan 2014 14:16:47 +0100 Subject: [PATCH 03/21] fixed issue #734: foxx cookie and route problem --- CHANGELOG | 4 ++++ js/server/modules/org/arangodb/foxx/authentication.js | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) mode change 100644 => 100755 js/server/modules/org/arangodb/foxx/authentication.js diff --git a/CHANGELOG b/CHANGELOG index 95c01232d4..22ed2b851f 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,10 +1,13 @@ v1.4.6 
(XXXX-XX-XX) ------------------- +* fixed issue #734: foxx cookie and route problem + * added method `fm.configJson` for arangosh * include `startupPath` in result of API `/_api/foxx/config` + v1.4.5 (2014-01-15) ------------------- @@ -30,6 +33,7 @@ v1.4.5 (2014-01-15) * added override file "arangod.conf.local" (and co) + v1.4.4 (2013-12-24) ------------------- diff --git a/js/server/modules/org/arangodb/foxx/authentication.js b/js/server/modules/org/arangodb/foxx/authentication.js old mode 100644 new mode 100755 index 7d0277da76..12cf55eb40 --- a/js/server/modules/org/arangodb/foxx/authentication.js +++ b/js/server/modules/org/arangodb/foxx/authentication.js @@ -1017,7 +1017,7 @@ CookieAuthentication = function (applicationContext, options) { this._options = { name: options.name || this._applicationContext.name + "-session", lifetime: options.lifetime || 3600, - path: options.path || this._applicationContext.mount, + path: options.path || "/", domain: options.path || undefined, secure: options.secure || false, httpOnly: options.httpOnly || false From d7c3ac13c9241365fea4768c8a25387ca35a16b5 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Thu, 16 Jan 2014 17:17:59 +0100 Subject: [PATCH 04/21] fixed compile warning --- lib/BasicsC/files.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/BasicsC/files.c b/lib/BasicsC/files.c index c1ad516ae9..c4d4d5ffba 100644 --- a/lib/BasicsC/files.c +++ b/lib/BasicsC/files.c @@ -281,7 +281,7 @@ static void ListTreeRecursively (char const* full, /// @brief locates a environment given configuration directory //////////////////////////////////////////////////////////////////////////////// -static char* LocateConfigDirectoryEnv () { +static char* LocateConfigDirectoryEnv (void) { char const* v; char* r; From 0ddf9996d848c0d739dd853209f4fd29c2a0fed7 Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Fri, 17 Jan 2014 11:33:31 +0100 Subject: [PATCH 05/21] fix indentation --- Documentation/InstallationManual/InstallingTOC.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Documentation/InstallationManual/InstallingTOC.md b/Documentation/InstallationManual/InstallingTOC.md index 7fc2c994f2..568fd16578 100644 --- a/Documentation/InstallationManual/InstallingTOC.md +++ b/Documentation/InstallationManual/InstallingTOC.md @@ -3,8 +3,8 @@ TOC {#InstallingTOC} - @ref Installing - @ref InstallingLinux - - @ref InstallingLinuxPackageManager - - @ref InstallingDebian + - @ref InstallingLinuxPackageManager + - @ref InstallingDebian - @ref InstallingMacOSX - @ref InstallingMacOSXHomebrew - @ref InstallingMacOSXAppStore From c7380398d9047d5f3e602eabb4026156a88ca34e Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Fri, 17 Jan 2014 14:04:10 +0100 Subject: [PATCH 06/21] added fm.rescan() method --- CHANGELOG | 2 ++ js/actions/api-foxx.js | 18 ++++++++++++++++++ js/client/modules/org/arangodb/foxx/manager.js | 17 +++++++++++++++++ js/server/modules/org/arangodb/foxx/manager.js | 11 +++++++++++ 4 files changed, 48 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index 22ed2b851f..a8fc04e555 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,6 +1,8 @@ v1.4.6 (XXXX-XX-XX) ------------------- +* added fm.rescan() method for Foxx-Manager + * fixed issue #734: foxx cookie and route problem * added method `fm.configJson` for arangosh diff --git a/js/actions/api-foxx.js b/js/actions/api-foxx.js index 00c8eaff14..b6d5fde6ed 100644 --- a/js/actions/api-foxx.js +++ b/js/actions/api-foxx.js @@ -145,6 +145,24 @@ actions.defineHttp({ } }) }); + 
+//////////////////////////////////////////////////////////////////////////////// +/// @brief rescans the FOXX application directory +//////////////////////////////////////////////////////////////////////////////// + +actions.defineHttp({ + url : "_admin/foxx/rescan", + context : "admin", + prefix : false, + + callback: easyPostCallback({ + body: true, + callback: function (body) { + foxxManager.scanAppDirectory(); + return true; + } + }) +}); //////////////////////////////////////////////////////////////////////////////// /// @brief sets up a FOXX application diff --git a/js/client/modules/org/arangodb/foxx/manager.js b/js/client/modules/org/arangodb/foxx/manager.js index c085430a44..0b4659a438 100644 --- a/js/client/modules/org/arangodb/foxx/manager.js +++ b/js/client/modules/org/arangodb/foxx/manager.js @@ -684,6 +684,9 @@ exports.run = function (args) { exports.mount(args[1], args[2]); } } + else if (type === 'rescan') { + exports.rescan(); + } else if (type === 'setup') { exports.setup(args[1]); } @@ -822,6 +825,18 @@ exports.fetch = function (type, location, version) { return arangosh.checkRequestResult(res); }; +//////////////////////////////////////////////////////////////////////////////// +/// @brief rescans the FOXX application directory +//////////////////////////////////////////////////////////////////////////////// + +exports.rescan = function () { + 'use strict'; + + var res = arango.POST("/_admin/foxx/rescan", ""); + + return arangosh.checkRequestResult(res); +}; + //////////////////////////////////////////////////////////////////////////////// /// @brief mounts a FOXX application //////////////////////////////////////////////////////////////////////////////// @@ -1436,6 +1451,8 @@ exports.help = function () { "setup" : "setup executes the setup script (app must already be mounted)", "install" : "fetches a foxx application from the central foxx-apps repository, mounts it to a local URL " + "and sets it up", + "rescan" : "rescans the foxx application directory on the server side (only needed if server-side apps " + + "directory is modified by other processes)", "replace" : "replaces an aleady existing foxx application with the current local version", "teardown" : "teardown execute the teardown script (app must be still be mounted)", "unmount" : "unmounts a mounted foxx application", diff --git a/js/server/modules/org/arangodb/foxx/manager.js b/js/server/modules/org/arangodb/foxx/manager.js index 5a420997bf..149f9ae471 100755 --- a/js/server/modules/org/arangodb/foxx/manager.js +++ b/js/server/modules/org/arangodb/foxx/manager.js @@ -823,6 +823,17 @@ exports.scanAppDirectory = function () { scanDirectory(module.appPath()); }; +//////////////////////////////////////////////////////////////////////////////// +/// @brief rescans the FOXX application directory +/// this function is a trampoline for scanAppDirectory +/// the shorter function name is only here to keep compatibility with the +/// client-side foxx manager +//////////////////////////////////////////////////////////////////////////////// + +exports.rescan = function () { + return exports.scanAppDirectory(); +}; + //////////////////////////////////////////////////////////////////////////////// /// @brief mounts a FOXX application /// From 622edb0fee9d2dab81ade7fbe221730aab62eea4 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Fri, 17 Jan 2014 17:10:06 +0100 Subject: [PATCH 07/21] updated documentation --- lib/Admin/RestJobHandler.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/lib/Admin/RestJobHandler.cpp 
b/lib/Admin/RestJobHandler.cpp index 4f005d83bf..25c1dffb22 100644 --- a/lib/Admin/RestJobHandler.cpp +++ b/lib/Admin/RestJobHandler.cpp @@ -413,15 +413,12 @@ void RestJobHandler::getJob () { /// /// @RESTURLPARAM{type,string,required} /// The type of jobs to delete. `type` can be: -/// /// - `all`: deletes all jobs results. Currently executing or queued async jobs /// will not be stopped by this call. -/// /// - `expired`: deletes expired results. To determine the expiration status of /// a result, pass the `stamp` URL parameter. `stamp` needs to be a UNIX /// timestamp, and all async job results created at a lower timestamp will be /// deleted. -/// /// - an actual job-id: in this case, the call will remove the result of the /// specified async job. If the job is currently executing or queued, it will /// not be aborted. From 583878176b6b9a0df24949160ac5851c3ab79ee1 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Fri, 17 Jan 2014 22:39:04 +0100 Subject: [PATCH 08/21] issue #736: AQL function to parse collection and key from document handle Conflicts: CHANGELOG --- CHANGELOG | 111 +++++++++++++++++++++ Documentation/UserManual/Aql.md | 13 +++ arangod/Ahuacatl/ahuacatl-functions.c | 1 + js/server/modules/org/arangodb/ahuacatl.js | 31 ++++++ js/server/tests/ahuacatl-functions.js | 84 ++++++++++++++++ 5 files changed, 240 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index a8fc04e555..e7c8db9a56 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,6 +1,117 @@ +v1.5.0 (XXXX-XX-XX) +------------------- + +* issue #738: added __dirname, __filename pseudo-globals. Fixes #733. (@by pluma) + +* allow `\n` (as well as `\r\n`) as line terminator in batch requests sent to + `/_api/batch` HTTP API. + +* use `--data-binary` instead of `--data` parameter in generated cURL examples + +* issue #703: Also show path of logfile for fm.config() + +* issue #675: Dropping a collection used in "graph" module breaks the graph + +* added "static" Graph.drop() method for graphs API + +* fixed issue #695: arangosh server.password error + +* use pretty-printing in `--console` mode by defaul + +* added `check-server` binary for testing + +* simplified ArangoDB startup options + + Some startup options are now superfluous or their usage is simplified. The + following options have been changed: + + * `--javascript.modules-path`: this option has been removed. The modules paths + are determined by arangod and arangosh automatically based on the value of + `--javascript.startup-directory`. + + If the option is set on startup, it is ignored so startup will not abort with + an error `unrecognized option`. + + * `--javascript.action-directory`: this option has been removed. The actions + directory is determined by arangod automatically based on the value of + `--javascript.startup-directory`. + + If the option is set on startup, it is ignored so startup will not abort with + an error `unrecognized option`. + + * `--javascript.package-path`: this option is still available but it is not + required anymore to set the standard package paths (e.g. `js/npm`). arangod + will automatically use this standard package path regardless of whether it + was specified via the options. + + It is possible to use this option to add additional package paths to the + standard value. + + Configuration files included with arangod are adjusted accordingly. 
+ +* layout of the graphs tab adapted to better fit with the other tabs + +* database selection is moved to the bottom right corner of the web interface + +* removed priority queues + + this feature was never advertised nor documented nor tested. + +* display internal attributes in document source view of web interface + +* removed separate shape collections + + When upgrading to ArangoDB 1.5, existing collections will be converted to include + shapes and attribute markers in the datafiles instead of using separate files for + shapes. + + When a collection is converted, existing shapes from the SHAPES directory will + be written to a new datafile in the collection directory, and the SHAPES directory + will be removed afterwards. + + This saves up to 2 MB of memory and disk space for each collection + (savings are higher, the less different shapes there are in a collection). + Additionally, one less file descriptor per opened collection will be used. + + When creating a new collection, the amount of sync calls may be reduced. The same + may be true for documents with yet-unknown shapes. This may help performance + in these cases. + +* added AQL functions `NTH` and `POSITION` + +* added signal handler for arangosh to save last command in more cases + +* added extra prompt placeholders for arangosh: + - `%e`: current endpoint + - `%u`: current user + +* added arangosh option `--javascript.gc-interval` to control amount of + garbage collection performed by arangosh + +* fixed issue #651: Allow addEdge() to take vertex ids in the JS library + +* removed command-line option `--log.format` + + In previous versions, this option did not have an effect for most log messages, so + it got removed. + +* removed C++ logger implementation + + Logging inside ArangoDB is now done using the LOG_XXX() macros. The LOGGER_XXX() + macros are gone. + +* added collection status "loading" + +* added the option to return the number of elements indexed to the + result of .getIndexes() for each index. This is + currently only implemented for hash indices and skiplist indices. + + v1.4.6 (XXXX-XX-XX) ------------------- +* issue #736: AQL function to parse collection and key from document handle + * added fm.rescan() method for Foxx-Manager * fixed issue #734: foxx cookie and route problem diff --git a/Documentation/UserManual/Aql.md b/Documentation/UserManual/Aql.md index 5d80b71d38..07d54e00dc 100644 --- a/Documentation/UserManual/Aql.md +++ b/Documentation/UserManual/Aql.md @@ -1249,6 +1249,19 @@ AQL supports the following functions to operate on document values: RETURN KEEP(doc, 'firstname', 'name', 'likes') +- @FN{PARSE_IDENTIFIER(@FA{document-handle})}: parses the document handle specified in + @FA{document-handle} and returns a the handle's individual parts a separate attributes. + This function can be used to easily determine the collection name and key from a given document. + The @FA{document-handle} can either be a regular document from a collection, or a document + identifier string (e.g. `_users/1234`). Passing either a non-string or a non-document or a + document without an `_id` attribute will result in an error. 
+ + RETURN PARSE_IDENTIFIER('_users/my-user') + [ { "collection" : "_users", "key" : "my-user" } ] + + RETURN PARSE_IDENTIFIER({ "_id" : "mycollection/mykey", "value" : "some value" }) + [ { "collection" : "mycollection", "key" : "mykey" } ] + @subsubsection AqlFunctionsGeo Geo functions AQL offers the following functions to filter data based on geo indexes: diff --git a/arangod/Ahuacatl/ahuacatl-functions.c b/arangod/Ahuacatl/ahuacatl-functions.c index 1ef9d070ec..c7abace7d0 100644 --- a/arangod/Ahuacatl/ahuacatl-functions.c +++ b/arangod/Ahuacatl/ahuacatl-functions.c @@ -712,6 +712,7 @@ TRI_associative_pointer_t* TRI_CreateFunctionsAql (void) { REGISTER_FUNCTION("NOT_NULL", "NOT_NULL", true, false, ".|+", NULL); REGISTER_FUNCTION("FIRST_LIST", "FIRST_LIST", true, false, ".|+", NULL); REGISTER_FUNCTION("FIRST_DOCUMENT", "FIRST_DOCUMENT", true, false, ".|+", NULL); + REGISTER_FUNCTION("PARSE_IDENTIFIER", "PARSE_IDENTIFIER", true, false, ".", NULL); if (! result) { TRI_FreeFunctionsAql(functions); diff --git a/js/server/modules/org/arangodb/ahuacatl.js b/js/server/modules/org/arangodb/ahuacatl.js index 19e0eb6f4a..50dbd34a72 100644 --- a/js/server/modules/org/arangodb/ahuacatl.js +++ b/js/server/modules/org/arangodb/ahuacatl.js @@ -3217,6 +3217,36 @@ function FIRST_DOCUMENT () { return null; } +//////////////////////////////////////////////////////////////////////////////// +/// @brief return the parts of a document identifier separately +/// +/// returns a document with the attributes `collection` and `key` or fails if +/// the individual parts cannot be determined. +//////////////////////////////////////////////////////////////////////////////// + +function PARSE_IDENTIFIER (value) { + "use strict"; + + if (TYPEWEIGHT(value) === TYPEWEIGHT_STRING) { + var parts = value.split('/'); + if (parts.length === 2) { + return { + collection: parts[0], + key: parts[1] + }; + } + // fall through intentional + } + else if (TYPEWEIGHT(value) === TYPEWEIGHT_DOCUMENT) { + if (value.hasOwnProperty('_id')) { + return PARSE_IDENTIFIER(value._id); + } + // fall through intentional + } + + THROW(INTERNAL.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "PARSE_IDENTIFIER"); +} + //////////////////////////////////////////////////////////////////////////////// /// @brief check whether a document has a specific attribute //////////////////////////////////////////////////////////////////////////////// @@ -4009,6 +4039,7 @@ exports.GRAPH_NEIGHBORS = GRAPH_NEIGHBORS; exports.NOT_NULL = NOT_NULL; exports.FIRST_LIST = FIRST_LIST; exports.FIRST_DOCUMENT = FIRST_DOCUMENT; +exports.PARSE_IDENTIFIER = PARSE_IDENTIFIER; exports.HAS = HAS; exports.ATTRIBUTES = ATTRIBUTES; exports.UNSET = UNSET; diff --git a/js/server/tests/ahuacatl-functions.js b/js/server/tests/ahuacatl-functions.js index 0a5501910c..8abf40d3a8 100644 --- a/js/server/tests/ahuacatl-functions.js +++ b/js/server/tests/ahuacatl-functions.js @@ -1726,6 +1726,90 @@ function ahuacatlFunctionsTestSuite () { assertEqual(expected, actual); }, +//////////////////////////////////////////////////////////////////////////////// +/// @brief test parse identifier function +//////////////////////////////////////////////////////////////////////////////// + + testParseIdentifier : function () { + var actual; + + actual = getQueryResults("RETURN PARSE_IDENTIFIER('foo/bar')"); + assertEqual([ { collection: 'foo', key: 'bar' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER('this-is-a-collection-name/and-this-is-an-id')"); + assertEqual([ { collection: 
'this-is-a-collection-name', key: 'and-this-is-an-id' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER('MY_COLLECTION/MY_DOC')"); + assertEqual([ { collection: 'MY_COLLECTION', key: 'MY_DOC' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER('_users/AbC')"); + assertEqual([ { collection: '_users', key: 'AbC' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER({ _id: 'foo/bar', value: 'baz' })"); + assertEqual([ { collection: 'foo', key: 'bar' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER({ ignore: true, _id: '_system/VALUE', value: 'baz' })"); + assertEqual([ { collection: '_system', key: 'VALUE' } ], actual); + + actual = getQueryResults("RETURN PARSE_IDENTIFIER({ value: 123, _id: 'Some-Odd-Collection/THIS_IS_THE_KEY' })"); + assertEqual([ { collection: 'Some-Odd-Collection', key: 'THIS_IS_THE_KEY' } ], actual); + }, + +//////////////////////////////////////////////////////////////////////////////// +/// @brief test parse identifier function +//////////////////////////////////////////////////////////////////////////////// + + testParseIdentifierCollection : function () { + var cn = "UnitTestsAhuacatlFunctions"; + + internal.db._drop(cn); + var cx = internal.db._create(cn); + cx.save({ "title" : "123", "value" : 456, "_key" : "foobar" }); + cx.save({ "_key" : "so-this-is-it", "title" : "nada", "value" : 123 }); + + var expected, actual; + + expected = [ { collection: cn, key: "foobar" } ]; + actual = getQueryResults("RETURN PARSE_IDENTIFIER(DOCUMENT(CONCAT(@cn, '/', @key)))", { cn: cn, key: "foobar" }); + assertEqual(expected, actual); + + expected = [ { collection: cn, key: "foobar" } ]; + actual = getQueryResults("RETURN PARSE_IDENTIFIER(DOCUMENT(CONCAT(@cn, '/', @key)))", { cn: cn, key: "foobar" }); + assertEqual(expected, actual); + + expected = [ { collection: cn, key: "foobar" } ]; + actual = getQueryResults("RETURN PARSE_IDENTIFIER(DOCUMENT(CONCAT(@cn, '/', 'foobar')))", { cn: cn }); + assertEqual(expected, actual); + + expected = [ { collection: cn, key: "foobar" } ]; + actual = getQueryResults("RETURN PARSE_IDENTIFIER(DOCUMENT([ @key ])[0])", { key: "UnitTestsAhuacatlFunctions/foobar" }); + assertEqual(expected, actual); + + expected = [ { collection: cn, key: "so-this-is-it" } ]; + actual = getQueryResults("RETURN PARSE_IDENTIFIER(DOCUMENT([ 'UnitTestsAhuacatlFunctions/so-this-is-it' ])[0])"); + assertEqual(expected, actual); + + internal.db._drop(cn); + }, + +//////////////////////////////////////////////////////////////////////////////// +/// @brief test parse identifier function +//////////////////////////////////////////////////////////////////////////////// + + testParseIdentifier : function () { + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH.code, "RETURN PARSE_IDENTIFIER()"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH.code, "RETURN PARSE_IDENTIFIER('foo', 'bar')"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER(null)"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER(false)"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER(3)"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER(\"foo\")"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER('foo bar')"); + 
assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER('foo/bar/baz')"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER([ ])"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER({ })"); + assertQueryError(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, "RETURN PARSE_IDENTIFIER({ foo: 'bar' })"); + }, + //////////////////////////////////////////////////////////////////////////////// /// @brief test document function //////////////////////////////////////////////////////////////////////////////// From 5c5b787e835639a0855d0df87cfbe1096ff58d6c Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Sat, 18 Jan 2014 01:22:26 +0100 Subject: [PATCH 09/21] slightly updated error messages --- arangosh/V8Client/arangoimp.cpp | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/arangosh/V8Client/arangoimp.cpp b/arangosh/V8Client/arangoimp.cpp index df331cc388..96bafe932b 100644 --- a/arangosh/V8Client/arangoimp.cpp +++ b/arangosh/V8Client/arangoimp.cpp @@ -304,7 +304,8 @@ int main (int argc, char* argv[]) { BaseClient.sslProtocol(), false); - if (! ClientConnection->isConnected() || ClientConnection->getLastHttpReturnCode() != HttpResponse::OK) { + if (! ClientConnection->isConnected() || + ClientConnection->getLastHttpReturnCode() != HttpResponse::OK) { cerr << "Could not connect to endpoint '" << BaseClient.endpointServer()->getSpecification() << "', database: '" << BaseClient.databaseName() << "'" << endl; cerr << "Error message: '" << ClientConnection->getErrorMessage() << "'" << endl; @@ -359,18 +360,18 @@ int main (int argc, char* argv[]) { // collection name if (CollectionName == "") { - cerr << "collection name is missing." << endl; + cerr << "Collection name is missing." << endl; TRI_EXIT_FUNCTION(EXIT_FAILURE, NULL); } // filename if (FileName == "") { - cerr << "file name is missing." << endl; + cerr << "File name is missing." << endl; TRI_EXIT_FUNCTION(EXIT_FAILURE, NULL); } if (FileName != "-" && ! FileUtils::isRegularFile(FileName)) { - cerr << "file '" << FileName << "' is not a regular file." << endl; + cerr << "Cannot open file '" << FileName << "'" << endl; TRI_EXIT_FUNCTION(EXIT_FAILURE, NULL); } @@ -416,9 +417,6 @@ int main (int argc, char* argv[]) { cerr << "error message: " << ih.getErrorMessage() << endl; } - // calling dispose in V8 3.10.x causes a segfault. the v8 docs says its not necessary to call it upon program termination - // v8::V8::Dispose(); - TRIAGENS_REST_SHUTDOWN; arangoimpExitFunction(ret, NULL); From 259b748e223aad62e00213d310220f73aa774235 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Sat, 18 Jan 2014 01:44:51 +0100 Subject: [PATCH 10/21] some more notes on importing edges and attribute meanings --- Documentation/ToolsManual/ImpManual.md | 50 ++++++++++++++++++++++- Documentation/ToolsManual/ImpManualTOC.md | 2 + 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/Documentation/ToolsManual/ImpManual.md b/Documentation/ToolsManual/ImpManual.md index ee8d0611db..083ca54a5a 100644 --- a/Documentation/ToolsManual/ImpManual.md +++ b/Documentation/ToolsManual/ImpManual.md @@ -52,7 +52,8 @@ specify a password, you will be prompted for one. Note that the collection (`users` in this case) must already exist or the import will fail. If you want to create a new collection with the import data, you need to specify the `--create-collection` option. 
Note that it is only possible to -create a document collection using the `--create-collection` flag. +create a document collection using the `--create-collection` flag, and no edge +collections. unix> arangoimp --file "data.json" --type json --collection "users" --create-collection true @@ -114,3 +115,50 @@ with the `--separator` argument. An example command line to execute the TSV import is: unix> arangoimp --file "data.tsv" --type tsv --collection "users" + +Importing into an Edge Collection {#ImpManualEdges} +=================================================== + +arangoimp can also be used to import data into an existing edge collection. +The import data must, for each edge to import, contain at least the `_from` and +`_to` attributes. These indicate which two other documents the edge should connect. +It is necessary that these attributes are set for all records, and point to +valid document ids in existing collections. + +Example: + + { "_from" : "users/1234", "_to" : "users/4321", "desc" : "1234 is connected to 4321" } + +Note that the edge collection must already exist when the import is started. Using +the `--create-collection` flag will not work because arangoimp will always try to +create a regular document collection if the target collection does not exist. + +Attribute Naming and Special Attributes {#ImpManualAttributes} +============================================================== + +Attributes whose names start with an underscore are treated in a special way by +ArangoDB: + +- the optional `_key` attribute contains the document's key. If specified, the value + must be formally valid (e.g. must be a string and conform to the naming conventions + for @ref DocumentKeys). Additionally, the key value must be unique within the + collection the import is run for. +- `_from`: when importing into an edge collection, this attribute contains the id + of one of the documents connected by the edge. The value of `_from` must be a + syntactically valid document id and the referred collection must exist. +- `_to`: when importing into an edge collection, this attribute contains the id + of the other document connected by the edge. The value of `_to` must be a + syntactically valid document id and the referred collection must exist. +- `_rev`: this attribute contains the revision number of a document. However, the + revision numbers are managed by ArangoDB and cannot be specified on import. Thus + any value in this attribute is ignored on import. +- all other attributes starting with an underscore are discarded on import without + any warnings. + +If you import values into `_key`, you should make sure they are valid and unique. + +When importing data into an edge collection, you should make sure that all import +documents contain `_from` and `_to` and that their values point to existing documents. + +Finally you should make sure that all other attributes in the import file do not +start with an underscore - otherwise they might be discarded.
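For illustration only (the file name `edges.json` and the collection name `relations` below are placeholders, not taken from the manual), an import into an already existing edge collection could look like this:

    unix> arangoimp --file "edges.json" --type json --collection "relations"

Here `edges.json` would be a JSON import file as described above, in which every document carries valid `_from` and `_to` values, for example:

    { "_from" : "users/1234", "_to" : "users/4321", "type" : "friend" }

Because `--create-collection` always creates a document collection, the edge collection `relations` would have to be created before the import is started.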
diff --git a/Documentation/ToolsManual/ImpManualTOC.md b/Documentation/ToolsManual/ImpManualTOC.md index 9fc77f0fe6..dd3e2a9fbe 100644 --- a/Documentation/ToolsManual/ImpManualTOC.md +++ b/Documentation/ToolsManual/ImpManualTOC.md @@ -5,3 +5,5 @@ TOC {#ImpManualTOC} - @ref ImpManualJson - @ref ImpManualCsv - @ref ImpManualTsv + - @ref ImpManualEdges + - @ref ImpManualAttributes From 251c2fa44837430f809d4e005197d24918a40da4 Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Mon, 20 Jan 2014 22:09:44 +0100 Subject: [PATCH 11/21] release version 1.4.6 --- CHANGELOG | 111 +------- Makefile.in | 19 +- VERSION | 2 +- aclocal.m4 | 154 ++++++++--- build.h | 2 +- config/config.guess | 30 ++- config/config.sub | 17 +- config/missing | 4 +- configure | 247 ++++++++---------- configure.ac | 2 +- js/apps/system/aardvark/api-docs.json | 2 +- js/apps/system/aardvark/api-docs/batch.json | 2 +- .../system/aardvark/api-docs/database.json | 2 +- .../system/aardvark/api-docs/endpoint.json | 2 +- js/apps/system/aardvark/api-docs/job.json | 6 +- .../system/aardvark/api-docs/replication.json | 10 +- js/apps/system/aardvark/api-docs/system.json | 2 +- 17 files changed, 290 insertions(+), 324 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index e7c8db9a56..a41c2378fe 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,113 +1,4 @@ -v1.5.0 (XXXX-XX-XX) -------------------- - -* issue #738: added __dirname, __filename pseudo-globals. Fixes #733. (@by pluma) - -* allow `\n` (as well as `\r\n`) as line terminator in batch requests sent to - `/_api/batch` HTTP API. - -* use `--data-binary` instead of `--data` parameter in generated cURL examples - -* issue #703: Also show path of logfile for fm.config() - -* issue #675: Dropping a collection used in "graph" module breaks the graph - -* added "static" Graph.drop() method for graphs API - -* fixed issue #695: arangosh server.password error - -* use pretty-printing in `--console` mode by defaul - -* added `check-server` binary for testing - -* simplified ArangoDB startup options - - Some startup options are now superfluous or their usage is simplified. The - following options have been changed: - - * `--javascript.modules-path`: this option has been removed. The modules paths - are determined by arangod and arangosh automatically based on the value of - `--javascript.startup-directory`. - - If the option is set on startup, it is ignored so startup will not abort with - an error `unrecognized option`. - - * `--javascript.action-directory`: this option has been removed. The actions - directory is determined by arangod automatically based on the value of - `--javascript.startup-directory`. - - If the option is set on startup, it is ignored so startup will not abort with - an error `unrecognized option`. - - * `--javascript.package-path`: this option is still available but it is not - required anymore to set the standard package paths (e.g. `js/npm`). arangod - will automatically use this standard package path regardless of whether it - was specified via the options. - - It is possible to use this option to add additional package paths to the - standard value. - - Configuration files included with arangod are adjusted accordingly. - -* layout of the graphs tab adapted to better fit with the other tabs - -* database selection is moved to the bottom right corner of the web interface - -* removed priority queues - - this feature was never advertised nor documented nor tested. 
- -* display internal attributes in document source view of web interface - -* removed separate shape collections - - When upgrading to ArangoDB 1.5, existing collections will be converted to include - shapes and attribute markers in the datafiles instead of using separate files for - shapes. - - When a collection is converted, existing shapes from the SHAPES directory will - be written to a new datafile in the collection directory, and the SHAPES directory - will be removed afterwards. - - This saves up to 2 MB of memory and disk space for each collection - (savings are higher, the less different shapes there are in a collection). - Additionally, one less file descriptor per opened collection will be used. - - When creating a new collection, the amount of sync calls may be reduced. The same - may be true for documents with yet-unknown shapes. This may help performance - in these cases. - -* added AQL functions `NTH` and `POSITION` - -* added signal handler for arangosh to save last command in more cases - -* added extra prompt placeholders for arangosh: - - `%e`: current endpoint - - `%u`: current user - -* added arangosh option `--javascript.gc-interval` to control amount of - garbage collection performed by arangosh - -* fixed issue #651: Allow addEdge() to take vertex ids in the JS library - -* removed command-line option `--log.format` - - In previous versions, this option did not have an effect for most log messages, so - it got removed. - -* removed C++ logger implementation - - Logging inside ArangoDB is now done using the LOG_XXX() macros. The LOGGER_XXX() - macros are gone. - -* added collection status "loading" - -* added the option to return the number of elements indexed to the - result of .getIndexes() for each index. This is - currently only implemented for hash indices and skiplist indices. - - -v1.4.6 (XXXX-XX-XX) +v1.4.6 (2014-01-20) ------------------- * issue #736: AQL function to parse collection and key from document handle diff --git a/Makefile.in b/Makefile.in index a307725070..8fddb08a7f 100644 --- a/Makefile.in +++ b/Makefile.in @@ -1,4 +1,4 @@ -# Makefile.in generated by automake 1.13.4 from Makefile.am. +# Makefile.in generated by automake 1.14.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. @@ -2122,8 +2122,8 @@ $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) $(am__aclocal_m4_deps): config/config.h: config/stamp-h1 - @if test ! -f $@; then rm -f config/stamp-h1; else :; fi - @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) config/stamp-h1; else :; fi + @test -f $@ || rm -f config/stamp-h1 + @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) config/stamp-h1 config/stamp-h1: $(top_srcdir)/config/config.h.in $(top_builddir)/config.status @rm -f config/stamp-h1 @@ -2134,8 +2134,8 @@ $(top_srcdir)/config/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) touch $@ lib/BasicsC/local-configuration.h: lib/BasicsC/stamp-h2 - @if test ! -f $@; then rm -f lib/BasicsC/stamp-h2; else :; fi - @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) lib/BasicsC/stamp-h2; else :; fi + @test -f $@ || rm -f lib/BasicsC/stamp-h2 + @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) lib/BasicsC/stamp-h2 lib/BasicsC/stamp-h2: $(top_srcdir)/lib/BasicsC/local-configuration.h.in $(top_builddir)/config.status @rm -f lib/BasicsC/stamp-h2 @@ -7087,10 +7087,16 @@ dist-xz: distdir $(am__post_remove_distdir) dist-tarZ: distdir + @echo WARNING: "Support for shar distribution archives is" \ + "deprecated." 
>&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir + @echo WARNING: "Support for distribution archives compressed with" \ + "legacy program 'compress' is deprecated." >&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__post_remove_distdir) @@ -7132,9 +7138,10 @@ distcheck: dist && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ - && ../configure --srcdir=.. --prefix="$$dc_install_base" \ + && ../configure \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ + --srcdir=.. --prefix="$$dc_install_base" \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ diff --git a/VERSION b/VERSION index e516bb9d96..c514bd85c2 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.4.5 +1.4.6 diff --git a/aclocal.m4 b/aclocal.m4 index ff0cf18e03..15ce39c029 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -1,4 +1,4 @@ -# generated automatically by aclocal 1.13.4 -*- Autoconf -*- +# generated automatically by aclocal 1.14.1 -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. @@ -32,10 +32,10 @@ To do so, use the procedure documented by the package, typically 'autoreconf'.]) # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.13' +[am__api_version='1.14' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.13.4], [], +m4_if([$1], [1.14.1], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) @@ -51,7 +51,7 @@ m4_define([_AM_AUTOCONF_VERSION], []) # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.13.4])dnl +[AM_AUTOMAKE_VERSION([1.14.1])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) @@ -418,6 +418,12 @@ AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. +dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. +m4_define([AC_PROG_CC], +m4_defn([AC_PROG_CC]) +[_AM_PROG_CC_C_O +]) + # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- @@ -526,7 +532,48 @@ dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl -]) + +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. 
+if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) + fi +fi]) dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further @@ -534,7 +581,6 @@ dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) - # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. @@ -682,38 +728,6 @@ AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) -# Copyright (C) 1999-2013 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_CC_C_O -# -------------- -# Like AC_PROG_CC_C_O, but changed for automake. -AC_DEFUN([AM_PROG_CC_C_O], -[AC_REQUIRE([AC_PROG_CC_C_O])dnl -AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -AC_REQUIRE_AUX_FILE([compile])dnl -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $[2] | sed ['s/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/']` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi -dnl Make sure AC_PROG_CC is never called again, or it will override our -dnl setting of CC. -m4_define([AC_PROG_CC], - [m4_fatal([AC_PROG_CC cannot be called after AM_PROG_CC_C_O])]) -]) - # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. @@ -784,6 +798,70 @@ AC_DEFUN([_AM_SET_OPTIONS], AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) +# Copyright (C) 1999-2013 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. 
+ +# _AM_PROG_CC_C_O +# --------------- +# Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC +# to automatically call this. +AC_DEFUN([_AM_PROG_CC_C_O], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +AC_REQUIRE_AUX_FILE([compile])dnl +AC_LANG_PUSH([C])dnl +AC_CACHE_CHECK( + [whether $CC understands -c and -o together], + [am_cv_prog_cc_c_o], + [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i]) +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. + # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +AC_LANG_POP([C])]) + +# For backward compatibility. +AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) + +# Copyright (C) 2001-2013 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_RUN_LOG(COMMAND) +# ------------------- +# Run COMMAND, save the exit status in ac_status, and log it. +# (This has been adapted from Autoconf's _AC_RUN_LOG macro.) +AC_DEFUN([AM_RUN_LOG], +[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD + ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD + (exit $ac_status); }]) + # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. diff --git a/build.h b/build.h index acf6e6d1b4..d648255456 100644 --- a/build.h +++ b/build.h @@ -1 +1 @@ -#define TRI_VERSION "1.4.5" +#define TRI_VERSION "1.4.6" diff --git a/config/config.guess b/config/config.guess index b79252d6b1..9afd676206 100755 --- a/config/config.guess +++ b/config/config.guess @@ -2,7 +2,7 @@ # Attempt to guess a canonical system name. # Copyright 1992-2013 Free Software Foundation, Inc. 
-timestamp='2013-06-10' +timestamp='2013-11-29' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by @@ -1260,16 +1260,26 @@ EOF if test "$UNAME_PROCESSOR" = unknown ; then UNAME_PROCESSOR=powerpc fi - if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - case $UNAME_PROCESSOR in - i386) UNAME_PROCESSOR=x86_64 ;; - powerpc) UNAME_PROCESSOR=powerpc64 ;; - esac + if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi fi + elif test "$UNAME_PROCESSOR" = i386 ; then + # Avoid executing cc on OS X 10.9, as it ships with a stub + # that puts up a graphical alert prompting to install + # developer tools. Any system running Mac OS X 10.7 or + # later (Darwin 11 and later) is required to have a 64-bit + # processor. This is not true of the ARM version of Darwin + # that Apple uses in portable devices. + UNAME_PROCESSOR=x86_64 fi echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; diff --git a/config/config.sub b/config/config.sub index 8b612ab89d..61cb4bc22d 100755 --- a/config/config.sub +++ b/config/config.sub @@ -2,7 +2,7 @@ # Configuration validation subroutine script. # Copyright 1992-2013 Free Software Foundation, Inc. -timestamp='2013-04-24' +timestamp='2013-10-01' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by @@ -257,7 +257,7 @@ case $basic_machine in | avr | avr32 \ | be32 | be64 \ | bfin \ - | c4x | clipper \ + | c4x | c8051 | clipper \ | d10v | d30v | dlx | dsp16xx \ | epiphany \ | fido | fr30 | frv \ @@ -265,6 +265,7 @@ case $basic_machine in | hexagon \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ + | k1om \ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ @@ -324,7 +325,7 @@ case $basic_machine in c6x) basic_machine=tic6x-unknown ;; - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) basic_machine=$basic_machine-unknown os=-none ;; @@ -372,7 +373,7 @@ case $basic_machine in | be32-* | be64-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* \ - | clipper-* | craynv-* | cydra-* \ + | c8051-* | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ @@ -381,6 +382,7 @@ case $basic_machine in | hexagon-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ + | k1om-* \ | le32-* | le64-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ @@ -794,7 +796,7 @@ case $basic_machine in os=-mingw64 ;; mingw32) - basic_machine=i386-pc + basic_machine=i686-pc os=-mingw32 ;; mingw32ce) @@ -830,7 +832,7 @@ case $basic_machine in basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; msys) - basic_machine=i386-pc + basic_machine=i686-pc os=-msys ;; mvs) @@ -1546,6 +1548,9 @@ case $basic_machine in c4x-* | tic4x-*) os=-coff ;; + c8051-*) + os=-elf + ;; hexagon-*) os=-elf ;; diff --git a/config/missing b/config/missing index 
cdea514931..db98974ff5 100755 --- a/config/missing +++ b/config/missing @@ -1,7 +1,7 @@ #! /bin/sh # Common wrapper for a few potentially missing GNU programs. -scriptversion=2012-06-26.16; # UTC +scriptversion=2013-10-28.13; # UTC # Copyright (C) 1996-2013 Free Software Foundation, Inc. # Originally written by Fran,cois Pinard , 1996. @@ -160,7 +160,7 @@ give_advice () ;; autom4te*) echo "You might have modified some maintainer files that require" - echo "the 'automa4te' program to be rebuilt." + echo "the 'autom4te' program to be rebuilt." program_details 'autom4te' ;; bison*|yacc*) diff --git a/configure b/configure index 9f5c2bc680..6798466688 100755 --- a/configure +++ b/configure @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.5. +# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.6. # # Report bugs to . # @@ -580,8 +580,8 @@ MAKEFLAGS= # Identity of this package. PACKAGE_NAME='triAGENS ArangoDB' PACKAGE_TARNAME='arangodb' -PACKAGE_VERSION='1.4.5' -PACKAGE_STRING='triAGENS ArangoDB 1.4.5' +PACKAGE_VERSION='1.4.6' +PACKAGE_STRING='triAGENS ArangoDB 1.4.6' PACKAGE_BUGREPORT='info@triagens.de' PACKAGE_URL='http://www.arangodb.org' @@ -1403,7 +1403,7 @@ if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures triAGENS ArangoDB 1.4.5 to adapt to many kinds of systems. +\`configure' configures triAGENS ArangoDB 1.4.6 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1474,7 +1474,7 @@ fi if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.5:";; + short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.6:";; esac cat <<\_ACEOF @@ -1605,7 +1605,7 @@ fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -triAGENS ArangoDB configure 1.4.5 +triAGENS ArangoDB configure 1.4.6 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -2070,7 +2070,7 @@ cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by triAGENS ArangoDB $as_me 1.4.5, which was +It was created by triAGENS ArangoDB $as_me 1.4.6, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2790,7 +2790,7 @@ if test x$tr_ARM == xyes; then fi -am__api_version='1.13' +am__api_version='1.14' # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or @@ -3276,7 +3276,7 @@ fi # Define the identity of the package. PACKAGE='arangodb' - VERSION='1.4.5' + VERSION='1.4.6' cat >>confdefs.h <<_ACEOF @@ -3327,6 +3327,47 @@ am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. 
+if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5 + fi +fi # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; @@ -4577,6 +4618,65 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 +$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } +if ${am_cv_prog_cc_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 + ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 +$as_echo "$am_cv_prog_cc_c_o" >&6; } +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. 
+ # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 @@ -5227,131 +5327,6 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test "x$CC" != xcc; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -fi -set dummy $CC; ac_cc=`$as_echo "$2" | - sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -# Make sure it works both with $CC and with simple cc. -# We do the test twice because some compilers refuse to overwrite an -# existing .o file with -o, though they will create one. -ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -rm -f conftest2.* -if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; -then - eval ac_cv_prog_cc_${ac_cc}_c_o=yes - if test "x$CC" != xcc; then - # Test first that cc exists at all. - if { ac_try='cc -c conftest.$ac_ext >&5' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' - rm -f conftest2.* - if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; - then - # cc works too. - : - else - # cc exists but doesn't like -o. - eval ac_cv_prog_cc_${ac_cc}_c_o=no - fi - fi - fi -else - eval ac_cv_prog_cc_${ac_cc}_c_o=no -fi -rm -f core conftest* - -fi -if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - -$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h - -fi - -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 @@ -9200,7 +9175,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by triAGENS ArangoDB $as_me 1.4.5, which was +This file was extended by triAGENS ArangoDB $as_me 1.4.6, which was generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -9267,7 +9242,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -triAGENS ArangoDB config.status 1.4.5 +triAGENS ArangoDB config.status 1.4.6 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff --git a/configure.ac b/configure.ac index 5ff9da3df8..dc30304fef 100644 --- a/configure.ac +++ b/configure.ac @@ -6,7 +6,7 @@ dnl ============================================================================ dnl --SECTION-- triAGENS GmbH Build Environment dnl ============================================================================ -AC_INIT([triAGENS ArangoDB], [1.4.5], [info@triagens.de], [arangodb], [http://www.arangodb.org]) +AC_INIT([triAGENS ArangoDB], [1.4.6], [info@triagens.de], [arangodb], [http://www.arangodb.org]) dnl ---------------------------------------------------------------------------- dnl auxillary directory for install-sh and missing diff --git a/js/apps/system/aardvark/api-docs.json b/js/apps/system/aardvark/api-docs.json index 226b5396bd..a44c25e962 100644 --- a/js/apps/system/aardvark/api-docs.json +++ b/js/apps/system/aardvark/api-docs.json @@ -1,6 +1,6 @@ { "swaggerVersion": "1.1", - "apiVersion": "1.4.5", + "apiVersion": "1.4.6", "apis": [ { "path": "api-docs/aqlfunction.{format}", diff --git a/js/apps/system/aardvark/api-docs/batch.json b/js/apps/system/aardvark/api-docs/batch.json index 5397747841..20ac0357b6 100644 --- a/js/apps/system/aardvark/api-docs/batch.json +++ b/js/apps/system/aardvark/api-docs/batch.json @@ -32,7 +32,7 @@ "notes": "Executes a batch request. A batch request can contain any number of other requests that can be sent to ArangoDB in isolation. The benefit of using batch requests is that batching requests requires less client/server roundtrips than when sending isolated requests.

All parts of a batch request are executed serially on the server. The server will return the results of all parts in a single response when all parts are finished.

Technically, a batch request is a multipart HTTP request, with content-type multipart/form-data. A batch request consists of an envelope and the individual batch part actions. Batch part actions are \"regular\" HTTP requests, including full header and an optional body. Multiple batch parts are separated by a boundary identifier. The boundary identifier is declared in the batch envelope. The MIME content-type for each individual batch part must be application/x-arango-batchpart.

The response sent by the server will be an HTTP 200 response, with an error summary header x-arango-errors. This header contains the number of batch parts that failed with an HTTP error code of at least 400.

The response sent by the server is a multipart response, too. It contains the individual HTTP responses for all batch parts, including the full HTTP result header (with status code and other potential headers) and an optional result body. The individual batch parts in the result are separated using the same boundary value as specified in the request.

The order of batch parts in the response will be the same as in the original client request. Clients can additionally use the Content-Id MIME header in a batch part to define an individual id for each batch part. The server will return this id in the batch part responses, too.

", "summary": "executes a batch request", "httpMethod": "POST", - "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.5\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", + "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", "nickname": "executesABatchRequest" } ], diff --git a/js/apps/system/aardvark/api-docs/database.json b/js/apps/system/aardvark/api-docs/database.json index 1770f9b7f1..71bf7a8ccc 100644 --- a/js/apps/system/aardvark/api-docs/database.json +++ b/js/apps/system/aardvark/api-docs/database.json @@ -74,7 +74,7 @@ "notes": "Retrieves information about the current database

The response is a JSON object with the following attributes:

- name: the name of the current database

- id: the id of the current database

- path: the filesystem path of the current database

- isSystem: whether or not the current database is the _system database

", "summary": "retrieves information about the current database", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.81124/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.60594/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "retrievesInformationAboutTheCurrentDatabase" } ], diff --git a/js/apps/system/aardvark/api-docs/endpoint.json b/js/apps/system/aardvark/api-docs/endpoint.json index 53d651754a..f1585e9882 100644 --- a/js/apps/system/aardvark/api-docs/endpoint.json +++ b/js/apps/system/aardvark/api-docs/endpoint.json @@ -24,7 +24,7 @@ "notes": "Returns a list of all configured endpoints the server is listening on. For each endpoint, the list of allowed databases is returned too if set.

The result is a JSON hash which has the endpoints as keys, and the list of mapped database names as values for each endpoint.

If a list of mapped databases is empty, it means that all databases can be accessed via the endpoint. If a list of mapped databases contains more than one database name, this means that any of the databases might be accessed via the endpoint, and the first database in the list will be treated as the default database for the endpoint. The default database will be used when an incoming request does not specify a database name in the request explicitly.

Note: retrieving the list of all endpoints is allowed in the system database only. Calling this action in any other database will make the server return an error.

", "summary": "returns a list of all endpoints", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:31124\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:30594\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", "nickname": "returnsAListOfAllEndpoints" } ], diff --git a/js/apps/system/aardvark/api-docs/job.json b/js/apps/system/aardvark/api-docs/job.json index fa87a1080d..593dcb3adf 100644 --- a/js/apps/system/aardvark/api-docs/job.json +++ b/js/apps/system/aardvark/api-docs/job.json @@ -28,7 +28,7 @@ "notes": "Returns the result of an async job identified by job-id. If the async job result is present on the server, the result will be removed from the list of result. That means this method can be called for each job-id once.

The method will return the original job result's headers and body, plus the additional HTTP header x-arango-async-job-id. If this header is present, then the job was found and the response contains the original job's result. If the header is not present, the job was not found and the response contains status information from the job manager.

", "summary": "Returns the result of an async job", "httpMethod": "PUT", - "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.5\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", + "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", "nickname": "ReturnsTheResultOfAnAsyncJob" } ], @@ -94,7 +94,7 @@ "paramType": "path", "required": "true", "name": "type", - "description": "The type of jobs to delete. type can be: " + "description": "The type of jobs to delete. type can be: - all: deletes all jobs results. Currently executing or queued async jobs will not be stopped by this call. - expired: deletes expired results. To determine the expiration status of a result, pass the stamp URL parameter. stamp needs to be a UNIX timestamp, and all async job results created at a lower timestamp will be deleted. - an actual job-id: in this case, the call will remove the result of the specified async job. If the job is currently executing or queued, it will not be aborted. " }, { "dataType": "Number", @@ -106,7 +106,7 @@ "notes": "Deletes either all job results, expired job results, or the result of a specific job. Clients can use this method to perform an eventual garbage collection of job results.

", "summary": "Deletes the result of async jobs", "httpMethod": "DELETE", - "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1389793241\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", + "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1390252233\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", "nickname": "DeletesTheResultOfAsyncJobs" } ], diff --git a/js/apps/system/aardvark/api-docs/replication.json b/js/apps/system/aardvark/api-docs/replication.json index 6b962b58c9..2fe00daa40 100644 --- a/js/apps/system/aardvark/api-docs/replication.json +++ b/js/apps/system/aardvark/api-docs/replication.json @@ -78,7 +78,7 @@ "notes": "Returns the current state of the server's replication logger. The state will include information about whether the logger is running and about the last logged tick value. This tick value is important for incremental fetching of data.

The state API can be called regardless of whether the logger is currently running or not.

The body of the response contains a JSON object with the following attributes:

- state: the current logger state as a JSON hash array with the following sub-attributes:

- running: whether or not the logger is running

- lastLogTick: the tick value of the latest tick the logger has logged. This value can be used for incremental fetching of log data.

- totalEvents: total number of events logged since the server was started. The value is not reset between multiple stops and re-starts of the logger.

- time: the current date and time on the logger server

- server: a JSON hash with the following sub-attributes:

- version: the logger server's version

- serverId: the logger server's id

- clients: a list of all replication clients that ever connected to the logger since it was started. This list can be used to determine approximately how much data the individual clients have already fetched from the logger server. Each entry in the list contains a time value indicating the server time the client last fetched data from the replication logger. The lastServedTick value of each client indicates the latest tick value sent to the client upon a client request to the replication logger.

", "summary": "returns the replication logger state", "httpMethod": "GET", - "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", + "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-20T21:04:23Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-20T21:04:24Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", "nickname": "returnsTheReplicationLoggerState" } ], @@ -332,7 +332,7 @@ "notes": "Returns the list of collections and indexes available on the server. This list can be used by replication clients to initiate an initial sync with the server.

The response will contain a JSON hash array with the collections and state attributes.

collections is a list of collections with the following sub-attributes:

- parameters: the collection properties

- indexes: a list of the indexes of the collection. Primary indexes and edge indexes are not included in this list.

tick: the system-wide tick value at the start of the dump

The state attribute contains the current state of the replication logger. It contains the following sub-attributes:

- running: whether or not the replication logger is currently active

- lastLogTick: the value of the last tick the replication logger has written

- time: the current time on the server

Replication clients should note the lastLogTick value returned. They can then fetch collections' data using the dump method up to the value of lastLogTick, and query the continuous replication log for log events after this tick value.

To create a full copy of the collections on the logger server, a replication client can execute these steps:

- call the /inventory API method. This returns the lastLogTick value and the list of collections and indexes from the logger server.

- for each collection returned by /inventory, create the collection locally and call /dump to stream the collection data to the client, up to the value of lastLogTick. After that, the client can create the indexes on the collections as they were reported by /inventory.

If the client wants to continuously stream replication log events from the logger server, the following additional steps need to be carried out:

- the client should call /logger-follow initially to fetch the first batch of replication events that were logged after the client's call to /inventory.

The call to /logger-follow should use a from parameter with the value of the lastLogTick as reported by /inventory. The call to /logger-follow will return the x-arango-replication-lastincluded header, which will contain the last tick value included in the response.

- the client can then continuously call /logger-follow to incrementally fetch new replication events that occurred after the last transfer.

Calls should use a from parameter with the value of the x-arango-replication-lastincluded header of the previous response. If there are no more replication events, the response will be empty and clients can go to sleep for a while and try again later.

", "summary": "returns an inventory of collections and indexes", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", "nickname": "returnsAnInventoryOfCollectionsAndIndexes" } ], @@ -565,7 +565,7 @@ "notes": "Starts the replication applier. This will return immediately if the replication applier is already running.

If the replication applier is not already running, the applier configuration will be checked, and if it is complete, the applier will be started in a background thread. This means that even if the applier encounters errors while running, they will not be reported in the response to this method.

To detect replication applier errors after the applier was started, use the /_api/replication/applier-state API instead.

", "summary": "starts the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:33:58Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:03:04Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "startsTheReplicationApplier" } ], @@ -592,7 +592,7 @@ "notes": "Stops the replication applier. This will return immediately if the replication applier is not running.

", "summary": "stops the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "stopsTheReplicationApplier" } ], @@ -619,7 +619,7 @@ "notes": "Returns the state of the replication applier, regardless of whether the applier is currently running or not.

The response is a JSON hash with the following attributes:

- state: a JSON hash with the following sub-attributes:

- running: whether or not the applier is active and running

- lastAppliedContinuousTick: the last tick value from the continuous replication log the applier has applied.

- lastProcessedContinuousTick: the last tick value from the continuous replication log the applier has processed.

Normally, the last applied and last processed tick values should be identical. For transactional operations, the replication applier will first process incoming log events before applying them, so the processed tick value might be higher than the applied tick value. This will be the case until the applier encounters the transaction commit log event for the transaction.

- lastAvailableContinuousTick: the last tick value the logger server can provide.

- time: the time on the applier server.

- totalRequests: the total number of requests the applier has made to the endpoint.

- totalFailedConnects: the total number of failed connection attempts the applier has made.

- totalEvents: the total number of log events the applier has processed.

- progress: a JSON hash with details about the replication applier progress. It contains the following sub-attributes if there is progress to report:

- message: a textual description of the progress

- time: the date and time the progress was logged

- failedConnects: the current number of failed connection attempts

- lastError: a JSON hash with details about the last error that happened on the applier. It contains the following sub-attributes if there was an error:

- errorNum: a numerical error code

- errorMessage: a textual error description

- time: the date and time the error occurred

In case no error has occurred, lastError will be empty.

- server: a JSON hash with the following sub-attributes:

- version: the applier server's version

- serverId: the applier server's id

- endpoint: the endpoint the applier is connected to (if applier is active) or will connect to (if applier is currently inactive)

- database: the name of the database the applier is connected to (if applier is active) or will connect to (if applier is currently inactive)

", "summary": "returns the state of the replication applier", "httpMethod": "GET", - "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"fetching master state information\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 3, \n    \"totalFailedConnects\" : 3, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "returnsTheStateOfTheReplicationApplier" } ], diff --git a/js/apps/system/aardvark/api-docs/system.json b/js/apps/system/aardvark/api-docs/system.json index 9dd3fc504d..5c280b91d9 100644 --- a/js/apps/system/aardvark/api-docs/system.json +++ b/js/apps/system/aardvark/api-docs/system.json @@ -92,7 +92,7 @@ "notes": "

Returns the statistics information. The returned object contains the statistics figures, grouped together according to the description returned by _admin/statistics-description. For instance, to access the figure userTime from the group system, you first select the sub-object stored in the system attribute of the result; within that sub-object, the value for userTime is stored in the attribute of the same name.

In case of a distribution, the returned object contains the total number of observations in count and the per-bucket counts in the counts list. The sum (or total) of the individual values is returned in sum.
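As an illustration only (the helper names below are made up; the attribute layout is the one described above), a client could post-process the returned object like this:

    // read the figure "userTime" from the group "system"
    function userTimeOf (stats) {
      return stats.system.userTime;
    }

    // average value of a distribution figure, e.g. stats.client.requestTime:
    // "sum" divided by "count", guarding against an empty distribution
    function meanOf (distribution) {
      if (distribution.count === 0) {
        return 0;
      }
      return distribution.sum / distribution.count;
    }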

", "summary": "reads the statistics", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 106242, \n    \"majorPageFaults\" : 1916, \n    \"userTime\" : 12.592, \n    \"systemTime\" : 1.726252, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 57217024, \n    \"virtualSize\" : 4994158592 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.00037384033203125, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 22.933324813842773, \n      \"count\" : 850, \n      \"counts\" : [ \n        539, \n        201, \n        44, \n        49, \n        14, \n        0, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 22.767783641815186, \n      \"count\" : 850, \n      \"counts\" : [ \n        539, \n        203, \n        42, \n        49, \n        14, \n        0, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.018385887145996094, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 27.57566213607788 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 113349, \n    \"majorPageFaults\" : 1916, \n    \"userTime\" : 28.970069, \n    \"systemTime\" : 3.00499, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 58105856, \n    \"virtualSize\" : 5001773056 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.003744840621948242, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 51.210867404937744, \n      \"count\" : 850, \n      \"counts\" : [ \n        216, \n        248, \n        250, \n        114, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 50.808953285217285, \n      \"count\" : 850, \n      \"counts\" : [ \n        226, \n        242, \n        251, \n        109, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.03761625289916992, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 58.96970891952515 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "readsTheStatistics" } ], From 50017099269547fe4b582a72a1b305966640180d Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 21 Jan 2014 09:15:45 +0100 Subject: [PATCH 12/21] updated import manual --- Documentation/Examples/api-import-documents | 2 +- Documentation/Examples/api-import-headers | 2 +- Documentation/ImplementorManual/HttpImport.md | 17 +++++++++++++++-- Documentation/ToolsManual/ImpManual.md | 12 ++++++++++++ arangod/RestHandler/RestImportHandler.cpp | 8 +++----- 5 files changed, 32 insertions(+), 9 deletions(-) diff --git a/Documentation/Examples/api-import-documents b/Documentation/Examples/api-import-documents index 6917a8c974..ff4789c2ea 100644 --- a/Documentation/Examples/api-import-documents +++ b/Documentation/Examples/api-import-documents @@ -7,4 +7,4 @@ server: triagens GmbH High-Performance HTTP Server connection: Keep-Alive content-type: application/json; charset=utf-8 -{"error":false,"created":2,"errors":0} +{"error":false,"created":2,"empty":0,"errors":0} diff --git a/Documentation/Examples/api-import-headers b/Documentation/Examples/api-import-headers index 6bd4ab6b2a..3c4b46b0d8 100644 --- a/Documentation/Examples/api-import-headers +++ b/Documentation/Examples/api-import-headers @@ -8,4 +8,4 @@ server: triagens GmbH High-Performance HTTP Server connection: Keep-Alive content-type: application/json; charset=utf-8 -{"error":false,"created":2,"errors":0} +{"error":false,"created":2,"empty":0,"errors":0} diff --git a/Documentation/ImplementorManual/HttpImport.md b/Documentation/ImplementorManual/HttpImport.md index faa94a37b7..b40f773513 100644 --- a/Documentation/ImplementorManual/HttpImport.md +++ b/Documentation/ImplementorManual/HttpImport.md @@ -93,7 +93,14 @@ the data are line-wise JSON documents (type = documents) or a JSON list (type = The server will respond with an HTTP 201 if everything went well. The number of documents imported will be returned in the `created` attribute of the response. If any documents were skipped or incorrectly formatted, this will be -returned in the `errors` attribute. +returned in the `errors` attribute. There will also be an attribute `empty` in +the response, which will contain a value of `0`. + +If the `details` parameter was set to `true` in the request, the response will +also contain an attribute `details` which is a list of details about errors that +occurred on the server side during the import. This list might be empty if no +errors occurred. + Importing Headers and Values {#HttpImportHeaderData} ==================================================== @@ -112,7 +119,13 @@ are needed or allowed in this data section. The server will again respond with an HTTP 201 if everything went well. The number of documents imported will be returned in the `created` attribute of the response. If any documents were skipped or incorrectly formatted, this will be -returned in the `errors` attribute. +returned in the `errors` attribute. The number of empty lines in the input file +will be returned in the `empty` attribute. + +If the `details` parameter was set to `true` in the request, the response will +also contain an attribute `details` which is a list of details about errors that +occurred on the server side during the import. This list might be empty if no +errors occurred. 
Importing into Edge Collections {#HttpImportEdges} ================================================== diff --git a/Documentation/ToolsManual/ImpManual.md b/Documentation/ToolsManual/ImpManual.md index 083ca54a5a..9eb7777aeb 100644 --- a/Documentation/ToolsManual/ImpManual.md +++ b/Documentation/ToolsManual/ImpManual.md @@ -66,6 +66,18 @@ Please note that by default, _arangoimp_ will import data into the specified collection in the default database (`_system`). To specify a different database, use the `--server.database` option when invoking _arangoimp_. +An _arangoimp_ import will print out the final results on the command line. +By default, it shows the number of documents created, the number of errors that +occurred on the server side, and the total number of input file lines/documents +that it processed. Additionally, _arangoimp_ will print out details about errors +that happended on the server-side (if any). + +Example: + + created: 2 + errors: 0 + total: 2 + Importing CSV Data {#ImpManualCsv} ================================== diff --git a/arangod/RestHandler/RestImportHandler.cpp b/arangod/RestHandler/RestImportHandler.cpp index 85672b71f8..16b33694eb 100644 --- a/arangod/RestHandler/RestImportHandler.cpp +++ b/arangod/RestHandler/RestImportHandler.cpp @@ -244,14 +244,11 @@ int RestImportHandler::handleSingleDocument (ImportTransactionType& trx, /// @RESTQUERYPARAM{type,string,required} /// Determines how the body of the request will be interpreted. `type` can have /// the following values: -/// /// - `documents`: when this type is used, each line in the request body is /// expected to be an individual JSON-encoded document. Multiple JSON documents /// in the request body need to be separated by newlines. -/// /// - `list`: when this type is used, the request body must contain a single /// JSON-encoded list of individual documents to import. -/// /// - `auto`: if set, this will automatically determine the body type (either /// `documents` or `list`). /// @@ -736,8 +733,9 @@ bool RestImportHandler::createFromJson (const string& type) { /// /// @RESTBODYPARAM{documents,string,required} /// The body must consist of JSON-encoded lists of attribute values, with one -/// line per per document. The first line of the request must be a JSON-encoded -/// list of attribute names. +/// line per per document. The first row of the request must be a JSON-encoded +/// list of attribute names. These attribute names are used for the data in the +/// subsequent rows. 
/// /// @RESTQUERYPARAMETERS /// From 0c039e0264d14245c79ace0283893aa84d4303a4 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 21 Jan 2014 13:27:40 +0100 Subject: [PATCH 13/21] mount all system apps by name --- js/server/bootstrap/module-internal.js | 28 ++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/js/server/bootstrap/module-internal.js b/js/server/bootstrap/module-internal.js index 4ecc709400..fa6d1c6aa4 100644 --- a/js/server/bootstrap/module-internal.js +++ b/js/server/bootstrap/module-internal.js @@ -1,5 +1,5 @@ /*jslint indent: 2, nomen: true, maxlen: 120, sloppy: true, vars: true, white: true, plusplus: true, nonpropdel: true */ -/*global require, db, ArangoCollection, ArangoDatabase, ArangoCursor, +/*global require, db, ArangoCollection, ArangoDatabase, ArangoCursor, module, ShapedJson, RELOAD_AUTH, SYS_DEFINE_ACTION, SYS_EXECUTE_GLOBAL_CONTEXT_FUNCTION, AHUACATL_RUN, AHUACATL_PARSE, AHUACATL_EXPLAIN */ @@ -162,15 +162,31 @@ catch (err) { console.error("cannot initialize Foxx application: %s", String(err)); } - + var aal = internal.db._collection("_aal"); if (aal !== null) { - var found = aal.firstExample({ type: "mount", mount: "/_admin/aardvark" }); + var systemAppPath = module.systemAppPath(); - if (found === null) { - fm.mount("aardvark", "/_admin/aardvark", {reload: false}); - } + var fs = require("fs"); + var apps = fs.list(systemAppPath); + + apps.forEach(function (appName) { + if (! fs.isDirectory(fs.join(systemAppPath, appName))) { + return; + } + + try { + var found = aal.firstExample({ type: "mount", mount: "/_admin/" + appName }); + + if (found === null) { + fm.mount(appName, "/_admin/" + appName, {reload: false}); + } + } + catch (err) { + console.error("unable to mount system application '%s': %s", appName, String(err)); + } + }); } }; From 3e653e303d3d0fc16e36ddcd89585d7d77ad85b3 Mon Sep 17 00:00:00 2001 From: Alan Plum Date: Fri, 17 Jan 2014 21:31:16 +0100 Subject: [PATCH 14/21] Added __dirname, __filename pseudo-globals. Fixes #733. --- js/common/bootstrap/modules.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/js/common/bootstrap/modules.js b/js/common/bootstrap/modules.js index ffa05bb742..63a3737563 100644 --- a/js/common/bootstrap/modules.js +++ b/js/common/bootstrap/modules.js @@ -735,6 +735,8 @@ function require (path) { } } + sandbox.__filename = origin; + sandbox.__dirname = typeof origin === 'string' ? origin.split('/').slice(0, -1).join('/') : origin; sandbox.module = module; sandbox.exports = module.exports; sandbox.require = function(path) { return module.require(path); }; @@ -1326,6 +1328,8 @@ function require (path) { } } + sandbox.__filename = full; + sandbox.__dirname = full.split('/').slice(0, -1).join('/'); sandbox.module = appModule; sandbox.applicationContext = appContext; From f99f362f07f0a0231fa2c1aea2d5c488355202f2 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Fri, 17 Jan 2014 22:01:24 +0100 Subject: [PATCH 15/21] small improvement for issue #738 --- js/common/bootstrap/modules.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/js/common/bootstrap/modules.js b/js/common/bootstrap/modules.js index 63a3737563..336064e262 100644 --- a/js/common/bootstrap/modules.js +++ b/js/common/bootstrap/modules.js @@ -735,6 +735,13 @@ function require (path) { } } + // actually the file name can be set via the path attribute + if (origin === undefined) { + origin = description.path; + } + // strip protocol (e.g. 
file://) + origin = origin.replace(/^[a-z]+:\/\//, ''); + sandbox.__filename = origin; sandbox.__dirname = typeof origin === 'string' ? origin.split('/').slice(0, -1).join('/') : origin; sandbox.module = module; From d10748155f56294e028c12c5f6964c57a54bb46b Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Fri, 17 Jan 2014 22:58:43 +0100 Subject: [PATCH 16/21] follow up for issue #738 --- js/common/bootstrap/modules.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/js/common/bootstrap/modules.js b/js/common/bootstrap/modules.js index 336064e262..553c93c13a 100644 --- a/js/common/bootstrap/modules.js +++ b/js/common/bootstrap/modules.js @@ -740,7 +740,9 @@ function require (path) { origin = description.path; } // strip protocol (e.g. file://) - origin = origin.replace(/^[a-z]+:\/\//, ''); + if (typeof origin === 'string') { + origin = origin.replace(/^[a-z]+:\/\//, ''); + } sandbox.__filename = origin; sandbox.__dirname = typeof origin === 'string' ? origin.split('/').slice(0, -1).join('/') : origin; From f6b872e01c5c51960b41a9645f81992fed338335 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 21 Jan 2014 13:38:32 +0100 Subject: [PATCH 17/21] updated CHANGELOG --- CHANGELOG | 8 ++++++++ js/server/bootstrap/module-internal.js | 9 ++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index a41c2378fe..5c5c4f5cb2 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,11 @@ +v1.4.7 (XXXX-XX-XX) +------------------- + +* issue #738: added __dirname, __filename pseudo-globals. Fixes #733. (@by pluma) + +* mount all Foxx applications in system apps directory on startup + + v1.4.6 (2014-01-20) ------------------- diff --git a/js/server/bootstrap/module-internal.js b/js/server/bootstrap/module-internal.js index fa6d1c6aa4..98e6f6bd3a 100644 --- a/js/server/bootstrap/module-internal.js +++ b/js/server/bootstrap/module-internal.js @@ -171,8 +171,15 @@ var fs = require("fs"); var apps = fs.list(systemAppPath); + // make sure the aardvark app is always there + if (apps.indexOf("aardvark") === -1) { + apps.push("aardvark"); + } + apps.forEach(function (appName) { - if (! fs.isDirectory(fs.join(systemAppPath, appName))) { + // for all unknown system apps: check that the directory actually exists + if (appName !== "aardvark" && + ! fs.isDirectory(fs.join(systemAppPath, appName))) { return; } From b63a8bdbceb5f7705f09786044be563e4e23537e Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Tue, 21 Jan 2014 15:23:26 +0100 Subject: [PATCH 18/21] issue #737: adjusted error message --- arangosh/V8Client/arangoimp.cpp | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/arangosh/V8Client/arangoimp.cpp b/arangosh/V8Client/arangoimp.cpp index 96bafe932b..6f859b5123 100644 --- a/arangosh/V8Client/arangoimp.cpp +++ b/arangosh/V8Client/arangoimp.cpp @@ -371,7 +371,16 @@ int main (int argc, char* argv[]) { } if (FileName != "-" && ! FileUtils::isRegularFile(FileName)) { - cerr << "Cannot open file '" << FileName << "'" << endl; + if (! FileUtils::exists(FileName)) { + cerr << "Cannot open file '" << FileName << "'. File not found." << endl; + } + else if (FileUtils::isDirectory(FileName)) { + cerr << "Specified file '" << FileName << "' is a directory. Please use a regular file." << endl; + } + else { + cerr << "Cannot open '" << FileName << "'. Invalid file type." 
<< endl; + } + TRI_EXIT_FUNCTION(EXIT_FAILURE, NULL); } From a9b579f1b1454b7c8a5e28872e461ff899dcd293 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Thu, 23 Jan 2014 09:47:22 +0100 Subject: [PATCH 19/21] issue #744 --- CHANGELOG | 2 ++ Documentation/man1/arangoimp | 6 ++++++ arangoirb/MRClient/arangoirb.cpp | 2 +- arangosh/ArangoShell/ArangoClient.cpp | 4 ++++ arangosh/ArangoShell/ArangoClient.h | 1 + arangosh/Benchmark/arangob.cpp | 2 +- arangosh/V8Client/arangodump.cpp | 2 +- arangosh/V8Client/arangoimp.cpp | 2 +- arangosh/V8Client/arangorestore.cpp | 2 +- arangosh/V8Client/arangosh.cpp | 2 +- lib/ApplicationServer/ApplicationServer.cpp | 2 +- 11 files changed, 20 insertions(+), 7 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 5c5c4f5cb2..73739cf64e 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,6 +1,8 @@ v1.4.7 (XXXX-XX-XX) ------------------- +* issue #744: Add usage example arangoimp from Command line + * issue #738: added __dirname, __filename pseudo-globals. Fixes #733. (@by pluma) * mount all Foxx applications in system apps directory on startup diff --git a/Documentation/man1/arangoimp b/Documentation/man1/arangoimp index c7b438d702..afdef13b80 100644 --- a/Documentation/man1/arangoimp +++ b/Documentation/man1/arangoimp @@ -38,4 +38,10 @@ username to use when connecting (default "root") ENDOPTION OPTION "--server.password " password to use when connecting. Leave empty for a password prompt ENDOPTION EXAMPLES +EXAMPLE COMMAND --file heroes.json --type json --collection superheroes --create-collection true +imports JSON data from file heroes.json into collection superhoeres. creates the collection if it does not exist +EXAMPLE COMMAND --file export.csv --type csv --collection mydata +imports CSV data from export.csv into existing collection mydata. +EXAMPLE COMMAND --file values.json --collection mydata --server.endpoint tcp://127.0.0.1:8529 --server.database mydb +imports JSON data from file values.json into collection mydata, using a different server endpoint and database AUTHOR diff --git a/arangoirb/MRClient/arangoirb.cpp b/arangoirb/MRClient/arangoirb.cpp index e35cc21c25..ca59b316a6 100644 --- a/arangoirb/MRClient/arangoirb.cpp +++ b/arangoirb/MRClient/arangoirb.cpp @@ -203,7 +203,7 @@ static void ParseProgramOptions (int argc, char* argv[]) { // and parse the command line and config file ProgramOptions options; - BaseClient.parse(options, description, argc, argv, "arangoirb.conf"); + BaseClient.parse(options, description, "", argc, argv, "arangoirb.conf"); // check module path if (StartupModules.empty()) { diff --git a/arangosh/ArangoShell/ArangoClient.cpp b/arangosh/ArangoShell/ArangoClient.cpp index 4ff1d1e1e8..b53e36908b 100644 --- a/arangosh/ArangoShell/ArangoClient.cpp +++ b/arangosh/ArangoShell/ArangoClient.cpp @@ -257,6 +257,7 @@ void ArangoClient::setupServer (ProgramOptionsDescription& description) { void ArangoClient::parse (ProgramOptions& options, ProgramOptionsDescription& description, + string const& example, int argc, char* argv[], string const& initFilename) { @@ -268,6 +269,9 @@ void ArangoClient::parse (ProgramOptions& options, set help = options.needHelp("help"); if (! help.empty()) { + if (! 
example.empty()) { + cout << "USAGE: " << argv[0] << " " << example << endl << endl; + } cout << description.usage(help) << endl; TRI_EXIT_FUNCTION(EXIT_SUCCESS, NULL); } diff --git a/arangosh/ArangoShell/ArangoClient.h b/arangosh/ArangoShell/ArangoClient.h index 823c662baf..eb30669cf6 100644 --- a/arangosh/ArangoShell/ArangoClient.h +++ b/arangosh/ArangoShell/ArangoClient.h @@ -207,6 +207,7 @@ namespace triagens { void parse (triagens::basics::ProgramOptions&, triagens::basics::ProgramOptionsDescription& description, + string const& example, int argc, char* argv[], string const& initFilename); diff --git a/arangosh/Benchmark/arangob.cpp b/arangosh/Benchmark/arangob.cpp index fbcb686c18..507216a677 100644 --- a/arangosh/Benchmark/arangob.cpp +++ b/arangosh/Benchmark/arangob.cpp @@ -221,7 +221,7 @@ static void ParseProgramOptions (int argc, char* argv[]) { description.arguments(&arguments); ProgramOptions options; - BaseClient.parse(options, description, argc, argv, "arangob.conf"); + BaseClient.parse(options, description, "--concurrency --requests --test-case ...", argc, argv, "arangob.conf"); } //////////////////////////////////////////////////////////////////////////////// diff --git a/arangosh/V8Client/arangodump.cpp b/arangosh/V8Client/arangodump.cpp index cf0af17e7e..53d8a3a8f3 100644 --- a/arangosh/V8Client/arangodump.cpp +++ b/arangosh/V8Client/arangodump.cpp @@ -193,7 +193,7 @@ static void ParseProgramOptions (int argc, char* argv[]) { description.arguments(&arguments); ProgramOptions options; - BaseClient.parse(options, description, argc, argv, "arangodump.conf"); + BaseClient.parse(options, description, "", argc, argv, "arangodump.conf"); if (1 == arguments.size()) { OutputDirectory = arguments[0]; diff --git a/arangosh/V8Client/arangoimp.cpp b/arangosh/V8Client/arangoimp.cpp index 6f859b5123..9cd7428f40 100644 --- a/arangosh/V8Client/arangoimp.cpp +++ b/arangosh/V8Client/arangoimp.cpp @@ -171,7 +171,7 @@ static void ParseProgramOptions (int argc, char* argv[]) { description.arguments(&arguments); ProgramOptions options; - BaseClient.parse(options, description, argc, argv, "arangoimp.conf"); + BaseClient.parse(options, description, "--file --type --collection ", argc, argv, "arangoimp.conf"); if (FileName == "" && arguments.size() > 0) { FileName = arguments[0]; diff --git a/arangosh/V8Client/arangorestore.cpp b/arangosh/V8Client/arangorestore.cpp index d640bb2e4b..b1bd23763f 100644 --- a/arangosh/V8Client/arangorestore.cpp +++ b/arangosh/V8Client/arangorestore.cpp @@ -194,7 +194,7 @@ static void ParseProgramOptions (int argc, char* argv[]) { description.arguments(&arguments); ProgramOptions options; - BaseClient.parse(options, description, argc, argv, "arangorestore.conf"); + BaseClient.parse(options, description, "", argc, argv, "arangorestore.conf"); if (1 == arguments.size()) { InputDirectory = arguments[0]; diff --git a/arangosh/V8Client/arangosh.cpp b/arangosh/V8Client/arangosh.cpp index b1e4c37bd1..34b3edfedc 100644 --- a/arangosh/V8Client/arangosh.cpp +++ b/arangosh/V8Client/arangosh.cpp @@ -462,7 +462,7 @@ static vector ParseProgramOptions (int argc, char* argv[]) { TRI_FreeString(TRI_CORE_MEM_ZONE, p); conf += ".conf"; - BaseClient.parse(options, description, argc, argv, conf); + BaseClient.parse(options, description, "", argc, argv, conf); // set V8 options v8::V8::SetFlagsFromCommandLine(&argc, argv, true); diff --git a/lib/ApplicationServer/ApplicationServer.cpp b/lib/ApplicationServer/ApplicationServer.cpp index 40b806c786..1aa8528c44 100644 --- 
a/lib/ApplicationServer/ApplicationServer.cpp +++ b/lib/ApplicationServer/ApplicationServer.cpp @@ -420,7 +420,7 @@ bool ApplicationServer::parse (int argc, set help = _options.needHelp("help"); if (! help.empty()) { - cout << argv[0] << " " << _title << "\n\n" << _description.usage(help) << endl; + cout << argv[0] << " " << _title << endl << endl << _description.usage(help) << endl; TRI_EXIT_FUNCTION(EXIT_SUCCESS, NULL); } From c4c6500b9a2ba1b24fe39d49fd6b4395bde33971 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Thu, 23 Jan 2014 10:09:32 +0100 Subject: [PATCH 20/21] updated man pages --- Documentation/Makefile.files | 2 +- Documentation/man/man1/arangob.1 | 24 +++++++++++++----------- Documentation/man/man1/arangodump.1 | 6 ++++-- Documentation/man/man1/arangoimp.1 | 23 ++++++++++++++++++++--- Documentation/man/man1/arangorestore.1 | 6 ++++-- Documentation/man/man1/arangosh.1 | 12 ++++++++---- Documentation/man/man8/arango-dfdb.8 | 2 +- Documentation/man/man8/arangod.8 | 4 +++- Documentation/man/man8/foxx-manager.8 | 4 +++- Documentation/man/man8/rcarangod.8 | 2 +- Documentation/man1/arangob | 20 +++++++++++--------- Documentation/man1/arangodump | 4 +++- Documentation/man1/arangoimp | 10 ++++++---- Documentation/man1/arangorestore | 4 +++- Documentation/man1/arangosh | 6 +++--- 15 files changed, 84 insertions(+), 45 deletions(-) diff --git a/Documentation/Makefile.files b/Documentation/Makefile.files index 6c004ff710..637ff7f198 100644 --- a/Documentation/Makefile.files +++ b/Documentation/Makefile.files @@ -304,7 +304,7 @@ latex: Doxygen/.setup-directories Doxygen/arango-latex.doxy $(DOXYGEN) .PHONY: man man: Doxygen/.setup-directories - for section in 1 8; do for i in `ls Documentation/man$$section`; do sed -f Documentation/Scripts/man.sed -e "s/\/$$section/" -e "s/\/$$i/g" -e "s/DATE/`date`/g" Documentation/man$$section/$$i > Doxygen/man/man$$section/$$i.$$section; done; done + for section in 1 8; do for i in `ls Documentation/man$$section`; do sed -f Documentation/Scripts/man.sed -e "s/\/$$section/" -e "s/\/$$i/g" -e "s/DATE/`date`/g" Documentation/man$$section/$$i > Doxygen/man/man$$section/$$i.$$section; cp "Doxygen/man/man$$section/$$i.$$section" "Documentation/man/man$$section/$$i.$$section"; done; done ## ----------------------------------------------------------------------------- ## --SECTION-- EXAMPLES diff --git a/Documentation/man/man1/arangob.1 b/Documentation/man/man1/arangob.1 index b74cd86163..5baa97d135 100644 --- a/Documentation/man/man1/arangob.1 +++ b/Documentation/man/man1/arangob.1 @@ -1,4 +1,4 @@ -.TH arangob 1 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH arangob 1 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangob - the ArangoDB benchmark and test tool .SH SYNOPSIS @@ -32,11 +32,13 @@ complexity value for test case (meaning depends on test case) .IP "--server.endpoint " server endpoint to connect to, consisting of protocol, ip address and port .IP "--server.database " -database name to use when connection (default: "_system") +database name to use when connection (default: "_system") .IP "--server.username " username to use when connecting (default "root") .IP "--server.password " -password to use when connecting. Leave empty for a password prompt +password to use when connecting. 
Don't specify this option to get a password prompt +.IP "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server .SH EXAMPLES .EX shell> arangob @@ -44,23 +46,23 @@ starts arangob with the default user and server endpoint .EE .EX -shell> arangob --server.username fuchsia -starts arangob with a specific user. Password prompt will follow +shell> arangob --test-case version --requests 1000 --concurrency 1 +runs the 'version' test case with 1000 requests, without concurrency .EE .EX -shell> arangob --server.username fuchsia --server.password "abcd@34" -starts arangob with a specific user and password given on command line +shell> arangob --test-case document --requests 1000 --concurrency 2 +runs the 'document' test case with 2000 requests, with concurrency 2 .EE .EX -shell> arangob --server.endpoint tcp://192.168.173.13:8529 -starts arangob connecting to a specific server +shell> arangob --test-case document --requests 1000 --concurrency 2 --async true +runs the 'document' test case with 2000 requests, with concurrency 2, with async requests .EE .EX -shell> arangob --server.endpoint ssl://192.168.173.13:8530 -starts arangob connecting to a specific server using an SSL connection +shell> arangob --test-case document --requests 1000 --concurrency 2 --batch-size 10 +runs the 'document' test case with 2000 requests, with concurrency 2, using batch requests .EE diff --git a/Documentation/man/man1/arangodump.1 b/Documentation/man/man1/arangodump.1 index 61c72b99f7..fa39ff2669 100644 --- a/Documentation/man/man1/arangodump.1 +++ b/Documentation/man/man1/arangodump.1 @@ -1,4 +1,4 @@ -.TH arangodump 1 "Fr 6. Sep 02:19:07 CEST 2013" "" "ArangoDB" +.TH arangodump 1 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangodump - a tool to create logical dumps of an ArangoDB database .SH SYNOPSIS @@ -51,7 +51,9 @@ database name to use when connection (default: "_system") .IP "--server.username " username to use when connecting (default "root") .IP "--server.password " -password to use when connecting. Leave empty for a password prompt +password to use when connecting. Don't specify this option to get a password prompt +.IP "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server .SH EXAMPLES .SH AUTHOR diff --git a/Documentation/man/man1/arangoimp.1 b/Documentation/man/man1/arangoimp.1 index 789deda357..2f02d6791c 100644 --- a/Documentation/man/man1/arangoimp.1 +++ b/Documentation/man/man1/arangoimp.1 @@ -1,4 +1,4 @@ -.TH arangoimp 1 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH arangoimp 1 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangoimp - a bulk importer for the ArangoDB database .SH SYNOPSIS @@ -32,12 +32,29 @@ set to "json", "tsv" or "csv", depending on the input file format .IP "--server.endpoint " server endpoint to connect to, consisting of protocol, ip address and port .IP "--server.database " -database name to use when connection (default: "_system") +database name to use when connection (default: "_system") .IP "--server.username " username to use when connecting (default "root") .IP "--server.password " -password to use when connecting. Leave empty for a password prompt +password to use when connecting. 
Don't specify this option to get a password prompt +.IP "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server .SH EXAMPLES +.EX +shell> arangoimp --file heroes.json --type json --collection superheroes --create-collection true +imports JSON data from file heroes.json into collection superhoeres. creates the collection if it does not exist +.EE + +.EX +shell> arangoimp --file export.csv --type csv --collection mydata +imports CSV data from export.csv into existing collection mydata +.EE + +.EX +shell> arangoimp --file values.json --collection mydata --server.endpoint tcp://127.0.0.1:8529 --server.database mydb +imports JSON data from file values.json into collection mydata, using a different server endpoint and database +.EE + .SH AUTHOR Copyright triAGENS GmbH, Cologne, Germany diff --git a/Documentation/man/man1/arangorestore.1 b/Documentation/man/man1/arangorestore.1 index 3ea4cab2d6..f529a12700 100644 --- a/Documentation/man/man1/arangorestore.1 +++ b/Documentation/man/man1/arangorestore.1 @@ -1,4 +1,4 @@ -.TH arangorestore 1 "Fr 6. Sep 02:19:07 CEST 2013" "" "ArangoDB" +.TH arangorestore 1 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangorestore - a data restore tool for the ArangoDB database .SH SYNOPSIS @@ -42,7 +42,9 @@ database name to use when connection (default: "_system") .IP "--server.username " username to use when connecting (default "root") .IP "--server.password " -password to use when connecting. Leave empty for a password prompt +password to use when connecting. Don't specify this option to get a password prompt +.IP "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server .SH EXAMPLES .SH AUTHOR diff --git a/Documentation/man/man1/arangosh.1 b/Documentation/man/man1/arangosh.1 index 4916c04237..e5890a2cdc 100644 --- a/Documentation/man/man1/arangosh.1 +++ b/Documentation/man/man1/arangosh.1 @@ -1,4 +1,4 @@ -.TH arangosh 1 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH arangosh 1 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangosh - the ArangoDB shell .SH SYNOPSIS @@ -13,6 +13,8 @@ online manual, available at http://www.arangodb.org/ The most important startup options are: +.IP "--audit-log " +log input and output to audit log file .IP "--configuration " read configuration from file .IP "--log.level " @@ -20,11 +22,13 @@ set the log level (possible values: "fatal", "error", "warning", "info", "debug" .IP "--server.endpoint " server endpoint to connect to, consisting of protocol, ip address and port .IP "--server.database " -database name to use when connection (default: "_system") +database name to use when connection (default: "_system") .IP "--server.username " username to use when connecting (default "root") .IP "--server.password " -password to use when connecting. Leave empty for a password prompt +password to use when connecting. Don't specify this option to get a password prompt +.IP "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server .SH EXAMPLES .EX shell> arangosh @@ -33,7 +37,7 @@ starts arangosh with the default user and server endpoint .EX shell> arangosh --server.username fuchsia -starts arangosh with a specific user. Password prompt will follow +starts arangosh with a specific user. Password prompt will follow if --server.disable-authentication is true. 
.EE .EX diff --git a/Documentation/man/man8/arango-dfdb.8 b/Documentation/man/man8/arango-dfdb.8 index 765ee45d04..f33c51607f 100644 --- a/Documentation/man/man8/arango-dfdb.8 +++ b/Documentation/man/man8/arango-dfdb.8 @@ -1,4 +1,4 @@ -.TH arango-dfdb 8 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH arango-dfdb 8 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arango-dfdb - a datafile debugger for ArangoDB .SH SYNOPSIS diff --git a/Documentation/man/man8/arangod.8 b/Documentation/man/man8/arangod.8 index 0da3ea07ae..fa3f195be9 100644 --- a/Documentation/man/man8/arangod.8 +++ b/Documentation/man/man8/arangod.8 @@ -1,4 +1,4 @@ -.TH arangod 8 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH arangod 8 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME arangod - the ArangoDB database server .SH SYNOPSIS @@ -30,6 +30,8 @@ log to file set the log level (possible values: "fatal", "error", "warning", "info", "debug", "trace") .IP "--server.endpoint " listen endpoint for client requests, consisting of protocol, ip address and port +.IP "--server.disable-authentication " +disable the password prompt when connecting to the server .IP "--database.directory " path to the database directory .SH EXAMPLES diff --git a/Documentation/man/man8/foxx-manager.8 b/Documentation/man/man8/foxx-manager.8 index 389f991f3b..d33d6e54b3 100644 --- a/Documentation/man/man8/foxx-manager.8 +++ b/Documentation/man/man8/foxx-manager.8 @@ -1,4 +1,4 @@ -.TH foxx-manager 8 "Fr 26. Jul 22:41:49 CEST 2013" "" "ArangoDB" +.TH foxx-manager 8 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME foxx-manager - a Foxx application manager for ArangoDB .SH SYNOPSIS @@ -9,6 +9,8 @@ ArangoDB database server. Foxx applications can be installed and uninstalled. More specific instructions are displayed when the program is invoked. .SH OPTIONS +.IP "--server.database " +database name to use when connection (default: "_system") .IP "--server.disable-authentication " disable the password prompt when connecting to the server .SH EXAMPLES diff --git a/Documentation/man/man8/rcarangod.8 b/Documentation/man/man8/rcarangod.8 index cba9a6f09f..fed4cb8f32 100644 --- a/Documentation/man/man8/rcarangod.8 +++ b/Documentation/man/man8/rcarangod.8 @@ -1,4 +1,4 @@ -.TH rcarangod 8 "So 30. Sep 01:36:14 CEST 2012" "" "ArangoDB" +.TH rcarangod 8 "Do 23. Jan 09:59:38 CET 2014" "" "ArangoDB" .SH NAME rcarangod - control script for the ArangoDB database server .SH SYNOPSIS diff --git a/Documentation/man1/arangob b/Documentation/man1/arangob index bfd78fe491..8d3332f44f 100644 --- a/Documentation/man1/arangob +++ b/Documentation/man1/arangob @@ -36,16 +36,18 @@ database name to use when connection (default: "_system") ENDOPTION OPTION "--server.username " username to use when connecting (default "root") ENDOPTION OPTION "--server.password " -password to use when connecting. Leave empty for a password prompt ENDOPTION +password to use when connecting. Don't specify this option to get a password prompt ENDOPTION +OPTION "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server ENDOPTION EXAMPLES EXAMPLE COMMAND starts COMMAND with the default user and server endpoint ENDEXAMPLE -EXAMPLE COMMAND --server.username fuchsia -starts COMMAND with a specific user. 
Password prompt will follow ENDEXAMPLE -EXAMPLE COMMAND --server.username fuchsia --server.password "abcd@34" -starts COMMAND with a specific user and password given on command line ENDEXAMPLE -EXAMPLE COMMAND --server.endpoint tcp://192.168.173.13:8529 -starts COMMAND connecting to a specific server ENDEXAMPLE -EXAMPLE COMMAND --server.endpoint ssl://192.168.173.13:8530 -starts COMMAND connecting to a specific server using an SSL connection ENDEXAMPLE +EXAMPLE COMMAND --test-case version --requests 1000 --concurrency 1 +runs the 'version' test case with 1000 requests, without concurrency ENDEXAMPLE +EXAMPLE COMMAND --test-case document --requests 1000 --concurrency 2 +runs the 'document' test case with 2000 requests, with concurrency 2 ENDEXAMPLE +EXAMPLE COMMAND --test-case document --requests 1000 --concurrency 2 --async true +runs the 'document' test case with 2000 requests, with concurrency 2, with async requests ENDEXAMPLE +EXAMPLE COMMAND --test-case document --requests 1000 --concurrency 2 --batch-size 10 +runs the 'document' test case with 2000 requests, with concurrency 2, using batch requests ENDEXAMPLE AUTHOR diff --git a/Documentation/man1/arangodump b/Documentation/man1/arangodump index bfa6df96c2..21c78ad99a 100644 --- a/Documentation/man1/arangodump +++ b/Documentation/man1/arangodump @@ -51,6 +51,8 @@ database name to use when connection (default: "_system") ENDOPTION OPTION "--server.username " username to use when connecting (default "root") ENDOPTION OPTION "--server.password " -password to use when connecting. Leave empty for a password prompt ENDOPTION +password to use when connecting. Don't specify this option to get a password prompt ENDOPTION +OPTION "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server ENDOPTION EXAMPLES AUTHOR diff --git a/Documentation/man1/arangoimp b/Documentation/man1/arangoimp index afdef13b80..527e357ff4 100644 --- a/Documentation/man1/arangoimp +++ b/Documentation/man1/arangoimp @@ -36,12 +36,14 @@ database name to use when connection (default: "_system") ENDOPTION OPTION "--server.username " username to use when connecting (default "root") ENDOPTION OPTION "--server.password " -password to use when connecting. Leave empty for a password prompt ENDOPTION +password to use when connecting. Don't specify this option to get a password prompt ENDOPTION +OPTION "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server ENDOPTION EXAMPLES EXAMPLE COMMAND --file heroes.json --type json --collection superheroes --create-collection true -imports JSON data from file heroes.json into collection superhoeres. creates the collection if it does not exist +imports JSON data from file heroes.json into collection superhoeres. creates the collection if it does not exist ENDEXAMPLE EXAMPLE COMMAND --file export.csv --type csv --collection mydata -imports CSV data from export.csv into existing collection mydata. 
+imports CSV data from export.csv into existing collection mydata ENDEXAMPLE EXAMPLE COMMAND --file values.json --collection mydata --server.endpoint tcp://127.0.0.1:8529 --server.database mydb -imports JSON data from file values.json into collection mydata, using a different server endpoint and database +imports JSON data from file values.json into collection mydata, using a different server endpoint and database ENDEXAMPLE AUTHOR diff --git a/Documentation/man1/arangorestore b/Documentation/man1/arangorestore index 963216aff1..cae89c2a53 100644 --- a/Documentation/man1/arangorestore +++ b/Documentation/man1/arangorestore @@ -42,6 +42,8 @@ database name to use when connection (default: "_system") ENDOPTION OPTION "--server.username " username to use when connecting (default "root") ENDOPTION OPTION "--server.password " -password to use when connecting. Leave empty for a password prompt ENDOPTION +password to use when connecting. Don't specify this option to get a password prompt ENDOPTION +OPTION "--server.disable-authentication " +disable the password prompt and authentication when connecting to the server ENDOPTION EXAMPLES AUTHOR diff --git a/Documentation/man1/arangosh b/Documentation/man1/arangosh index dd2c7f6c5c..7b766253a1 100644 --- a/Documentation/man1/arangosh +++ b/Documentation/man1/arangosh @@ -26,14 +26,14 @@ database name to use when connection (default: "_system") ENDOPTION OPTION "--server.username " username to use when connecting (default "root") ENDOPTION OPTION "--server.password " -password to use when connecting. Leave empty for a password prompt ENDOPTION +password to use when connecting. Don't specify this option to get a password prompt ENDOPTION OPTION "--server.disable-authentication " -turn off autpassword to use when connecting. Leave empty for a password prompt ENDOPTION +disable the password prompt and authentication when connecting to the server ENDOPTION EXAMPLES EXAMPLE COMMAND starts COMMAND with the default user and server endpoint ENDEXAMPLE EXAMPLE COMMAND --server.username fuchsia -starts COMMAND with a specific user. Password prompt will follow ENDEXAMPLE +starts COMMAND with a specific user. Password prompt will follow if --server.disable-authentication is true. 
ENDEXAMPLE EXAMPLE COMMAND --server.username fuchsia --server.password "abcd@34" starts COMMAND with a specific user and password given on command line ENDEXAMPLE EXAMPLE COMMAND --server.endpoint tcp://192.168.173.13:8529 From fdee8360236bb8c2d9ca0247570c9b3f0d0fcfac Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Thu, 23 Jan 2014 17:13:52 +0100 Subject: [PATCH 21/21] release version 1.4.7 --- CHANGELOG | 2 +- Makefile.in | 2 +- VERSION | 2 +- build.h | 2 +- configure | 20 +++++++++---------- configure.ac | 2 +- js/apps/system/aardvark/api-docs.json | 2 +- js/apps/system/aardvark/api-docs/batch.json | 2 +- .../system/aardvark/api-docs/collection.json | 2 +- .../system/aardvark/api-docs/database.json | 2 +- .../system/aardvark/api-docs/endpoint.json | 2 +- js/apps/system/aardvark/api-docs/import.json | 4 ++-- js/apps/system/aardvark/api-docs/job.json | 4 ++-- .../system/aardvark/api-docs/replication.json | 10 +++++----- js/apps/system/aardvark/api-docs/system.json | 2 +- 15 files changed, 30 insertions(+), 30 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 73739cf64e..dddf3ab562 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,4 +1,4 @@ -v1.4.7 (XXXX-XX-XX) +v1.4.7 (2014-01-23) ------------------- * issue #744: Add usage example arangoimp from Command line diff --git a/Makefile.in b/Makefile.in index 8fddb08a7f..6ebe9377b3 100644 --- a/Makefile.in +++ b/Makefile.in @@ -8301,7 +8301,7 @@ latex: Doxygen/.setup-directories Doxygen/arango-latex.doxy $(DOXYGEN) .PHONY: man man: Doxygen/.setup-directories - for section in 1 8; do for i in `ls Documentation/man$$section`; do sed -f Documentation/Scripts/man.sed -e "s/\/$$section/" -e "s/\/$$i/g" -e "s/DATE/`date`/g" Documentation/man$$section/$$i > Doxygen/man/man$$section/$$i.$$section; done; done + for section in 1 8; do for i in `ls Documentation/man$$section`; do sed -f Documentation/Scripts/man.sed -e "s/\/$$section/" -e "s/\/$$i/g" -e "s/DATE/`date`/g" Documentation/man$$section/$$i > Doxygen/man/man$$section/$$i.$$section; cp "Doxygen/man/man$$section/$$i.$$section" "Documentation/man/man$$section/$$i.$$section"; done; done ################################################################################ ### @brief generate json for swagger REST-API diff --git a/VERSION b/VERSION index c514bd85c2..be05bba982 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.4.6 +1.4.7 diff --git a/build.h b/build.h index d648255456..739b35ccee 100644 --- a/build.h +++ b/build.h @@ -1 +1 @@ -#define TRI_VERSION "1.4.6" +#define TRI_VERSION "1.4.7" diff --git a/configure b/configure index 6798466688..81912e4e94 100755 --- a/configure +++ b/configure @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.6. +# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.7. # # Report bugs to . # @@ -580,8 +580,8 @@ MAKEFLAGS= # Identity of this package. PACKAGE_NAME='triAGENS ArangoDB' PACKAGE_TARNAME='arangodb' -PACKAGE_VERSION='1.4.6' -PACKAGE_STRING='triAGENS ArangoDB 1.4.6' +PACKAGE_VERSION='1.4.7' +PACKAGE_STRING='triAGENS ArangoDB 1.4.7' PACKAGE_BUGREPORT='info@triagens.de' PACKAGE_URL='http://www.arangodb.org' @@ -1403,7 +1403,7 @@ if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures triAGENS ArangoDB 1.4.6 to adapt to many kinds of systems. 
+\`configure' configures triAGENS ArangoDB 1.4.7 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1474,7 +1474,7 @@ fi if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.6:";; + short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.7:";; esac cat <<\_ACEOF @@ -1605,7 +1605,7 @@ fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -triAGENS ArangoDB configure 1.4.6 +triAGENS ArangoDB configure 1.4.7 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -2070,7 +2070,7 @@ cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by triAGENS ArangoDB $as_me 1.4.6, which was +It was created by triAGENS ArangoDB $as_me 1.4.7, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -3276,7 +3276,7 @@ fi # Define the identity of the package. PACKAGE='arangodb' - VERSION='1.4.6' + VERSION='1.4.7' cat >>confdefs.h <<_ACEOF @@ -9175,7 +9175,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by triAGENS ArangoDB $as_me 1.4.6, which was +This file was extended by triAGENS ArangoDB $as_me 1.4.7, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -9242,7 +9242,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -triAGENS ArangoDB config.status 1.4.6 +triAGENS ArangoDB config.status 1.4.7 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff --git a/configure.ac b/configure.ac index dc30304fef..71b4865d72 100644 --- a/configure.ac +++ b/configure.ac @@ -6,7 +6,7 @@ dnl ============================================================================ dnl --SECTION-- triAGENS GmbH Build Environment dnl ============================================================================ -AC_INIT([triAGENS ArangoDB], [1.4.6], [info@triagens.de], [arangodb], [http://www.arangodb.org]) +AC_INIT([triAGENS ArangoDB], [1.4.7], [info@triagens.de], [arangodb], [http://www.arangodb.org]) dnl ---------------------------------------------------------------------------- dnl auxillary directory for install-sh and missing diff --git a/js/apps/system/aardvark/api-docs.json b/js/apps/system/aardvark/api-docs.json index a44c25e962..10c7e39dad 100644 --- a/js/apps/system/aardvark/api-docs.json +++ b/js/apps/system/aardvark/api-docs.json @@ -1,6 +1,6 @@ { "swaggerVersion": "1.1", - "apiVersion": "1.4.6", + "apiVersion": "1.4.7", "apis": [ { "path": "api-docs/aqlfunction.{format}", diff --git a/js/apps/system/aardvark/api-docs/batch.json b/js/apps/system/aardvark/api-docs/batch.json index 20ac0357b6..77abcaf9b0 100644 --- a/js/apps/system/aardvark/api-docs/batch.json +++ b/js/apps/system/aardvark/api-docs/batch.json @@ -32,7 +32,7 @@ "notes": "Executes a batch request. A batch request can contain any number of other requests that can be sent to ArangoDB in isolation. The benefit of using batch requests is that batching requests requires less client/server roundtrips than when sending isolated requests.

All parts of a batch request are executed serially on the server. The server will return the results of all parts in a single response when all parts are finished.

Technically, a batch request is a multipart HTTP request, with content-type multipart/form-data. A batch request consists of an envelope and the individual batch part actions. Batch part actions are \"regular\" HTTP requests, including full header and an optional body. Multiple batch parts are separated by a boundary identifier. The boundary identifier is declared in the batch envelope. The MIME content-type for each individual batch part must be application/x-arango-batchpart.

The response sent by the server will be an HTTP 200 response, with an error summary header x-arango-errors. This header contains the number of batch parts that failed with an HTTP error code of at least 400.

The response sent by the server is a multipart response, too. It contains the individual HTTP responses for all batch parts, including the full HTTP result header (with status code and other potential headers) and an optional result body. The individual batch parts in the result are separated using the same boundary value as specified in the request.

The order of batch parts in the response will be the same as in the original client request. Clients can additionally use the Content-Id MIME header in a batch part to define an individual id for each batch part. The server will return this id in the batch part responses, too.
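Because the escaped curl examples below can be hard to read, here is an illustrative sketch (not part of the official examples; the boundary value and the embedded requests are arbitrary) of how such a multipart body could be assembled in JavaScript:

    // each part is a complete HTTP request preceded by the batch part content type
    var boundary = "XXXsubpartXXX";

    function makePart (contentId, innerRequest) {
      return "--" + boundary + "\r\n" +
             "Content-Type: application/x-arango-batchpart\r\n" +
             "Content-Id: " + contentId + "\r\n\r\n" +
             innerRequest + "\r\n";
    }

    var body = makePart("myId1", "GET /_api/version HTTP/1.1\r\n") +
               makePart("myId2", "GET /_api/collection HTTP/1.1\r\n") +
               "--" + boundary + "--\r\n";

    // the assembled body is sent via POST to /_api/batch with the header
    // Content-Type: multipart/form-data; boundary=XXXsubpartXXX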

", "summary": "executes a batch request", "httpMethod": "POST", - "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", + "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.7\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", "nickname": "executesABatchRequest" } ], diff --git a/js/apps/system/aardvark/api-docs/collection.json b/js/apps/system/aardvark/api-docs/collection.json index 6a29add436..9b971c1046 100644 --- a/js/apps/system/aardvark/api-docs/collection.json +++ b/js/apps/system/aardvark/api-docs/collection.json @@ -19,7 +19,7 @@ "notes": "Creates an new collection with a given name. The request must contain an object with the following attributes.

- name: The name of the collection.

- waitForSync (optional, default: false): If true then the data is synchronised to disk before returning from a create or update of a document.

- doCompact (optional, default is true): whether or not the collection will be compacted.

- journalSize (optional, default is a @ref CommandLineArangod \"configuration parameter\"): The maximal size of a journal or datafile. Note that this also limits the maximal size of a single object. Must be at least 1MB.

- isSystem (optional, default is false): If true, create a system collection. In this case collection-name should start with an underscore. End users should normally create non-system collections only. API implementors may be required to create system collections on very special occasions, but normally a regular collection will do.

- isVolatile (optional, default is false): If true then the collection data is kept in-memory only and not made persistent. Unloading the collection will cause the collection data to be discarded. Stopping or re-starting the server will also cause full loss of data in the collection. Setting this option will make the resulting collection slightly faster than regular collections, because ArangoDB does not enforce any synchronisation to disk and does not calculate any CRC checksums for datafiles (as there are no datafiles).

This option should therefore be used for cache-type collections only, and not for data that cannot be re-created otherwise.

- keyOptions (optional): additional options for key generation. If specified, then keyOptions should be a JSON object containing the following attributes (note: some of them are optional): - type: specifies the type of the key generator. The currently available generators are traditional and autoincrement. - allowUserKeys: if set to true, then it is allowed to supply own key values in the _key attribute of a document. If set to false, then the key generator will solely be responsible for generating keys, and supplying own key values in the _key attribute of documents is considered an error. - increment: increment value for autoincrement key generator. Not used for other key generator types. - offset: initial offset value for autoincrement key generator. Not used for other key generator types.

- type (optional, default is 2): the type of the collection to create. The following values for type are valid: - 2: document collection - 3: edges collection

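The sketch below creates a document collection using a few of the optional attributes listed above. It relies on the same assumptions as the batch sketch earlier (Node 18+ as an ES module, localhost:8529, no authentication); the collection name is just an example.

    // Sketch: create a collection via POST /_api/collection.
    const res = await fetch("http://localhost:8529/_api/collection", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        name: "testCollectionUsers",   // required
        waitForSync: true,             // sync to disk before create/update returns
        type: 2,                       // 2 = document collection, 3 = edges collection
        keyOptions: {                  // optional key generator settings
          type: "autoincrement",
          increment: 5,
          allowUserKeys: true
        }
      })
    });

    const collection = await res.json();
    // On success the server echoes id, name, status, type and so on.
    console.log(collection.id, collection.name, collection.type, collection.code);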
", "summary": "creates a collection", "httpMethod": "POST", - "examples": "

unix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionBasics\"}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionBasics\n\n{ \n  \"id\" : \"19480999\", \n  \"name\" : \"testCollectionBasics\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 2, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\nunix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionEdges\",\"type\":3}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionEdges\n\n{ \n  \"id\" : \"20136359\", \n  \"name\" : \"testCollectionEdges\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 3, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n



unix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionUsers\",\"keyOptions\":{\"type\":\"autoincrement\",\"increment\":5,\"allowUserKeys\":true}}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionUsers\n\n{ \n  \"id\" : \"20922791\", \n  \"name\" : \"testCollectionUsers\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 2, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionBasics\"}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionBasics\n\n{ \n  \"id\" : \"19546535\", \n  \"name\" : \"testCollectionBasics\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 2, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\nunix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionEdges\",\"type\":3}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionEdges\n\n{ \n  \"id\" : \"20201895\", \n  \"name\" : \"testCollectionEdges\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 3, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n



unix> curl -X POST --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\"testCollectionUsers\",\"keyOptions\":{\"type\":\"autoincrement\",\"increment\":5,\"allowUserKeys\":true}}\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nlocation: /_db/_system/_api/collection/testCollectionUsers\n\n{ \n  \"id\" : \"20922791\", \n  \"name\" : \"testCollectionUsers\", \n  \"waitForSync\" : false, \n  \"isVolatile\" : false, \n  \"isSystem\" : false, \n  \"status\" : 3, \n  \"type\" : 2, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "createsACollection" } ], diff --git a/js/apps/system/aardvark/api-docs/database.json b/js/apps/system/aardvark/api-docs/database.json index 71bf7a8ccc..7b6e386add 100644 --- a/js/apps/system/aardvark/api-docs/database.json +++ b/js/apps/system/aardvark/api-docs/database.json @@ -74,7 +74,7 @@ "notes": "Retrieves information about the current database

The response is a JSON object with the following attributes:

- name: the name of the current database

- id: the id of the current database

- path: the filesystem path of the current database

- isSystem: whether or not the current database is the _system database

", "summary": "retrieves information about the current database", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.60594/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.30582/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "retrievesInformationAboutTheCurrentDatabase" } ], diff --git a/js/apps/system/aardvark/api-docs/endpoint.json b/js/apps/system/aardvark/api-docs/endpoint.json index f1585e9882..d9ac3c3ce1 100644 --- a/js/apps/system/aardvark/api-docs/endpoint.json +++ b/js/apps/system/aardvark/api-docs/endpoint.json @@ -24,7 +24,7 @@ "notes": "Returns a list of all configured endpoints the server is listening on. For each endpoint, the list of allowed databases is returned too if set.

The result is a JSON hash which has the endpoints as keys, and the list of mapped database names as values for each endpoint.

If a list of mapped databases is empty, it means that all databases can be accessed via the endpoint. If a list of mapped databases contains more than one database name, this means that any of the databases might be accessed via the endpoint, and the first database in the list will be treated as the default database for the endpoint. The default database will be used when an incoming request does not specify a database name in the request explicitly.

Note: retrieving the list of all endpoints is allowed in the system database only. Calling this action in any other database will make the server return an error.

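A small sketch of consuming this mapping (same runtime assumptions as the earlier sketches; remember that the call only works against the system database):

    // Sketch: list endpoints and the databases mapped to them.
    const res = await fetch("http://localhost:8529/_api/endpoint");
    const endpoints = await res.json();

    for (const { endpoint, databases } of endpoints) {
      // An empty list means all databases are reachable via this endpoint;
      // otherwise the first entry is the endpoint's default database.
      const info = databases.length === 0 ? "all databases" : "default: " + databases[0];
      console.log(endpoint, "->", info);
    }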
", "summary": "returns a list of all endpoints", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:30594\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:30582\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", "nickname": "returnsAListOfAllEndpoints" } ], diff --git a/js/apps/system/aardvark/api-docs/import.json b/js/apps/system/aardvark/api-docs/import.json index b3f33282da..2d0bddf626 100644 --- a/js/apps/system/aardvark/api-docs/import.json +++ b/js/apps/system/aardvark/api-docs/import.json @@ -41,7 +41,7 @@ "paramType": "query", "required": "True", "name": "type", - "description": "Determines how the body of the request will be interpreted. type can have the following values: " + "description": "Determines how the body of the request will be interpreted. type can have the following values: - documents: when this type is used, each line in the request body is expected to be an individual JSON-encoded document. Multiple JSON documents in the request body need to be separated by newlines. - list: when this type is used, the request body must contain a single JSON-encoded list of individual documents to import. - auto: if set, this will automatically determine the body type (either documents or list). " }, { "dataType": "String", @@ -115,7 +115,7 @@ "paramType": "body", "required": "true", "name": "documents", - "description": "The body must consist of JSON-encoded lists of attribute values, with one line per per document. The first line of the request must be a JSON-encoded list of attribute names. " + "description": "The body must consist of JSON-encoded lists of attribute values, with one line per per document. The first row of the request must be a JSON-encoded list of attribute names. These attribute names are used for the data in the subsequent rows. " }, { "dataType": "String", diff --git a/js/apps/system/aardvark/api-docs/job.json b/js/apps/system/aardvark/api-docs/job.json index 593dcb3adf..ca4fda5796 100644 --- a/js/apps/system/aardvark/api-docs/job.json +++ b/js/apps/system/aardvark/api-docs/job.json @@ -28,7 +28,7 @@ "notes": "Returns the result of an async job identified by job-id. If the async job result is present on the server, the result will be removed from the list of result. That means this method can be called for each job-id once.

The method will return the original job result's headers and body, plus the additional HTTP header x-arango-async-id. If this header is present, then the job was found and the response contains the original job's result. If the header is not present, the job was not found and the response contains status information from the job manager.

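To make the flow concrete, this sketch (same runtime assumptions as the earlier sketches) stores a request as an async job and then collects its result once via PUT:

    // Sketch: run a request asynchronously, then fetch its result by job id.
    const submitted = await fetch("http://localhost:8529/_api/version", {
      headers: { "x-arango-async": "store" }   // 202 Accepted; result kept on the server
    });
    const jobId = submitted.headers.get("x-arango-async-id");

    // Fetching the result removes it on the server, so this works once per job id.
    const result = await fetch("http://localhost:8529/_api/job/" + jobId, { method: "PUT" });

    if (result.headers.get("x-arango-async-id") !== null) {
      console.log("job result:", await result.json());   // the original response body
    } else {
      console.log("job not found (or not finished yet), status:", result.status);
    }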
", "summary": "Returns the result of an async job", "httpMethod": "PUT", - "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", + "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.7\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", "nickname": "ReturnsTheResultOfAnAsyncJob" } ], @@ -106,7 +106,7 @@ "notes": "Deletes either all job results, expired job results, or the result of a specific job. Clients can use this method to perform an eventual garbage collection of job results.

", "summary": "Deletes the result of async jobs", "httpMethod": "DELETE", - "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1390252233\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", + "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1390493937\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", "nickname": "DeletesTheResultOfAsyncJobs" } ], diff --git a/js/apps/system/aardvark/api-docs/replication.json b/js/apps/system/aardvark/api-docs/replication.json index 2fe00daa40..28290191e9 100644 --- a/js/apps/system/aardvark/api-docs/replication.json +++ b/js/apps/system/aardvark/api-docs/replication.json @@ -78,7 +78,7 @@ "notes": "Returns the current state of the server's replication logger. The state will include information about whether the logger is running and about the last logged tick value. This tick value is important for incremental fetching of data.

The state API can be called regardless of whether the logger is currently running or not.

The body of the response contains a JSON object with the following attributes:

- state: the current logger state as a JSON hash array with the following sub-attributes:

- running: whether or not the logger is running

- lastLogTick: the tick value of the latest tick the logger has logged. This value can be used for incremental fetching of log data.

- totalEvents: total number of events logged since the server was started. The value is not reset between multiple stops and re-starts of the logger.

- time: the current date and time on the logger server

- server: a JSON hash with the following sub-attributes:

- version: the logger server's version

- serverId: the logger server's id

- clients: a list of all replication clients that ever connected to the logger since it was started. This list can be used to determine approximately how much data the individual clients have already fetched from the logger server. Each entry in the list contains a time value indicating the server time the client last fetched data from the replication logger. The lastServedTick value of each client indicates the latest tick value sent to the client upon a client request to the replication logger.

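A sketch (same runtime assumptions as the earlier sketches) that reads the logger state and keeps lastLogTick as the starting point for incremental fetching:

    // Sketch: query the replication logger state.
    const res = await fetch("http://localhost:8529/_api/replication/logger-state");
    const { state, server, clients } = await res.json();

    console.log("logger running:", state.running);
    console.log("last logged tick:", state.lastLogTick);   // start value for incremental fetching
    console.log("logger server:", server.serverId, "version", server.version);
    console.log("known replication clients:", clients.length);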
", "summary": "returns the replication logger state", "httpMethod": "GET", - "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-20T21:04:23Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-20T21:04:24Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", + "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-23T16:12:50Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-23T16:12:50Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", "nickname": "returnsTheReplicationLoggerState" } ], @@ -332,7 +332,7 @@ "notes": "Returns the list of collections and indexes available on the server. This list can be used by replication clients to initiate an initial sync with the server.

The response will contain a JSON hash array with the collection and state attributes.

collections is a list of collections with the following sub-attributes:

- parameters: the collection properties

- indexes: a list of the indexes of the collection. Primary indexes and edge indexes are not included in this list.

tick: the system-wide tick value at the start of the dump

The state attribute contains the current state of the replication logger. It contains the following sub-attributes:

- running: whether or not the replication logger is currently active

- lastLogTick: the value of the last tick the replication logger has written

- time: the current time on the server

Replication clients should note the lastLogTick value returned. They can then fetch collections' data using the dump method up to the value of lastLogTick, and query the continuous replication log for log events after this tick value.

To create a full copy of the collections on the logger server, a replication client can execute these steps (a code sketch of the full procedure follows after the steps below):

- call the /inventory API method. This returns the lastLogTick value and the list of collections and indexes from the logger server.

- for each collection returned by /inventory, create the collection locally and call /dump to stream the collection data to the client, up to the value of lastLogTick. After that, the client can create the indexes on the collections as they were reported by /inventory.

If the client wants to continuously stream replication log events from the logger server, the following additional steps need to be carried out:

- the client should call /logger-follow initially to fetch the first batch of replication events that were logged after the client's call to /inventory.

The call to /logger-follow should use a from parameter with the value of the lastLogTick as reported by /inventory. The call to /logger-follow will return the x-arango-replication-lastincluded header, which will contain the last tick value included in the response.

- the client can then continuously call /logger-follow to incrementally fetch new replication events that occurred after the last transfer.

Calls should use a from parameter with the value of the x-arango-replication-lastincluded header of the previous response. If there are no more replication events, the response will be empty and clients can go to sleep for a while and try again later.

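The sketch below walks through the procedure described above, under the same runtime assumptions as the earlier sketches. The query parameters used for /dump (collection and to) are an assumption of this sketch and should be checked against the dump API description; creating collections and indexes locally and applying the fetched data are reduced to console output to keep the example short.

    // Sketch: initial sync plus continuous follow, as outlined in the steps above.
    const base = "http://localhost:8529/_api/replication";

    // 1. Fetch the inventory: collection definitions, indexes and lastLogTick.
    const inventory = await (await fetch(base + "/inventory")).json();
    let from = inventory.state.lastLogTick;

    // 2. For each collection: create it locally (not shown), stream its data up to
    //    lastLogTick, then create the indexes reported by the inventory (not shown).
    for (const { parameters, indexes } of inventory.collections) {
      const name = parameters.name;
      const dump = await fetch(base + "/dump?collection=" + encodeURIComponent(name) +
                               "&to=" + from);               // parameter names assumed
      const data = await dump.text();
      console.log("would apply dump of", name, "-", data.length, "bytes,",
                  indexes.length, "indexes to create");
    }

    // 3. Continuously poll /logger-follow, starting after lastLogTick.
    for (let i = 0; i < 3; i++) {                             // bounded loop for the sketch
      const res = await fetch(base + "/logger-follow?from=" + from);
      const last = res.headers.get("x-arango-replication-lastincluded");
      const events = await res.text();

      if (events.length === 0) {
        await new Promise(r => setTimeout(r, 1000));          // nothing new: sleep, try again
        continue;
      }
      console.log("would apply", events.trim().split("\n").length, "log events");
      from = last;                                            // next call starts after this tick
    }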
", "summary": "returns an inventory of collections and indexes", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-23T16:12:50Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-23T16:12:50Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", "nickname": "returnsAnInventoryOfCollectionsAndIndexes" } ], @@ -565,7 +565,7 @@ "notes": "Starts the replication applier. This will return immediately if the replication applier is already running.

If the replication applier is not already running, the applier configuration will be checked, and if it is complete, the applier will be started in a background thread. This means that even if the applier encounters errors while running, they will not be reported in the response to this method.

To detect replication applier errors after the applier was started, use the /_api/replication/applier-state API instead.

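Since errors that occur after start-up are only visible in the applier state, a client would typically start the applier and then poll /_api/replication/applier-state, roughly like this (same runtime assumptions as the earlier sketches):

    // Sketch: start the replication applier, then check its state for errors.
    const base = "http://localhost:8529/_api/replication";

    await fetch(base + "/applier-start", { method: "PUT" });

    // The start call returns immediately; problems surface in the state instead.
    const { state } = await (await fetch(base + "/applier-state")).json();

    if (state.lastError && state.lastError.errorNum) {
      console.log("applier error", state.lastError.errorNum + ":", state.lastError.errorMessage);
    } else {
      console.log("applier running:", state.running,
                  "progress:", state.progress ? state.progress.message : "n/a");
    }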
", "summary": "starts the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:03:04Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-23T16:12:11Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-23T16:12:51Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "startsTheReplicationApplier" } ], @@ -592,7 +592,7 @@ "notes": "Stops the replication applier. This will return immediately if the replication applier is not running.

", "summary": "stops the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-23T16:12:51Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-23T16:12:51Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-23T16:12:51Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "stopsTheReplicationApplier" } ], @@ -619,7 +619,7 @@ "notes": "Returns the state of the replication applier, regardless of whether the applier is currently running or not.

The response is a JSON hash with the following attributes:

- state: a JSON hash with the following sub-attributes:

- running: whether or not the applier is active and running

- lastAppliedContinuousTick: the last tick value from the continuous replication log the applier has applied.

- lastProcessedContinuousTick: the last tick value from the continuous replication log the applier has processed.

Normally, the last applied and last processed tick values should be identical. For transactional operations, the replication applier will first process incoming log events before applying them, so the processed tick value might be higher than the applied tick value. This will be the case until the applier encounters the transaction commit log event for the transaction.

- lastAvailableContinuousTick: the last tick value the logger server can provide.

- time: the time on the applier server.

- totalRequests: the total number of requests the applier has made to the endpoint.

- totalFailedConnects: the total number of failed connection attempts the applier has made.

- totalEvents: the total number of log events the applier has processed.

- progress: a JSON hash with details about the replication applier progress. It contains the following sub-attributes if there is progress to report:

- message: a textual description of the progress

- time: the date and time the progress was logged

- failedConnects: the current number of failed connection attempts

- lastError: a JSON hash with details about the last error that happened on the applier. It contains the following sub-attributes if there was an error:

- errorNum: a numerical error code

- errorMessage: a textual error description

- time: the date and time the error occurred

In case no error has occurred, lastError will be empty.

- server: a JSON hash with the following sub-attributes:

- version: the applier server's version

- serverId: the applier server's id

- endpoint: the endpoint the applier is connected to (if applier is active) or will connect to (if applier is currently inactive)

- database: the name of the database the applier is connected to (if applier is active) or will connect to (if applier is currently inactive)

", "summary": "returns the state of the replication applier", "httpMethod": "GET", - "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-23T16:12:51Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-23T16:12:51Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-23T16:12:51Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-23T16:12:51Z\", \n      \"message\" : \"fetching master state information\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 3, \n    \"totalFailedConnects\" : 3, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-23T16:12:51Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.7\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "returnsTheStateOfTheReplicationApplier" } ], diff --git a/js/apps/system/aardvark/api-docs/system.json b/js/apps/system/aardvark/api-docs/system.json index 5c280b91d9..24d4ca2f24 100644 --- a/js/apps/system/aardvark/api-docs/system.json +++ b/js/apps/system/aardvark/api-docs/system.json @@ -92,7 +92,7 @@ "notes": "

Returns the statistics information. The returned object contains the statistics figures grouped together according to the description returned by _admin/statistics-description. For instance, to access a figure userTime from the group system, you first select the sub-object describing the group stored in system and in that sub-object the value for userTime is stored in the attribute of the same name.

In case of a distribution, the returned object contains the total count in count and the distribution list in counts. The sum (or total) of the individual values is returned in sum.

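For instance, an average request time can be derived from such a distribution as sum divided by count. A sketch (same runtime assumptions as the earlier sketches; the time unit is taken to be seconds, judging by the example output below):

    // Sketch: read the statistics and derive a few figures from the grouped values.
    const stats = await (await fetch("http://localhost:8529/_admin/statistics")).json();

    // Plain figures sit directly inside their group ...
    console.log("resident size:", stats.system.residentSize, "bytes");

    // ... while distributions expose sum, count and a per-bucket counts list.
    const rt = stats.client.requestTime;
    console.log("requests measured:", rt.count);
    console.log("average request time:", (rt.sum / rt.count).toFixed(4));  // presumably seconds
    console.log("bucket counts:", rt.counts.join(", "));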
", "summary": "reads the statistics", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 113349, \n    \"majorPageFaults\" : 1916, \n    \"userTime\" : 28.970069, \n    \"systemTime\" : 3.00499, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 58105856, \n    \"virtualSize\" : 5001773056 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.003744840621948242, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 51.210867404937744, \n      \"count\" : 850, \n      \"counts\" : [ \n        216, \n        248, \n        250, \n        114, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 50.808953285217285, \n      \"count\" : 850, \n      \"counts\" : [ \n        226, \n        242, \n        251, \n        109, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.03761625289916992, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 58.96970891952515 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 72203, \n    \"majorPageFaults\" : 1914, \n    \"userTime\" : 12.08358, \n    \"systemTime\" : 1.664977, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 57380864, \n    \"virtualSize\" : 4994146304 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.0004661083221435547, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 23.29973530769348, \n      \"count\" : 850, \n      \"counts\" : [ \n        512, \n        234, \n        40, \n        51, \n        10, \n        0, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 23.136818885803223, \n      \"count\" : 850, \n      \"counts\" : [ \n        516, \n        230, \n        41, \n        50, \n        10, \n        0, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.01817011833190918, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 27.90538001060486 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "readsTheStatistics" } ],