From 251c2fa44837430f809d4e005197d24918a40da4 Mon Sep 17 00:00:00 2001 From: Frank Celler Date: Mon, 20 Jan 2014 22:09:44 +0100 Subject: [PATCH] release version 1.4.6 --- CHANGELOG | 111 +------- Makefile.in | 19 +- VERSION | 2 +- aclocal.m4 | 154 ++++++++--- build.h | 2 +- config/config.guess | 30 ++- config/config.sub | 17 +- config/missing | 4 +- configure | 247 ++++++++---------- configure.ac | 2 +- js/apps/system/aardvark/api-docs.json | 2 +- js/apps/system/aardvark/api-docs/batch.json | 2 +- .../system/aardvark/api-docs/database.json | 2 +- .../system/aardvark/api-docs/endpoint.json | 2 +- js/apps/system/aardvark/api-docs/job.json | 6 +- .../system/aardvark/api-docs/replication.json | 10 +- js/apps/system/aardvark/api-docs/system.json | 2 +- 17 files changed, 290 insertions(+), 324 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index e7c8db9a56..a41c2378fe 100755 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,113 +1,4 @@ -v1.5.0 (XXXX-XX-XX) -------------------- - -* issue #738: added __dirname, __filename pseudo-globals. Fixes #733. (@by pluma) - -* allow `\n` (as well as `\r\n`) as line terminator in batch requests sent to - `/_api/batch` HTTP API. - -* use `--data-binary` instead of `--data` parameter in generated cURL examples - -* issue #703: Also show path of logfile for fm.config() - -* issue #675: Dropping a collection used in "graph" module breaks the graph - -* added "static" Graph.drop() method for graphs API - -* fixed issue #695: arangosh server.password error - -* use pretty-printing in `--console` mode by defaul - -* added `check-server` binary for testing - -* simplified ArangoDB startup options - - Some startup options are now superfluous or their usage is simplified. The - following options have been changed: - - * `--javascript.modules-path`: this option has been removed. The modules paths - are determined by arangod and arangosh automatically based on the value of - `--javascript.startup-directory`. - - If the option is set on startup, it is ignored so startup will not abort with - an error `unrecognized option`. - - * `--javascript.action-directory`: this option has been removed. The actions - directory is determined by arangod automatically based on the value of - `--javascript.startup-directory`. - - If the option is set on startup, it is ignored so startup will not abort with - an error `unrecognized option`. - - * `--javascript.package-path`: this option is still available but it is not - required anymore to set the standard package paths (e.g. `js/npm`). arangod - will automatically use this standard package path regardless of whether it - was specified via the options. - - It is possible to use this option to add additional package paths to the - standard value. - - Configuration files included with arangod are adjusted accordingly. - -* layout of the graphs tab adapted to better fit with the other tabs - -* database selection is moved to the bottom right corner of the web interface - -* removed priority queues - - this feature was never advertised nor documented nor tested. - -* display internal attributes in document source view of web interface - -* removed separate shape collections - - When upgrading to ArangoDB 1.5, existing collections will be converted to include - shapes and attribute markers in the datafiles instead of using separate files for - shapes. - - When a collection is converted, existing shapes from the SHAPES directory will - be written to a new datafile in the collection directory, and the SHAPES directory - will be removed afterwards. 
- - This saves up to 2 MB of memory and disk space for each collection - (savings are higher, the less different shapes there are in a collection). - Additionally, one less file descriptor per opened collection will be used. - - When creating a new collection, the amount of sync calls may be reduced. The same - may be true for documents with yet-unknown shapes. This may help performance - in these cases. - -* added AQL functions `NTH` and `POSITION` - -* added signal handler for arangosh to save last command in more cases - -* added extra prompt placeholders for arangosh: - - `%e`: current endpoint - - `%u`: current user - -* added arangosh option `--javascript.gc-interval` to control amount of - garbage collection performed by arangosh - -* fixed issue #651: Allow addEdge() to take vertex ids in the JS library - -* removed command-line option `--log.format` - - In previous versions, this option did not have an effect for most log messages, so - it got removed. - -* removed C++ logger implementation - - Logging inside ArangoDB is now done using the LOG_XXX() macros. The LOGGER_XXX() - macros are gone. - -* added collection status "loading" - -* added the option to return the number of elements indexed to the - result of .getIndexes() for each index. This is - currently only implemented for hash indices and skiplist indices. - - -v1.4.6 (XXXX-XX-XX) +v1.4.6 (2014-01-20) ------------------- * issue #736: AQL function to parse collection and key from document handle diff --git a/Makefile.in b/Makefile.in index a307725070..8fddb08a7f 100644 --- a/Makefile.in +++ b/Makefile.in @@ -1,4 +1,4 @@ -# Makefile.in generated by automake 1.13.4 from Makefile.am. +# Makefile.in generated by automake 1.14.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. @@ -2122,8 +2122,8 @@ $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) $(am__aclocal_m4_deps): config/config.h: config/stamp-h1 - @if test ! -f $@; then rm -f config/stamp-h1; else :; fi - @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) config/stamp-h1; else :; fi + @test -f $@ || rm -f config/stamp-h1 + @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) config/stamp-h1 config/stamp-h1: $(top_srcdir)/config/config.h.in $(top_builddir)/config.status @rm -f config/stamp-h1 @@ -2134,8 +2134,8 @@ $(top_srcdir)/config/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) touch $@ lib/BasicsC/local-configuration.h: lib/BasicsC/stamp-h2 - @if test ! -f $@; then rm -f lib/BasicsC/stamp-h2; else :; fi - @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) lib/BasicsC/stamp-h2; else :; fi + @test -f $@ || rm -f lib/BasicsC/stamp-h2 + @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) lib/BasicsC/stamp-h2 lib/BasicsC/stamp-h2: $(top_srcdir)/lib/BasicsC/local-configuration.h.in $(top_builddir)/config.status @rm -f lib/BasicsC/stamp-h2 @@ -7087,10 +7087,16 @@ dist-xz: distdir $(am__post_remove_distdir) dist-tarZ: distdir + @echo WARNING: "Support for shar distribution archives is" \ + "deprecated." >&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir + @echo WARNING: "Support for distribution archives compressed with" \ + "legacy program 'compress' is deprecated." 
>&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__post_remove_distdir) @@ -7132,9 +7138,10 @@ distcheck: dist && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ - && ../configure --srcdir=.. --prefix="$$dc_install_base" \ + && ../configure \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ + --srcdir=.. --prefix="$$dc_install_base" \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ diff --git a/VERSION b/VERSION index e516bb9d96..c514bd85c2 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.4.5 +1.4.6 diff --git a/aclocal.m4 b/aclocal.m4 index ff0cf18e03..15ce39c029 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -1,4 +1,4 @@ -# generated automatically by aclocal 1.13.4 -*- Autoconf -*- +# generated automatically by aclocal 1.14.1 -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. @@ -32,10 +32,10 @@ To do so, use the procedure documented by the package, typically 'autoreconf'.]) # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.13' +[am__api_version='1.14' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.13.4], [], +m4_if([$1], [1.14.1], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) @@ -51,7 +51,7 @@ m4_define([_AM_AUTOCONF_VERSION], []) # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.13.4])dnl +[AM_AUTOMAKE_VERSION([1.14.1])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) @@ -418,6 +418,12 @@ AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. +dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. +m4_define([AC_PROG_CC], +m4_defn([AC_PROG_CC]) +[_AM_PROG_CC_C_O +]) + # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- @@ -526,7 +532,48 @@ dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl -]) + +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. +if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. 
This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) + fi +fi]) dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further @@ -534,7 +581,6 @@ dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) - # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. @@ -682,38 +728,6 @@ AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) -# Copyright (C) 1999-2013 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_CC_C_O -# -------------- -# Like AC_PROG_CC_C_O, but changed for automake. -AC_DEFUN([AM_PROG_CC_C_O], -[AC_REQUIRE([AC_PROG_CC_C_O])dnl -AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -AC_REQUIRE_AUX_FILE([compile])dnl -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $[2] | sed ['s/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/']` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi -dnl Make sure AC_PROG_CC is never called again, or it will override our -dnl setting of CC. -m4_define([AC_PROG_CC], - [m4_fatal([AC_PROG_CC cannot be called after AM_PROG_CC_C_O])]) -]) - # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. @@ -784,6 +798,70 @@ AC_DEFUN([_AM_SET_OPTIONS], AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) +# Copyright (C) 1999-2013 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# _AM_PROG_CC_C_O +# --------------- +# Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC +# to automatically call this. 
+AC_DEFUN([_AM_PROG_CC_C_O], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +AC_REQUIRE_AUX_FILE([compile])dnl +AC_LANG_PUSH([C])dnl +AC_CACHE_CHECK( + [whether $CC understands -c and -o together], + [am_cv_prog_cc_c_o], + [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i]) +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. + # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +AC_LANG_POP([C])]) + +# For backward compatibility. +AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) + +# Copyright (C) 2001-2013 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_RUN_LOG(COMMAND) +# ------------------- +# Run COMMAND, save the exit status in ac_status, and log it. +# (This has been adapted from Autoconf's _AC_RUN_LOG macro.) +AC_DEFUN([AM_RUN_LOG], +[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD + ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD + (exit $ac_status); }]) + # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. diff --git a/build.h b/build.h index acf6e6d1b4..d648255456 100644 --- a/build.h +++ b/build.h @@ -1 +1 @@ -#define TRI_VERSION "1.4.5" +#define TRI_VERSION "1.4.6" diff --git a/config/config.guess b/config/config.guess index b79252d6b1..9afd676206 100755 --- a/config/config.guess +++ b/config/config.guess @@ -2,7 +2,7 @@ # Attempt to guess a canonical system name. # Copyright 1992-2013 Free Software Foundation, Inc. 
-timestamp='2013-06-10' +timestamp='2013-11-29' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by @@ -1260,16 +1260,26 @@ EOF if test "$UNAME_PROCESSOR" = unknown ; then UNAME_PROCESSOR=powerpc fi - if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - case $UNAME_PROCESSOR in - i386) UNAME_PROCESSOR=x86_64 ;; - powerpc) UNAME_PROCESSOR=powerpc64 ;; - esac + if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi fi + elif test "$UNAME_PROCESSOR" = i386 ; then + # Avoid executing cc on OS X 10.9, as it ships with a stub + # that puts up a graphical alert prompting to install + # developer tools. Any system running Mac OS X 10.7 or + # later (Darwin 11 and later) is required to have a 64-bit + # processor. This is not true of the ARM version of Darwin + # that Apple uses in portable devices. + UNAME_PROCESSOR=x86_64 fi echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; diff --git a/config/config.sub b/config/config.sub index 8b612ab89d..61cb4bc22d 100755 --- a/config/config.sub +++ b/config/config.sub @@ -2,7 +2,7 @@ # Configuration validation subroutine script. # Copyright 1992-2013 Free Software Foundation, Inc. -timestamp='2013-04-24' +timestamp='2013-10-01' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by @@ -257,7 +257,7 @@ case $basic_machine in | avr | avr32 \ | be32 | be64 \ | bfin \ - | c4x | clipper \ + | c4x | c8051 | clipper \ | d10v | d30v | dlx | dsp16xx \ | epiphany \ | fido | fr30 | frv \ @@ -265,6 +265,7 @@ case $basic_machine in | hexagon \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ + | k1om \ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ @@ -324,7 +325,7 @@ case $basic_machine in c6x) basic_machine=tic6x-unknown ;; - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) basic_machine=$basic_machine-unknown os=-none ;; @@ -372,7 +373,7 @@ case $basic_machine in | be32-* | be64-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* \ - | clipper-* | craynv-* | cydra-* \ + | c8051-* | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ @@ -381,6 +382,7 @@ case $basic_machine in | hexagon-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ + | k1om-* \ | le32-* | le64-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ @@ -794,7 +796,7 @@ case $basic_machine in os=-mingw64 ;; mingw32) - basic_machine=i386-pc + basic_machine=i686-pc os=-mingw32 ;; mingw32ce) @@ -830,7 +832,7 @@ case $basic_machine in basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; msys) - basic_machine=i386-pc + basic_machine=i686-pc os=-msys ;; mvs) @@ -1546,6 +1548,9 @@ case $basic_machine in c4x-* | tic4x-*) os=-coff ;; + c8051-*) + os=-elf + ;; hexagon-*) os=-elf ;; diff --git a/config/missing b/config/missing index 
cdea514931..db98974ff5 100755 --- a/config/missing +++ b/config/missing @@ -1,7 +1,7 @@ #! /bin/sh # Common wrapper for a few potentially missing GNU programs. -scriptversion=2012-06-26.16; # UTC +scriptversion=2013-10-28.13; # UTC # Copyright (C) 1996-2013 Free Software Foundation, Inc. # Originally written by Fran,cois Pinard , 1996. @@ -160,7 +160,7 @@ give_advice () ;; autom4te*) echo "You might have modified some maintainer files that require" - echo "the 'automa4te' program to be rebuilt." + echo "the 'autom4te' program to be rebuilt." program_details 'autom4te' ;; bison*|yacc*) diff --git a/configure b/configure index 9f5c2bc680..6798466688 100755 --- a/configure +++ b/configure @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.5. +# Generated by GNU Autoconf 2.69 for triAGENS ArangoDB 1.4.6. # # Report bugs to . # @@ -580,8 +580,8 @@ MAKEFLAGS= # Identity of this package. PACKAGE_NAME='triAGENS ArangoDB' PACKAGE_TARNAME='arangodb' -PACKAGE_VERSION='1.4.5' -PACKAGE_STRING='triAGENS ArangoDB 1.4.5' +PACKAGE_VERSION='1.4.6' +PACKAGE_STRING='triAGENS ArangoDB 1.4.6' PACKAGE_BUGREPORT='info@triagens.de' PACKAGE_URL='http://www.arangodb.org' @@ -1403,7 +1403,7 @@ if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures triAGENS ArangoDB 1.4.5 to adapt to many kinds of systems. +\`configure' configures triAGENS ArangoDB 1.4.6 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1474,7 +1474,7 @@ fi if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.5:";; + short | recursive ) echo "Configuration of triAGENS ArangoDB 1.4.6:";; esac cat <<\_ACEOF @@ -1605,7 +1605,7 @@ fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -triAGENS ArangoDB configure 1.4.5 +triAGENS ArangoDB configure 1.4.6 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -2070,7 +2070,7 @@ cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by triAGENS ArangoDB $as_me 1.4.5, which was +It was created by triAGENS ArangoDB $as_me 1.4.6, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2790,7 +2790,7 @@ if test x$tr_ARM == xyes; then fi -am__api_version='1.13' +am__api_version='1.14' # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or @@ -3276,7 +3276,7 @@ fi # Define the identity of the package. PACKAGE='arangodb' - VERSION='1.4.5' + VERSION='1.4.6' cat >>confdefs.h <<_ACEOF @@ -3327,6 +3327,47 @@ am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. 
+if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5 + fi +fi # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; @@ -4577,6 +4618,65 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 +$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } +if ${am_cv_prog_cc_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 + ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 +$as_echo "$am_cv_prog_cc_c_o" >&6; } +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. 
+ # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 @@ -5227,131 +5327,6 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test "x$CC" != xcc; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -fi -set dummy $CC; ac_cc=`$as_echo "$2" | - sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -# Make sure it works both with $CC and with simple cc. -# We do the test twice because some compilers refuse to overwrite an -# existing .o file with -o, though they will create one. -ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -rm -f conftest2.* -if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; -then - eval ac_cv_prog_cc_${ac_cc}_c_o=yes - if test "x$CC" != xcc; then - # Test first that cc exists at all. - if { ac_try='cc -c conftest.$ac_ext >&5' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' - rm -f conftest2.* - if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; - then - # cc works too. - : - else - # cc exists but doesn't like -o. - eval ac_cv_prog_cc_${ac_cc}_c_o=no - fi - fi - fi -else - eval ac_cv_prog_cc_${ac_cc}_c_o=no -fi -rm -f core conftest* - -fi -if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - -$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h - -fi - -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 @@ -9200,7 +9175,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by triAGENS ArangoDB $as_me 1.4.5, which was +This file was extended by triAGENS ArangoDB $as_me 1.4.6, which was generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -9267,7 +9242,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -triAGENS ArangoDB config.status 1.4.5 +triAGENS ArangoDB config.status 1.4.6 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff --git a/configure.ac b/configure.ac index 5ff9da3df8..dc30304fef 100644 --- a/configure.ac +++ b/configure.ac @@ -6,7 +6,7 @@ dnl ============================================================================ dnl --SECTION-- triAGENS GmbH Build Environment dnl ============================================================================ -AC_INIT([triAGENS ArangoDB], [1.4.5], [info@triagens.de], [arangodb], [http://www.arangodb.org]) +AC_INIT([triAGENS ArangoDB], [1.4.6], [info@triagens.de], [arangodb], [http://www.arangodb.org]) dnl ---------------------------------------------------------------------------- dnl auxillary directory for install-sh and missing diff --git a/js/apps/system/aardvark/api-docs.json b/js/apps/system/aardvark/api-docs.json index 226b5396bd..a44c25e962 100644 --- a/js/apps/system/aardvark/api-docs.json +++ b/js/apps/system/aardvark/api-docs.json @@ -1,6 +1,6 @@ { "swaggerVersion": "1.1", - "apiVersion": "1.4.5", + "apiVersion": "1.4.6", "apis": [ { "path": "api-docs/aqlfunction.{format}", diff --git a/js/apps/system/aardvark/api-docs/batch.json b/js/apps/system/aardvark/api-docs/batch.json index 5397747841..20ac0357b6 100644 --- a/js/apps/system/aardvark/api-docs/batch.json +++ b/js/apps/system/aardvark/api-docs/batch.json @@ -32,7 +32,7 @@ "notes": "Executes a batch request. A batch request can contain any number of other requests that can be sent to ArangoDB in isolation. The benefit of using batch requests is that batching requests requires less client/server roundtrips than when sending isolated requests.

All parts of a batch request are executed serially on the server. The server will return the results of all parts in a single response when all parts are finished.

Technically, a batch request is a multipart HTTP request, with content-type multipart/form-data. A batch request consists of an envelope and the individual batch part actions. Batch part actions are \"regular\" HTTP requests, including full header and an optional body. Multiple batch parts are separated by a boundary identifier. The boundary identifier is declared in the batch envelope. The MIME content-type for each individual batch part must be application/x-arango-batchpart.

The response sent by the server will be an HTTP 200 response, with an error summary header x-arango-errors. This header contains the number of batch parts that failed with an HTTP error code of at least 400.

The response sent by the server is a multipart response, too. It contains the individual HTTP responses for all batch parts, including the full HTTP result header (with status code and other potential headers) and an optional result body. The individual batch parts in the result are seperated using the same boundary value as specified in the request.

The order of batch parts in the response will be the same as in the original client request. Client can additionally use the Content-Id MIME header in a batch part to define an individual id for each batch part. The server will return this id is the batch part responses, too.

", "summary": "executes a batch request", "httpMethod": "POST", - "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.5\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", + "examples": "

unix> curl -X POST --header 'Content-Type: multipart/form-data; boundary=SomeBoundaryValue' --data @- --dump - http://localhost:8529/_api/batch\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nGET /_api/version HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nPOST /_api/collection/products HTTP/1.1\r\n\r\n{ \"name\": \"products\" }\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nGET /_api/collection/products/figures HTTP/1.1\r\n\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nDELETE /_api/collection/products HTTP/1.1\r\n--SomeBoundaryValue--\r\n\n\nHTTP/1.1 200 OK\ncontent-type: multipart/form-data; boundary=SomeBoundaryValue\nx-arango-errors: 1\n\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 37\r\n\r\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 88\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":1203,\"errorMessage\":\"unknown collection 'products'\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 137\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"waitForSync\":false,\"isVolatile\":false,\"isSystem\":false,\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nlocation: /_db/_system/_api/collection/products/figures\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 526\r\n\r\n{\"id\":\"346505639\",\"name\":\"products\",\"doCompact\":true,\"isVolatile\":false,\"isSystem\":false,\"journalSize\":1048576,\"keyOptions\":{\"type\":\"traditional\",\"allowUserKeys\":true},\"waitForSync\":false,\"count\":0,\"figures\":{\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"shapefiles\":{\"count\":1,\"fileSize\":2097152},\"shapes\":{\"count\":6},\"attributes\":{\"count\":0}},\"status\":3,\"type\":2,\"error\":false,\"code\":200}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\ncontent-type: application/json; charset=utf-8\r\ncontent-length: 43\r\n\r\n{\"id\":\"346505639\",\"error\":false,\"code\":200}\r\n--SomeBoundaryValue--\n\n

", "nickname": "executesABatchRequest" } ], diff --git a/js/apps/system/aardvark/api-docs/database.json b/js/apps/system/aardvark/api-docs/database.json index 1770f9b7f1..71bf7a8ccc 100644 --- a/js/apps/system/aardvark/api-docs/database.json +++ b/js/apps/system/aardvark/api-docs/database.json @@ -74,7 +74,7 @@ "notes": "Retrieves information about the current database

The response is a JSON object with the following attributes:

- name: the name of the current database

- id: the id of the current database

- path: the filesystem path of the current database

- isSystem: whether or not the current database is the _system database

", "summary": "retrieves information about the current database", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.81124/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/database/current\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : { \n    \"name\" : \"_system\", \n    \"id\" : \"82343\", \n    \"path\" : \"/tmp/vocdir.60594/databases/database-82343\", \n    \"isSystem\" : true \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "retrievesInformationAboutTheCurrentDatabase" } ], diff --git a/js/apps/system/aardvark/api-docs/endpoint.json b/js/apps/system/aardvark/api-docs/endpoint.json index 53d651754a..f1585e9882 100644 --- a/js/apps/system/aardvark/api-docs/endpoint.json +++ b/js/apps/system/aardvark/api-docs/endpoint.json @@ -24,7 +24,7 @@ "notes": "Returns a list of all configured endpoints the server is listening on. For each endpoint, the list of allowed databases is returned too if set.

The result is a JSON hash which has the endpoints as keys, and the list of mapped database names as values for each endpoint.

If a list of mapped databases is empty, it means that all databases can be accessed via the endpoint. If a list of mapped databases contains more than one database name, this means that any of the databases might be accessed via the endpoint, and the first database in the list will be treated as the default database for the endpoint. The default database will be used when an incoming request does not specify a database name in the request explicitly.

Note: retrieving the list of all endpoints is allowed in the system database only. Calling this action in any other database will make the server return an error.

", "summary": "returns a list of all endpoints", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:31124\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/endpoint\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n[ \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:30594\", \n    \"databases\" : [ ] \n  }, \n  { \n    \"endpoint\" : \"tcp://127.0.0.1:8532\", \n    \"databases\" : [ \n      \"mydb1\", \n      \"mydb2\" \n    ] \n  } \n]\n\n

", "nickname": "returnsAListOfAllEndpoints" } ], diff --git a/js/apps/system/aardvark/api-docs/job.json b/js/apps/system/aardvark/api-docs/job.json index fa87a1080d..593dcb3adf 100644 --- a/js/apps/system/aardvark/api-docs/job.json +++ b/js/apps/system/aardvark/api-docs/job.json @@ -28,7 +28,7 @@ "notes": "Returns the result of an async job identified by job-id. If the async job result is present on the server, the result will be removed from the list of result. That means this method can be called for each job-id once.

The method will return the original job result's headers and body, plus the additional HTTP header x-arango-async-job-id. If this header is present, then the job was found and the response contains the original job's result. If the header is not present, the job was not found and the response contains status information from the job amanger.

", "summary": "Returns the result of an async job", "httpMethod": "PUT", - "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.5\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", + "examples": "Not providing a job-id:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"bad parameter\",\"code\":400,\"errorNum\":400}\n\n

Providing a job-id for a non-existing job:

unix> curl -X PUT --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{\"error\":true,\"errorMessage\":\"not found\",\"code\":404,\"errorNum\":404}\n\n

Fetching the result of an HTTP GET job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409813415\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409813415\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409813415\n\n{\"server\":\"arango\",\"version\":\"1.4.6\"}\n\n

Fetching the result of an HTTP POST job that failed:

unix> curl -X POST --header 'x-arango-async: store' --data @- --dump - http://localhost:8529/_api/collection\n{\"name\":\" this name is invalid \"}\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 409878951\n\nunix> curl -X PUT --dump - http://localhost:8529/_api/job/409878951\n\nHTTP/1.1 400 Bad Request\ncontent-type: application/json; charset=utf-8\nx-arango-async-id: 409878951\n\n{\"error\":true,\"code\":400,\"errorNum\":1208,\"errorMessage\":\"cannot create collection: illegal name\"}\n\n

", "nickname": "ReturnsTheResultOfAnAsyncJob" } ], @@ -94,7 +94,7 @@ "paramType": "path", "required": "true", "name": "type", - "description": "The type of jobs to delete. type can be: " + "description": "The type of jobs to delete. type can be: - all: deletes all jobs results. Currently executing or queued async jobs will not be stopped by this call. - expired: deletes expired results. To determine the expiration status of a result, pass the stamp URL parameter. stamp needs to be a UNIX timestamp, and all async job results created at a lower timestamp will be deleted. - an actual job-id: in this case, the call will remove the result of the specified async job. If the job is currently executing or queued, it will not be aborted. " }, { "dataType": "Number", @@ -106,7 +106,7 @@ "notes": "Deletes either all job results, expired job results, or the result of a specific job. Clients can use this method to perform an eventual garbage collection of job results.

", "summary": "Deletes the result of async jobs", "httpMethod": "DELETE", - "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1389793241\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", + "examples": "Deleting all jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410075559\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/all\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting expired jobs:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410141095\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/expired?stamp=1390252233\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a specific job:

unix> curl --header 'x-arango-async: store' --dump - http://localhost:8529/_api/version\n\nHTTP/1.1 202 Accepted\ncontent-type: text/plain; charset=utf-8\nx-arango-async-id: 410206631\n\nunix> curl -X DELETE --dump - http://localhost:8529/_api/job/410206631\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"result\" : true \n}\n\n

Deleting the result of a non-existing job:

unix> curl -X DELETE --dump - http://localhost:8529/_api/job/foobar\n\nHTTP/1.1 404 Not Found\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"error\" : true, \n  \"errorMessage\" : \"not found\", \n  \"code\" : 404, \n  \"errorNum\" : 404 \n}\n\n

", "nickname": "DeletesTheResultOfAsyncJobs" } ], diff --git a/js/apps/system/aardvark/api-docs/replication.json b/js/apps/system/aardvark/api-docs/replication.json index 6b962b58c9..2fe00daa40 100644 --- a/js/apps/system/aardvark/api-docs/replication.json +++ b/js/apps/system/aardvark/api-docs/replication.json @@ -78,7 +78,7 @@ "notes": "Returns the current state of the server's replication logger. The state will include information about whether the logger is running and about the last logged tick value. This tick value is important for incremental fetching of data.

The state API can be called regardless of whether the logger is currently running or not.

The body of the response contains a JSON object with the following attributes:

- state: the current logger state as a JSON hash array with the following sub-attributes:

- running: whether or not the logger is running

- lastLogTick: the tick value of the latest tick the logger has logged. This value can be used for incremental fetching of log data.

- totalEvents: total number of events logged since the server was started. The value is not reset between multiple stops and re-starts of the logger.

- time: the current date and time on the logger server

- server: a JSON hash with the following sub-attributes:

- version: the logger server's version

- serverId: the logger server's id

- clients: a list of all replication clients that ever connected to the logger since it was started. This list can be used to determine approximately how much data the individual clients have already fetched from the logger server. Each entry in the list contains a time value indicating the server time the client last fetched data from the replication logger. The lastServedTick value of each client indicates the latest tick value sent to the client upon a client request to the replication logger.

", "summary": "returns the replication logger state", "httpMethod": "GET", - "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", + "examples": "Returns the state of an inactive replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"397558183\", \n    \"totalEvents\" : 2, \n    \"time\" : \"2014-01-20T21:04:23Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

Returns the state of an active replication logger.

unix> curl --dump - http://localhost:8529/_api/replication/logger-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastLogTick\" : \"397885863\", \n    \"totalEvents\" : 3, \n    \"time\" : \"2014-01-20T21:04:24Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"clients\" : [ ] \n}\n\n

", "nickname": "returnsTheReplicationLoggerState" } ], @@ -332,7 +332,7 @@ "notes": "Returns the list of collections and indexes available on the server. This list can be used by replication clients to initiate an initial sync with the server.

The response will contain a JSON hash with the collections and state attributes.

collections is a list of collections with the following sub-attributes:

- parameters: the collection properties

- indexes: a list of the indexes of the collection. Primary indexes and edge indexes are not included in this list.

tick: the system-wide tick value at the start of the dump

The state attribute contains the current state of the replication logger. It contains the following sub-attributes:

- running: whether or not the replication logger is currently active

- lastLogTick: the value of the last tick the replication logger has written

- time: the current time on the server

Replication clients should note the lastLogTick value returned. They can then fetch collections' data using the dump method up to the value of lastLogTick, and query the continuous replication log for log events after this tick value.

To create a full copy of the collections on the logger server, a replication client can execute these steps:

- call the /inventory API method. This returns the lastLogTick value and the list of collections and indexes from the logger server.

- for each collection returned by /inventory, create the collection locally and call /dump to stream the collection data to the client, up to the value of lastLogTick. After that, the client can create the indexes on the collections as they were reported by /inventory.

If the client wants to continuously stream replication log events from the logger server, the following additional steps need to be carried out:

- the client should call /logger-follow initially to fetch the first batch of replication events that were logged after the client's call to /inventory.

The call to /logger-follow should use a from parameter with the value of the lastLogTick as reported by /inventory. The call to /logger-follow will return the x-arango-replication-lastincluded header, which will contain the last tick value included in the response.

- the client can then continuously call /logger-follow to incrementally fetch new replication events that occurred after the last transfer.

Calls should use a from parameter with the value of the x-arango-replication-lastincluded header of the previous response. If there are no more replication events, the response will be empty and clients can go to sleep for a while and try again later.
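
The procedure above could be sketched in Python roughly as follows. This is an illustration only, assuming a local, unauthenticated server; the actual dump call, the local creation of collections and indexes, and error handling are left as comments because their details are described elsewhere. The from parameter and the x-arango-replication-lastincluded header follow the description above; the endless follow loop is deliberately simplified.

import json
import time
import urllib.request

BASE = "http://localhost:8529/_api/replication"

def get_json(url):
    with urllib.request.urlopen(url) as response:
        return json.load(response)

# Step 1: fetch the inventory and remember lastLogTick as the bound for the initial copy.
inventory = get_json(BASE + "/inventory")
last_tick = inventory["state"]["lastLogTick"]

# Step 2: recreate each collection locally and copy its data up to last_tick.
for collection in inventory["collections"]:
    name = collection["parameters"]["name"]
    print("would copy collection", name, "up to tick", last_tick)
    # create the collection locally from collection["parameters"], call the dump
    # API for it up to last_tick and apply the returned documents, then create
    # the indexes listed in collection["indexes"]

# Step 3: continuously follow the replication log, starting from last_tick.
from_tick = last_tick
while True:
    with urllib.request.urlopen(BASE + "/logger-follow?from=" + from_tick) as response:
        last_included = response.headers.get("x-arango-replication-lastincluded")
        body = response.read()
    if not body:
        time.sleep(10)              # no new events yet, try again later
        continue
    # ... apply the returned log events locally here ...
    if last_included:
        from_tick = last_included   # resume after the last included tick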

", "summary": "returns an inventory of collections and indexes", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-15T13:34:34Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"404308391\" \n}\n\n

With some additional indexes:

unix> curl --dump - http://localhost:8529/_api/replication/inventory\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"collections\" : [ \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"16269735\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"animals\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"14827943\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"demo\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"404373927\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"405160359\", \n          \"type\" : \"hash\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"name\" \n          ] \n        }, \n        { \n          \"id\" : \"405422503\", \n          \"type\" : \"skiplist\", \n          \"unique\" : true, \n          \"fields\" : [ \n            \"a\", \n            \"b\" \n          ] \n        }, \n        { \n          \"id\" : \"405488039\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 500, \n          \"byteSize\" : 0 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"405553575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"IndexedCollection2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ \n        { \n          \"id\" : \"406340007\", \n          \"type\" : \"fulltext\", \n          \"unique\" : false, \n          \"minLength\" : 10, \n          \"fields\" : [ \n            \"text\" \n          ] \n        }, \n        { \n          \"id\" : \"406536615\", \n          \"type\" : \"skiplist\", \n          \"unique\" : false, \n          \"fields\" : [ \n            \"a\" \n          ] \n        }, \n        { \n          \"id\" : \"406602151\", \n          \"type\" : \"cap\", \n          \"unique\" : false, \n          \"size\" : 0, \n          \"byteSize\" : 1048576 \n        } \n      ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 2, \n        \"cid\" : \"132465063\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"vertices1\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n      }, \n      \"indexes\" : [ ] \n    }, \n    { \n      \"parameters\" : { \n        \"version\" : 4, \n        \"type\" : 3, \n        \"cid\" : \"135217575\", \n        \"deleted\" : false, \n        \"doCompact\" : true, \n        \"maximalSize\" : 1048576, \n        \"name\" : \"edges2\", \n        \"isVolatile\" : false, \n        \"waitForSync\" : false \n   
   }, \n      \"indexes\" : [ ] \n    } \n  ], \n  \"state\" : { \n    \"running\" : false, \n    \"lastLogTick\" : \"404242855\", \n    \"totalEvents\" : 22, \n    \"time\" : \"2014-01-20T21:04:25Z\" \n  }, \n  \"tick\" : \"406602151\" \n}\n\n

", "nickname": "returnsAnInventoryOfCollectionsAndIndexes" } ], @@ -565,7 +565,7 @@ "notes": "Starts the replication applier. This will return immediately if the replication applier is already running.

If the replication applier is not already running, the applier configuration will be checked, and if it is complete, the applier will be started in a background thread. This means that even if the applier encounters errors while running, they will not be reported in the response to this method.

To detect replication applier errors after the applier was started, use the /_api/replication/applier-state API instead.
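
A minimal sketch, assuming a local, unauthenticated server, of starting the applier and then checking /_api/replication/applier-state for errors (since errors are not reported by applier-start itself); the two-second pause is only an arbitrary grace period for this illustration.

import json
import time
import urllib.request

BASE = "http://localhost:8529/_api/replication"

# start the applier (errors are not reported by this call)
request = urllib.request.Request(BASE + "/applier-start", data=b"", method="PUT")
with urllib.request.urlopen(request) as response:
    print("running:", json.load(response)["state"]["running"])

time.sleep(2)  # give the background thread a moment to connect

# check the applier state for errors
with urllib.request.urlopen(BASE + "/applier-state") as response:
    state = json.load(response)["state"]
if state["lastError"].get("errorNum", 0) != 0:
    print("applier error:", state["lastError"].get("errorMessage"))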

", "summary": "starts the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:33:58Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-start\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:03:04Z\", \n      \"message\" : \"applier created\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "startsTheReplicationApplier" } ], @@ -592,7 +592,7 @@ "notes": "Stops the replication applier. This will return immediately if the replication applier is not running.

", "summary": "stops the replication applier", "httpMethod": "PUT", - "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "

unix> curl -X PUT --dump - http://localhost:8529/_api/replication/applier-stop\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "stopsTheReplicationApplier" } ], @@ -619,7 +619,7 @@ "notes": "Returns the state of the replication applier, regardless of whether the applier is currently running or not.

The response is a JSON hash with the following attributes:

- state: a JSON hash with the following sub-attributes:

- running: whether or not the applier is active and running

- lastAppliedContinuousTick: the last tick value from the continuous replication log the applier has applied.

- lastProcessedContinuousTick: the last tick value from the continuous replication log the applier has processed.

Normally, the last applied and last processed tick values should be identical. For transactional operations, the replication applier will first process incoming log events before applying them, so the processed tick value might be higher than the applied tick value. This will be the case until the applier encounters the transaction commit log event for the transaction.

- lastAvailableContinuousTick: the last tick value the logger server can provide.

- time: the time on the applier server.

- totalRequests: the total number of requests the applier has made to the endpoint.

- totalFailedConnects: the total number of failed connection attempts the applier has made.

- totalEvents: the total number of log events the applier has processed.

- progress: a JSON hash with details about the replication applier progress. It contains the following sub-attributes if there is progress to report:

- message: a textual description of the progress

- time: the date and time the progress was logged

- failedConnects: the current number of failed connection attempts

- lastError: a JSON hash with details about the last error that happened on the applier. It contains the following sub-attributes if there was an error:

- errorNum: a numerical error code

- errorMessage: a textual error description

- time: the date and time the error occurred

In case no error has occurred, lastError will be empty.

- server: a JSON hash with the following sub-attributes:

- version: the applier server's version

- serverId: the applier server's id

- endpoint: the endpoint the applier is connected to (if the applier is active) or will connect to (if the applier is currently inactive)

- database: the name of the database the applier is connected to (if the applier is active) or will connect to (if the applier is currently inactive)
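
A client could evaluate this state along the following lines. This is a minimal Python sketch assuming a local, unauthenticated server; the "lag" computed from the difference between the last available and the last applied continuous tick is only a rough indicator introduced here for illustration.

import json
import urllib.request

with urllib.request.urlopen("http://localhost:8529/_api/replication/applier-state") as response:
    result = json.load(response)

state = result["state"]
print("running: ", state["running"])
print("endpoint:", result["endpoint"], "database:", result["database"])

if state["lastError"].get("errorNum", 0) != 0:
    print("last error:", state["lastError"]["errorNum"], state["lastError"].get("errorMessage"))

applied = state["lastAppliedContinuousTick"]
available = state["lastAvailableContinuousTick"]
if applied is not None and available is not None:
    # tick values are stringified numbers; the difference is a rough lag indicator
    print("apply lag (ticks):", int(available) - int(applied))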

", "summary": "returns the state of the replication applier", "httpMethod": "GET", - "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 2, \n    \"totalFailedConnects\" : 2, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"errorMessage\" : \"could not connect to master at tcp://127.0.0.1:8529: Could not connect to 'tcp:/...\", \n      \"errorNum\" : 1412 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : true, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-15T13:34:35Z\", \n      \"message\" : \"fetching master state information\", \n      \"failedConnects\" : 1 \n    }, \n    \"totalRequests\" : 3, \n    \"totalFailedConnects\" : 3, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"errorNum\" : 0 \n    }, \n    \"time\" : \"2014-01-15T13:34:35Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.5\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", + "examples": "Fetching the state of an inactive applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

Fetching the state of an active applier:

unix> curl --dump - http://localhost:8529/_api/replication/applier-state\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"state\" : { \n    \"running\" : false, \n    \"lastAppliedContinuousTick\" : null, \n    \"lastProcessedContinuousTick\" : null, \n    \"lastAvailableContinuousTick\" : null, \n    \"progress\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"message\" : \"applier stopped\", \n      \"failedConnects\" : 0 \n    }, \n    \"totalRequests\" : 0, \n    \"totalFailedConnects\" : 0, \n    \"totalEvents\" : 0, \n    \"lastError\" : { \n      \"time\" : \"2014-01-20T21:04:27Z\", \n      \"errorMessage\" : \"got same server id (190048212006786) from endpoint 'tcp://127.0.0.1:8529' as the...\", \n      \"errorNum\" : 1405 \n    }, \n    \"time\" : \"2014-01-20T21:04:27Z\" \n  }, \n  \"server\" : { \n    \"version\" : \"1.4.6\", \n    \"serverId\" : \"190048212006786\" \n  }, \n  \"endpoint\" : \"tcp://127.0.0.1:8529\", \n  \"database\" : \"_system\" \n}\n\n

", "nickname": "returnsTheStateOfTheReplicationApplier" } ], diff --git a/js/apps/system/aardvark/api-docs/system.json b/js/apps/system/aardvark/api-docs/system.json index 9dd3fc504d..5c280b91d9 100644 --- a/js/apps/system/aardvark/api-docs/system.json +++ b/js/apps/system/aardvark/api-docs/system.json @@ -92,7 +92,7 @@ "notes": "

Returns the statistics information. The returned object contains the statistics figures grouped together according to the description returned by _admin/statistics-description. For instance, to access the figure userTime from the group system, you first select the sub-object describing the group, which is stored in system; within that sub-object, the value for userTime is stored in the attribute of the same name.

In case of a distribution, the returned object contains the total count in count and the distribution list in counts. The sum (or total) of the individual values is returned in sum.
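
For example, a plain figure and the mean of a distribution (sum divided by count) could be read as in the following minimal Python sketch, assuming a local, unauthenticated server; the chosen figures (system.userTime and client.totalTime) are just examples taken from the output below.

import json
import urllib.request

with urllib.request.urlopen("http://localhost:8529/_admin/statistics") as response:
    stats = json.load(response)

# plain figure: group "system", figure "userTime"
print("user time:", stats["system"]["userTime"])

# distribution: group "client", figure "totalTime"; the mean is sum / count
total_time = stats["client"]["totalTime"]
if total_time["count"] > 0:
    print("mean total time per request:", total_time["sum"] / total_time["count"])
print("distribution counts:", total_time["counts"])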

", "summary": "reads the statistics", "httpMethod": "GET", - "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 106242, \n    \"majorPageFaults\" : 1916, \n    \"userTime\" : 12.592, \n    \"systemTime\" : 1.726252, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 57217024, \n    \"virtualSize\" : 4994158592 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.00037384033203125, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 22.933324813842773, \n      \"count\" : 850, \n      \"counts\" : [ \n        539, \n        201, \n        44, \n        49, \n        14, \n        0, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 22.767783641815186, \n      \"count\" : 850, \n      \"counts\" : [ \n        539, \n        203, \n        42, \n        49, \n        14, \n        0, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.018385887145996094, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 27.57566213607788 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", + "examples": "

unix> curl --dump - http://localhost:8529/_admin/statistics\n\nHTTP/1.1 200 OK\ncontent-type: application/json; charset=utf-8\n\n{ \n  \"system\" : { \n    \"minorPageFaults\" : 113349, \n    \"majorPageFaults\" : 1916, \n    \"userTime\" : 28.970069, \n    \"systemTime\" : 3.00499, \n    \"numberOfThreads\" : 16, \n    \"residentSize\" : 58105856, \n    \"virtualSize\" : 5001773056 \n  }, \n  \"client\" : { \n    \"httpConnections\" : 1, \n    \"connectionTime\" : { \n      \"sum\" : 0.003744840621948242, \n      \"count\" : 1, \n      \"counts\" : [ \n        1, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"totalTime\" : { \n      \"sum\" : 51.210867404937744, \n      \"count\" : 850, \n      \"counts\" : [ \n        216, \n        248, \n        250, \n        114, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"requestTime\" : { \n      \"sum\" : 50.808953285217285, \n      \"count\" : 850, \n      \"counts\" : [ \n        226, \n        242, \n        251, \n        109, \n        18, \n        1, \n        3 \n      ] \n    }, \n    \"queueTime\" : { \n      \"sum\" : 0.03761625289916992, \n      \"count\" : 848, \n      \"counts\" : [ \n        848, \n        0, \n        0, \n        0, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesSent\" : { \n      \"sum\" : 381722, \n      \"count\" : 850, \n      \"counts\" : [ \n        234, \n        501, \n        115, \n        0, \n        0, \n        0 \n      ] \n    }, \n    \"bytesReceived\" : { \n      \"sum\" : 197140, \n      \"count\" : 850, \n      \"counts\" : [ \n        625, \n        225, \n        0, \n        0, \n        0, \n        0 \n      ] \n    } \n  }, \n  \"http\" : { \n    \"requestsTotal\" : 850, \n    \"requestsAsync\" : 0, \n    \"requestsGet\" : 225, \n    \"requestsHead\" : 0, \n    \"requestsPost\" : 446, \n    \"requestsPut\" : 34, \n    \"requestsPatch\" : 3, \n    \"requestsDelete\" : 142, \n    \"requestsOptions\" : 0, \n    \"requestsOther\" : 0 \n  }, \n  \"server\" : { \n    \"uptime\" : 58.96970891952515 \n  }, \n  \"error\" : false, \n  \"code\" : 200 \n}\n\n

", "nickname": "readsTheStatistics" } ],