diff --git a/Doxygen/Examples.Durham/shell-index-create-unique-constraint b/Doxygen/Examples.Durham/shell-index-create-unique-constraint new file mode 100644 index 0000000000..53c07f3af5 --- /dev/null +++ b/Doxygen/Examples.Durham/shell-index-create-unique-constraint @@ -0,0 +1,18 @@ +avocado> db.four.ensureUniqueConstraint("a", "b.c"); +{ "id" : "164405/1147445", "unique" : true, "type" : "hash", "fields" : ["a", "b.c"], "isNewlyCreated" : true } + +avocado> db.four.save({ a : 1, b : { c : 1 } }); +{ "_id" : "164405/1868341", "_rev" : 1868341 } + +avocado> db.four.save({ a : 1, b : { c : 1 } }); +JavaScript exception in file '(avocado)' at 1,9: [AvocadoError 1210: cannot save document] +!db.four.save({ a : 1, b : { c : 1 } }); +! ^ +stacktrace: [AvocadoError 1210: cannot save document] + at (avocado):1:9 + +avocado> db.four.save({ a : 1, b : { c : null } }); +{ "_id" : "164405/2196021", "_rev" : 2196021 } + +avocado> db.four.save({ a : 1 }); +{ "_id" : "164405/2196023", "_rev" : 2196023 } diff --git a/MRClient/avocirb.cpp b/MRClient/avocirb.cpp index bb952df446..ea33aa4f57 100644 --- a/MRClient/avocirb.cpp +++ b/MRClient/avocirb.cpp @@ -416,22 +416,26 @@ int main (int argc, char* argv[]) { ParseProgramOptions(argc, argv); // http://www.network-science.de/ascii/ Font: ogre - if (NoColors) { - printf(" _ _ \n"); - printf(" __ ___ _____ ___(_)_ __| |__ \n"); - printf(" / _` \\ \\ / / _ \\ / __| | '__| '_ \\ \n"); - printf(" | (_| |\\ V / (_) | (__| | | | |_) | \n"); - printf(" \\__,_| \\_/ \\___/ \\___|_|_| |_.__/ \n"); - printf(" \n"); - } - else { - printf("%s %s _ _ %s\n", DEF_GREEN, DEF_RED, DEF_RESET); - printf("%s __ ___ _____ ___%s(_)_ __| |__ %s\n", DEF_GREEN, DEF_RED, DEF_RESET); - printf("%s / _` \\ \\ / / _ \\ / __%s| | '__| '_ \\ %s\n", DEF_GREEN, DEF_RED, DEF_RESET); - printf("%s | (_| |\\ V / (_) | (__%s| | | | |_) | %s\n", DEF_GREEN, DEF_RED, DEF_RESET); - printf("%s \\__,_| \\_/ \\___/ \\___%s|_|_| |_.__/ %s\n", DEF_GREEN, DEF_RED, DEF_RESET); - printf("%s %s %s\n", DEF_GREEN, DEF_RED, DEF_RESET); + { + char const* g = DEF_GREEN; + char const* r = DEF_RED; + char const* z = DEF_RESET; + + if (NoColors) { + g = ""; + r = ""; + z = ""; + } + + printf("%s %s _ _ %s\n", g, r, z); + printf("%s __ _ _ __ __ _ _ __ __ _ ___ %s(_)_ __| |__ %s\n", g, r, z); + printf("%s / _` | '__/ _` | '_ \\ / _` |/ _ \\%s| | '__| '_ \\ %s\n", g, r, z); + printf("%s| (_| | | | (_| | | | | (_| | (_) %s| | | | |_) |%s\n", g, r, z); + printf("%s \\__,_|_| \\__,_|_| |_|\\__, |\\___/%s|_|_| |_.__/ %s\n", g, r, z); + printf("%s |___/ %s %s\n", g, r, z); } + + printf("\n"); printf("Welcome to avocirb %s. 
Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
 
 #ifdef TRI_V8_VERSION
diff --git a/Makefile.files b/Makefile.files
index abb69f0f20..b3024ce2b7 100644
--- a/Makefile.files
+++ b/Makefile.files
@@ -289,7 +289,6 @@ JAVASCRIPT_HEADER = \
   js/common/bootstrap/js-errors.h \
   js/common/bootstrap/js-modules.h \
   js/common/bootstrap/js-print.h \
-  js/common/bootstrap/js-errors.h \
   js/client/js-client.h \
   js/server/js-server.h \
   js/server/js-ahuacatl.h
diff --git a/Makefile.in b/Makefile.in
index 8ca70f57fc..afeee2201f 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -921,7 +921,6 @@ JAVASCRIPT_HEADER = \
   js/common/bootstrap/js-errors.h \
   js/common/bootstrap/js-modules.h \
   js/common/bootstrap/js-print.h \
-  js/common/bootstrap/js-errors.h \
   js/client/js-client.h \
   js/server/js-server.h \
   js/server/js-ahuacatl.h
diff --git a/RestHandler/RestDocumentHandler.cpp b/RestHandler/RestDocumentHandler.cpp
index e1798b7f34..d6843d287c 100644
--- a/RestHandler/RestDocumentHandler.cpp
+++ b/RestHandler/RestDocumentHandler.cpp
@@ -184,9 +184,12 @@ HttpHandler::status_e RestDocumentHandler::execute () {
 /// @REST{POST /document?collection=@FA{collection-name}&createCollection=@FA{create}}
 ///
 /// Instead of a @FA{collection-identifier}, a @FA{collection-name} can be
-/// used. If @FA{createCollection} is true, then the collection is created if it does not
-/// exists.
+/// used. If @FA{createCollection} is true, then the collection is created if it
+/// does not exist.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Create a document given a collection identifier @LIT{161039} for the collection
@@ -367,6 +370,9 @@ bool RestDocumentHandler::readDocument () {
 /// given etag. Otherwise a @LIT{HTTP 412} is returned. As an alternative
 /// you can supply the etag in an attribute @LIT{rev} in the URL.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Use a document handle:
@@ -474,6 +480,9 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
 ///
 /// Instead of a @FA{collection-identifier}, a collection name can be given.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// @verbinclude rest_read-document-all
@@ -577,6 +586,9 @@ bool RestDocumentHandler::readAllDocuments () {
 /// can use this call to get the current revision of a document or check if
 /// the document was deleted.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// @verbinclude rest_read-document-head
@@ -634,6 +646,9 @@ bool RestDocumentHandler::checkDocument () {
 /// header. You must never supply both the "ETag" header and the @LIT{rev}
 /// parameter.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Using document handle:
@@ -785,6 +800,9 @@ bool RestDocumentHandler::updateDocument () {
 /// "If-Match" header. You must never supply both the "If-Match" header and the
 /// @LIT{rev} parameter.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 ///
 /// @EXAMPLES
 ///
 /// Using document handle:
diff --git a/RestHandler/RestEdgeHandler.cpp b/RestHandler/RestEdgeHandler.cpp
index ff00db2d75..c5e298a430 100644
--- a/RestHandler/RestEdgeHandler.cpp
+++ b/RestHandler/RestEdgeHandler.cpp
@@ -84,6 +84,9 @@ RestEdgeHandler::RestEdgeHandler (HttpRequest* request, TRI_vocbase_t* vocbase)
 /// If you request such an edge, the returned document will also contain the
 /// attributes @LIT{_from} and @LIT{_to}.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Create an edge:
diff --git a/RestServer/AvocadoServer.cpp b/RestServer/AvocadoServer.cpp
index e83cc0f1ff..cd745ee291 100644
--- a/RestServer/AvocadoServer.cpp
+++ b/RestServer/AvocadoServer.cpp
@@ -642,9 +642,13 @@ int AvocadoServer::startupServer () {
     _applicationAdminServer->addBasicHandlers(factory);
 
     factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
-    factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);
     factory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
 
+    factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+
+    factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);
+
     if (shareAdminPort) {
       _applicationAdminServer->addHandlers(factory, "/_admin");
       _applicationUserManager->addHandlers(factory, "/_admin");
@@ -681,6 +685,10 @@ int AvocadoServer::startupServer () {
 
     adminFactory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
     adminFactory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+
+    adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+
     adminFactory->addPrefixHandler("/", RestHandlerCreator<RestActionHandler>::createData< pair< TRI_vocbase_t*, set<string>* >* >, (void*) &handlerDataAdmin);
 
diff --git a/RestServer/index-hash.dox b/RestServer/index-hash.dox
index 92c4503110..870903d325 100644
--- a/RestServer/index-hash.dox
+++ b/RestServer/index-hash.dox
@@ -29,7 +29,16 @@
 /// @page IndexHashTOC
 ///
 /// <ol>
-///   <li>MISSING</li>
+///   <li>@ref IndexHashHttp
+///     <ol>
+///       <li>@ref IndexHashHttpEnsureUniqueConstraint "POST /_api/index"</li>
+///     </ol>
+///   </li>
+///   <li>@ref IndexHashShell
+///     <ol>
+///       <li>@ref IndexHashShellEnsureUniqueConstraint "collection.ensureUniqueConstraint"</li>
+///     </ol>
+///   </li>
 /// </ol>
 ///
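For orientation, the two entries above pair the HTTP route with its shell counterpart. The shell call is a one-liner; a minimal transcript, repeating the shell-index-create-unique-constraint example added by this patch (collection name and identifiers are illustrative):

    avocado> db.four.ensureUniqueConstraint("a", "b.c");
    { "id" : "164405/1147445", "unique" : true, "type" : "hash", "fields" : ["a", "b.c"], "isNewlyCreated" : true }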
 ////////////////////////////////////////////////////////////////////////////////
@@ -41,6 +50,23 @@
 ///
 /// @copydoc IndexHashTOC
 ///
+///
+/// It is possible to define a hash index on one or more attributes (or paths)
+/// of a document. This hash is then used in queries to locate documents in
+/// O(1) operations. If the hash is unique, then no two documents are allowed
+/// to have the same set of attribute values.
+///
+/// @section IndexHashHttp Accessing Hash Indexes via HTTP
+//////////////////////////////////////////////////////////
+///
+/// @anchor IndexHashHttpEnsureUniqueConstraint
+/// @copydetails JSF_POST_api_index_hash
+///
+/// @section IndexHashShell Accessing Hash Indexes from the Shell
+/////////////////////////////////////////////////////////////////
+///
+/// @anchor IndexHashShellEnsureUniqueConstraint
+/// @copydetails JS_EnsureUniqueConstraintVocbaseCol
 ////////////////////////////////////////////////////////////////////////////////
 
 // Local Variables:
diff --git a/UnitTests/HttpInterface/api-index-unique-constraint-spec.rb b/UnitTests/HttpInterface/api-index-hash-spec.rb
similarity index 75%
rename from UnitTests/HttpInterface/api-index-unique-constraint-spec.rb
rename to UnitTests/HttpInterface/api-index-hash-spec.rb
index 00a5e05804..13ef849303 100644
--- a/UnitTests/HttpInterface/api-index-unique-constraint-spec.rb
+++ b/UnitTests/HttpInterface/api-index-hash-spec.rb
@@ -5,13 +5,58 @@ require './avocadodb.rb'
 
 describe AvocadoDB do
   api = "/_api/index"
-  prefix = "api-index-unique-constraint"
+  prefix = "api-index-hash"
 
 ################################################################################
 ## unique constraints during create
 ################################################################################
 
-  context "creating:" do
+  context "creating index:" do
+    context "dealing with unique constraint violations:" do
+      before do
+        @cn = "UnitTestsCollectionIndexes"
+        AvocadoDB.drop_collection(@cn)
+        @cid = AvocadoDB.create_collection(@cn)
+      end
+
+      after do
+        AvocadoDB.drop_collection(@cn)
+      end
+
+      it "does not create the index in case of violation" do
+
+        # create a document
+        cmd1 = "/document?collection=#{@cid}"
+        body = "{ \"a\" : 1, \"b\" : 1 }"
+        doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
+
+        doc.code.should eq(201)
+
+        # create another document
+        cmd1 = "/document?collection=#{@cid}"
+        body = "{ \"a\" : 1, \"b\" : 1 }"
+        doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
+
+        doc.code.should eq(201)
+
+        # try to create the index
+        cmd = "/_api/index?collection=#{@cid}"
+        body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
+        doc = AvocadoDB.log_post("#{prefix}-fail", cmd, :body => body)
+
+        doc.code.should eq(400)
+        doc.parsed_response['error'].should eq(true)
+        doc.parsed_response['code'].should eq(400)
+        doc.parsed_response['errorNum'].should eq(1203)
+      end
+    end
+  end
+
+################################################################################
+## unique constraints during document creation
+################################################################################
+
+  context "creating documents:" do
     context "dealing with unique constraints:" do
       before do
         @cn = "UnitTestsCollectionIndexes"
@@ -24,9 +69,9 @@
       end
 
       it "rolls back in case of violation" do
-        cmd = "/_api/index/#{@cid}"
+        cmd = "/_api/index?collection=#{@cid}"
         body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-create1", cmd, :body => body)
 
         doc.code.should eq(201)
         doc.parsed_response['type'].should eq("hash")
@@
-35,7 +80,7 @@ describe AvocadoDB do # create a document cmd1 = "/document?collection=#{@cid}" body = "{ \"a\" : 1, \"b\" : 1 }" - doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body) + doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body) doc.code.should eq(201) @@ -57,7 +102,7 @@ describe AvocadoDB do # create a unique constraint violation body = "{ \"a\" : 1, \"b\" : 2 }" - doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body) + doc = AvocadoDB.log_post("#{prefix}-create3", cmd1, :body => body) doc.code.should eq(409) @@ -72,7 +117,7 @@ describe AvocadoDB do # third try (make sure the rollback has not destroyed anything) body = "{ \"a\" : 1, \"b\" : 3 }" - doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body) + doc = AvocadoDB.log_post("#{prefix}-create4", cmd1, :body => body) doc.code.should eq(409) @@ -116,7 +161,7 @@ describe AvocadoDB do ## unique constraints during update ################################################################################ - context "updating:" do + context "updating documents:" do context "dealing with unique constraints:" do before do @cn = "UnitTestsCollectionIndexes" @@ -129,9 +174,9 @@ describe AvocadoDB do end it "rolls back in case of violation" do - cmd = "/_api/index/#{@cid}" + cmd = "/_api/index?collection=#{@cid}" body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }" - doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body) + doc = AvocadoDB.log_post("#{prefix}-update1", cmd, :body => body) doc.code.should eq(201) doc.parsed_response['type'].should eq("hash") @@ -140,7 +185,7 @@ describe AvocadoDB do # create a document cmd1 = "/document?collection=#{@cid}" body = "{ \"a\" : 1, \"b\" : 1 }" - doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body) + doc = AvocadoDB.log_post("#{prefix}-update2", cmd1, :body => body) doc.code.should eq(201) @@ -162,7 +207,7 @@ describe AvocadoDB do # create a second document body = "{ \"a\" : 2, \"b\" : 2 }" - doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body) + doc = AvocadoDB.log_post("#{prefix}-update3", cmd1, :body => body) doc.code.should eq(201) diff --git a/UnitTests/HttpInterface/run-tests b/UnitTests/HttpInterface/run-tests index 331343db2b..8699e02e1b 100755 --- a/UnitTests/HttpInterface/run-tests +++ b/UnitTests/HttpInterface/run-tests @@ -9,5 +9,6 @@ rspec --format d \ rest-delete-document-spec.rb \ rest-edge-spec.rb \ api-index-spec.rb \ + api-index-hash-spec.rb \ api-cursor-spec.rb \ api-simple-spec.rb diff --git a/V8/v8-vocbase.cpp b/V8/v8-vocbase.cpp index 89b89ec8e3..9560f2cb0e 100644 --- a/V8/v8-vocbase.cpp +++ b/V8/v8-vocbase.cpp @@ -4504,17 +4504,24 @@ static v8::Handle JS_EnsureGeoConstraintVocbaseCol (v8::Arguments con /// /// @FUN{ensureUniqueConstraint(@FA{field1}, @FA{field2}, ...,@FA{fieldn})} /// -/// Creates a hash index on all documents using attributes as paths to the -/// fields. At least one attribute must be given. The value of this attribute -/// must be a list. All documents, which do not have the attribute path or where -/// one or more values that are not suitable, are ignored. +/// Creates a unique hash index on all documents using @FA{field1}, @FA{field2}, +/// ... as attribute paths. At least one attribute path must be given. /// -/// In case that the index was successfully created, the index identifier -/// is returned. +/// When a unique constraint is in effect for a collection, then all documents +/// which contain the given attributes must differ in the attribute +/// values. 
Creating a new document or updating a document will fail if the
+/// uniqueness is violated. If any attribute value is null for a document, this
+/// document is ignored by the index.
+///
+/// If the index was successfully created, the index identifier is
+/// returned.
+///
+/// Note that non-existing attribute paths in a document are treated as if the
+/// value were @LIT{null}.
 ///
 /// @EXAMPLES
 ///
-/// @verbinclude admin5
+/// @verbinclude shell-index-create-unique-constraint
 ////////////////////////////////////////////////////////////////////////////////
 
 static v8::Handle<v8::Value> JS_EnsureUniqueConstraintVocbaseCol (v8::Arguments const& argv) {
diff --git a/V8Client/avocsh.cpp b/V8Client/avocsh.cpp
index b6973fa472..61ff9e946c 100644
--- a/V8Client/avocsh.cpp
+++ b/V8Client/avocsh.cpp
@@ -1227,20 +1227,26 @@ int main (int argc, char* argv[]) {
 
   // http://www.network-science.de/ascii/ Font: ogre
 
-  if (noColors) {
-    printf(" " " _ \n");
-    printf(" __ ___ _____ ___ " "___| |__ \n");
-    printf(" / _` \\ \\ / / _ \\ / __" "/ __| '_ \\ \n");
-    printf(" | (_| |\\ V / (_) | (__" "\\__ \\ | | | \n");
-    printf(" \\__,_| \\_/ \\___/ \\___" "|___/_| |_| \n\n");
-  }
-  else {
-    printf( " " "\x1b[31m _ \x1b[0m\n");
-    printf("\x1b[32m __ ___ _____ ___ " "\x1b[31m___| |__ \x1b[0m\n");
-    printf("\x1b[32m / _` \\ \\ / / _ \\ / __" "\x1b[31m/ __| '_ \\ \x1b[0m\n");
-    printf("\x1b[32m | (_| |\\ V / (_) | (__" "\x1b[31m\\__ \\ | | | \x1b[0m\n");
-    printf("\x1b[32m \\__,_| \\_/ \\___/ \\___" "\x1b[31m|___/_| |_| \x1b[0m\n\n");
+  {
+    char const* g = DEF_GREEN;
+    char const* r = DEF_RED;
+    char const* z = DEF_RESET;
+
+    if (noColors) {
+      g = "";
+      r = "";
+      z = "";
+    }
+
+    printf("%s %s _ %s\n", g, r, z);
+    printf("%s __ _ _ __ __ _ _ __ __ _ ___ %s ___| |__ %s\n", g, r, z);
+    printf("%s / _` | '__/ _` | '_ \\ / _` |/ _ \\%s/ __| '_ \\ %s\n", g, r, z);
+    printf("%s| (_| | | | (_| | | | | (_| | (_) %s\\__ \\ | | |%s\n", g, r, z);
+    printf("%s \\__,_|_| \\__,_|_| |_|\\__, |\\___/%s|___/_| |_|%s\n", g, r, z);
+    printf("%s |___/ %s %s\n", g, r, z);
   }
+
+  printf("\n");
   printf("Welcome to avocsh %s. Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
 
 #ifdef TRI_V8_VERSION
diff --git a/VocBase/index.c b/VocBase/index.c
index 725a0e0560..b259ad19f5 100644
--- a/VocBase/index.c
+++ b/VocBase/index.c
@@ -1211,214 +1211,88 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t* idx,
 /// @brief helper for hashing
 ////////////////////////////////////////////////////////////////////////////////
 
-static int HashIndexHelper (const TRI_hash_index_t* hashIndex,
+static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
                             HashIndexElement* hashElement,
-                            const TRI_doc_mptr_t* document,
-                            const TRI_shaped_json_t* shapedDoc) {
+                            TRI_doc_mptr_t const* document,
+                            TRI_shaped_json_t const* shapedDoc) {
   union { void* p; void const* c; } cnv;
-  TRI_shaped_json_t shapedObject;
   TRI_shape_access_t* acc;
+  TRI_shaped_json_t shapedObject;
+  TRI_shaper_t* shaper;
+  int res;
   size_t j;
 
+  shaper = hashIndex->base._collection->_shaper;
+
+  // .............................................................................
+  // Attempting to locate a hash entry using TRI_shaped_json_t object. Use this
+  // when we wish to remove a hash entry and we only have the "keys" rather than
+  // having the document (from which the keys would follow).
+  // .............................................................................
+
   if (shapedDoc != NULL) {
-
-    // ..........................................................................
- // Attempting to locate a hash entry using TRI_shaped_json_t object. Use this - // when we wish to remove a hash entry and we only have the "keys" rather than - // having the document (from which the keys would follow). - // .......................................................................... - hashElement->data = NULL; + } - for (j = 0; j < hashIndex->_paths._length; ++j) { - TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j))); + // ............................................................................. + // Assign the document to the HashIndexElement structure - so that it can + // later be retreived. + // ............................................................................. + + else if (document != NULL) { + cnv.c = document; + hashElement->data = cnv.p; + + shapedDoc = &document->_document; + } + + else { + return TRI_ERROR_INTERNAL; + } + + // ............................................................................. + // Extract the attribute values + // ............................................................................. + + res = TRI_ERROR_NO_ERROR; + + for (j = 0; j < hashIndex->_paths._length; ++j) { + TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths, j))); - // .......................................................................... - // Determine if document has that particular shape - // .......................................................................... + // determine if document has that particular shape + acc = TRI_ShapeAccessor(shaper, shapedDoc->_sid, shape); - acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, shapedDoc->_sid, shape); + if (acc == NULL || acc->_shape == NULL) { + if (acc != NULL) { + TRI_FreeShapeAccessor(acc); + } - if (acc == NULL || acc->_shape == NULL) { - if (acc != NULL) { - TRI_FreeShapeAccessor(acc); - } + shapedObject._sid = shaper->_sidNull; + shapedObject._data.length = 0; + shapedObject._data.data = NULL; - // TRI_Free(hashElement->fields); memory deallocated in the calling procedure - return TRI_WARNING_AVOCADO_INDEX_HASH_UPDATE_ATTRIBUTE_MISSING; - } + res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING; + } + else { - // .......................................................................... - // Extract the field - // .......................................................................... - + // extract the field if (! TRI_ExecuteShapeAccessor(acc, shapedDoc, &shapedObject)) { TRI_FreeShapeAccessor(acc); // TRI_Free(hashElement->fields); memory deallocated in the calling procedure return TRI_ERROR_INTERNAL; } - - // .......................................................................... - // Store the json shaped Object -- this is what will be hashed - // .......................................................................... - hashElement->fields[j] = shapedObject; TRI_FreeShapeAccessor(acc); - } // end of for loop - } - - else if (document != NULL) { - - // .......................................................................... - // Assign the document to the HashIndexElement structure - so that it can later - // be retreived. - // .......................................................................... - cnv.c = document; - hashElement->data = cnv.p; - - for (j = 0; j < hashIndex->_paths._length; ++j) { - TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j))); - - // .......................................................................... 
- // Determine if document has that particular shape - // It is not an error if the document DOES NOT have the particular shape - // .......................................................................... - - acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, document->_document._sid, shape); - - if (acc == NULL || acc->_shape == NULL) { - if (acc != NULL) { - TRI_FreeShapeAccessor(acc); - } - - // TRI_Free(hashElement->fields); memory deallocated in the calling procedure - - return TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING; - } - - // .......................................................................... - // Extract the field - // .......................................................................... - - if (! TRI_ExecuteShapeAccessor(acc, &(document->_document), &shapedObject)) { - TRI_FreeShapeAccessor(acc); - // TRI_Free(hashElement->fields); memory deallocated in the calling procedure - - return TRI_ERROR_INTERNAL; + if (shapedObject._sid == shaper->_sidNull) { + res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING; } - - // .......................................................................... - // Store the field - // .......................................................................... - - hashElement->fields[j] = shapedObject; - - TRI_FreeShapeAccessor(acc); - } // end of for loop - } - - else { - return TRI_ERROR_INTERNAL; - } - - return TRI_ERROR_NO_ERROR; -} - -//////////////////////////////////////////////////////////////////////////////// -/// @brief hash indexes a document -//////////////////////////////////////////////////////////////////////////////// - -static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) { - HashIndexElement hashElement; - TRI_hash_index_t* hashIndex; - int res; - - // ............................................................................ - // Obtain the hash index structure - // ............................................................................ - - hashIndex = (TRI_hash_index_t*) idx; - - if (idx == NULL) { - LOG_WARNING("internal error in InsertHashIndex"); - return TRI_set_errno(TRI_ERROR_INTERNAL); - } - - // ............................................................................ - // Allocate storage to shaped json objects stored as a simple list. - // These will be used for hashing. - // ............................................................................ - - hashElement.numFields = hashIndex->_paths._length; - hashElement.fields = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false); - - if (hashElement.fields == NULL) { - LOG_WARNING("out-of-memory in InsertHashIndex"); - return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY); - } - - res = HashIndexHelper(hashIndex, &hashElement, doc, NULL); - - - // ............................................................................ - // It is possible that this document does not have the necessary attributes - // (keys) to participate in this index. - // ............................................................................ - - - // ............................................................................ - // If an error occurred in the called procedure HashIndexHelper, we must - // now exit -- and deallocate memory assigned to hashElement. - // ............................................................................ 
- - if (res != TRI_ERROR_NO_ERROR) { // some sort of error occurred - - // .......................................................................... - // Deallocated the memory already allocated to hashElement.fields - // .......................................................................... - - TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields); - - - // .......................................................................... - // It may happen that the document does not have the necessary attributes to - // be included within the hash index, in this case do not report back an error. - // .......................................................................... - - if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) { - return TRI_ERROR_NO_ERROR; } - - return res; - } - - - - // ............................................................................ - // Fill the json field list from the document for unique hash index - // ............................................................................ - - - if (hashIndex->base._unique) { - res = HashIndex_insert(hashIndex->_hashIndex, &hashElement); - } - - // ............................................................................ - // Fill the json field list from the document for non-unique hash index - // ............................................................................ - - else { - res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement); - } - - // ............................................................................ - // Memory which has been allocated to hashElement.fields remains allocated - // contents of which are stored in the hash array. - // ............................................................................ - TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields); + // store the json shaped Object -- this is what will be hashed + hashElement->fields[j] = shapedObject; + } return res; } @@ -1510,71 +1384,52 @@ static void RemoveIndexHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec } //////////////////////////////////////////////////////////////////////////////// -/// @brief removes a document from a hash index +/// @brief hash indexes a document //////////////////////////////////////////////////////////////////////////////// -static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) { +static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) { HashIndexElement hashElement; TRI_hash_index_t* hashIndex; int res; - - // ............................................................................ + + // ............................................................................. // Obtain the hash index structure - // ............................................................................ + // ............................................................................. hashIndex = (TRI_hash_index_t*) idx; if (idx == NULL) { - LOG_WARNING("internal error in RemoveHashIndex"); + LOG_WARNING("internal error in InsertHashIndex"); return TRI_set_errno(TRI_ERROR_INTERNAL); - } - - // ............................................................................ - // Allocate some memory for the HashIndexElement structure - // ............................................................................ + } + // ............................................................................. + // Allocate storage to shaped json objects stored as a simple list. + // These will be used for hashing. 
+ // ............................................................................. + hashElement.numFields = hashIndex->_paths._length; - hashElement.fields = TRI_Allocate( TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false); - - if (hashElement.fields == NULL) { - LOG_WARNING("out-of-memory in InsertHashIndex"); - return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY); - } - - // .......................................................................... - // Fill the json field list from the document - // .......................................................................... - + hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false); + res = HashIndexHelper(hashIndex, &hashElement, doc, NULL); - - // .......................................................................... - // It may happen that the document does not have attributes which match - // For now return internal error, there needs to be its own error number - // and the appropriate action needs to be taken by the calling function in - // such cases. - // .......................................................................... + // ............................................................................. + // It is possible that this document does not have the necessary attributes + // (keys) to participate in this index. + // + // If an error occurred in the called procedure HashIndexHelper, we must + // now exit -- and deallocate memory assigned to hashElement. + // ............................................................................. if (res != TRI_ERROR_NO_ERROR) { - // ........................................................................ - // Deallocate memory allocated to hashElement.fields above - // ........................................................................ - + // Deallocated the memory already allocated to hashElement.fields TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields); - - - // ........................................................................ - // It may happen that the document does not have the necessary attributes - // to have particpated within the hash index. In this case, we do not - // report an error to the calling procedure. - // ........................................................................ - - // ........................................................................ - // -1 from the called procedure HashIndexHelper implies that we do not - // propagate the error to the parent function. However for removal - // we advice the parent function. TODO: return a proper error code. - // ........................................................................ + + // ............................................................................. + // It may happen that the document does not have the necessary attributes to + // be included within the hash index, in this case do not report back an error. + // ............................................................................. if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) { return TRI_ERROR_NO_ERROR; @@ -1583,25 +1438,104 @@ static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) { return res; } - // ............................................................................ - // Attempt the removal for unique hash indexes - // ............................................................................ + // ............................................................................. 
+  // Fill the json field list from the document for unique or non-unique index
+  // .............................................................................
+
+  if (hashIndex->base._unique) {
+    res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
+  }
+  else {
+    res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
+  }
+
+  // .............................................................................
+  // Memory which has been allocated to hashElement.fields remains allocated;
+  // its contents are stored in the hash array.
+  // .............................................................................
+
+  TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+
+  return res;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+/// @brief removes a document from a hash index
+////////////////////////////////////////////////////////////////////////////////
+
+static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
+  HashIndexElement hashElement;
+  TRI_hash_index_t* hashIndex;
+  int res;
+
+  // .............................................................................
+  // Obtain the hash index structure
+  // .............................................................................
+
+  hashIndex = (TRI_hash_index_t*) idx;
+
+  if (idx == NULL) {
+    LOG_WARNING("internal error in RemoveHashIndex");
+    return TRI_set_errno(TRI_ERROR_INTERNAL);
+  }
+
+  // .............................................................................
+  // Allocate some memory for the HashIndexElement structure
+  // .............................................................................
+
+  hashElement.numFields = hashIndex->_paths._length;
+  hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
+
+  // .............................................................................
+  // Fill the json field list from the document
+  // .............................................................................
+
+  res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
+
+  // .............................................................................
+  // It may happen that the document does not have attributes which match.
+  // For now we return an internal error; there needs to be a separate error
+  // number, and the appropriate action needs to be taken by the calling
+  // function in such cases.
+  // .............................................................................
+
+  if (res != TRI_ERROR_NO_ERROR) {
+
+    // Deallocate memory allocated to hashElement.fields above
+    TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+
+    // .............................................................................
+    // It may happen that the document does not have the necessary attributes to
+    // have participated within the hash index. In this case, we do not report an
+    // error to the calling procedure.
+    //
+    // TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING from the called
+    // procedure HashIndexHelper implies that we do not propagate the error to
+    // the parent function. However for removal we advise the parent
+    // function. TODO: return a proper error code.
+    // .............................................................................
+
+    if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
+      return TRI_ERROR_NO_ERROR;
+    }
+
+    return res;
+  }
+
+  // .............................................................................
+ // Attempt the removal for unique or non-unique hash indexes + // ............................................................................. if (hashIndex->base._unique) { res = HashIndex_remove(hashIndex->_hashIndex, &hashElement); } - - // ............................................................................ - // Attempt the removal for non-unique hash indexes - // ............................................................................ - else { res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement); } - // ............................................................................ + // ............................................................................. // Deallocate memory allocated to hashElement.fields above - // ............................................................................ + // ............................................................................. TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields); @@ -1616,20 +1550,20 @@ static int UpdateHashIndex (TRI_index_t* idx, const TRI_doc_mptr_t* newDoc, const TRI_shaped_json_t* oldDoc) { - // .......................................................................... - // Note: The oldDoc is represented by the TRI_shaped_json_t rather than by - // a TRI_doc_mptr_t object. However for non-unique indexes we must - // pass the document shape to the hash remove function. - // .......................................................................... + // ............................................................................. + // Note: The oldDoc is represented by the TRI_shaped_json_t rather than by a + // TRI_doc_mptr_t object. However for non-unique indexes we must pass the + // document shape to the hash remove function. + // ............................................................................. union { void* p; void const* c; } cnv; HashIndexElement hashElement; TRI_hash_index_t* hashIndex; int res; - // ............................................................................ + // ............................................................................. // Obtain the hash index structure - // ............................................................................ + // ............................................................................. hashIndex = (TRI_hash_index_t*) idx; @@ -1638,179 +1572,98 @@ static int UpdateHashIndex (TRI_index_t* idx, return TRI_ERROR_INTERNAL; } - // ............................................................................ + // ............................................................................. // Allocate some memory for the HashIndexElement structure - // ............................................................................ + // ............................................................................. hashElement.numFields = hashIndex->_paths._length; - hashElement.fields = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false); - - if (hashElement.fields == NULL) { - LOG_WARNING("out-of-memory in UpdateHashIndex"); - return TRI_ERROR_OUT_OF_MEMORY; - } + hashElement.fields = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false); - // ............................................................................ + // ............................................................................. // Update for unique hash index - // ............................................................................ 
- - // ............................................................................ + // // Fill in the fields with the values from oldDoc - // ............................................................................ + // ............................................................................. - if (hashIndex->base._unique) { - - assert(oldDoc != NULL); + assert(oldDoc != NULL); - res = HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc); + res = HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc); - if (res == TRI_ERROR_NO_ERROR) { + if (res == TRI_ERROR_NO_ERROR) { - // ............................................................................ - // We must fill the hashElement with the value of the document shape -- this - // is necessary when we attempt to remove non-unique hash indexes. - // ............................................................................ + // ............................................................................ + // We must fill the hashElement with the value of the document shape -- this + // is necessary when we attempt to remove non-unique hash indexes. + // ............................................................................ - cnv.c = newDoc; // we are assuming here that the doc ptr does not change - hashElement.data = cnv.p; + cnv.c = newDoc; // we are assuming here that the doc ptr does not change + hashElement.data = cnv.p; - // ............................................................................ - // Remove the hash index entry and return. - // ............................................................................ + // ............................................................................ + // Remove the old hash index entry + // ............................................................................ + if (hashIndex->base._unique) { res = HashIndex_remove(hashIndex->_hashIndex, &hashElement); - - if (res != TRI_ERROR_NO_ERROR) { - - // .......................................................................... - // This error is common, when a document 'update' occurs, but fails - // due to the fact that a duplicate entry already exists, when the 'rollback' - // is applied, there is no document to remove -- so we get this error. - // .......................................................................... - - LOG_WARNING("could not remove existing document from hash index in UpdateHashIndex"); - } - } - - // .............................................................................. - // Here we are assuming that the existing document could not be removed, because - // the doc did not have the correct attributes. TODO: do not make this assumption. - // .............................................................................. - + } else { - LOG_WARNING("existing document was not removed from hash index in UpdateHashIndex"); + res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement); } - - // ............................................................................ - // Fill the json simple list from the document - // ............................................................................ - - res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL); - - - // ............................................................................ - // Deal with any errors reported back. - // ............................................................................ - + // .......................................................................... 
+ // This error is common, when a document 'update' occurs, but fails + // due to the fact that a duplicate entry already exists, when the 'rollback' + // is applied, there is no document to remove -- so we get this error. + // .......................................................................... + if (res != TRI_ERROR_NO_ERROR) { + LOG_DEBUG("could not remove existing document from hash index in UpdateHashIndex"); + } + } + + else if (res != TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) { + LOG_WARNING("existing document was not removed from hash index in UpdateHashIndex"); + } + + // ............................................................................ + // Fill the json simple list from the document + // ............................................................................ + + res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL); + + // ............................................................................ + // Deal with any errors reported back. + // ............................................................................ + + if (res != TRI_ERROR_NO_ERROR) { - // .......................................................................... // Deallocated memory given to hashElement.fields - // .......................................................................... - TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields); + // probably fields do not match. if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) { - // ........................................................................ - // probably fields do not match. - // ........................................................................ - return TRI_ERROR_NO_ERROR; } return res; - } + } - // ............................................................................ - // Attempt to add the hash entry from the new doc - // ............................................................................ + // ............................................................................ + // Attempt to add the hash entry from the new doc + // ............................................................................ + if (hashIndex->base._unique) { res = HashIndex_insert(hashIndex->_hashIndex, &hashElement); } - - // ............................................................................ - // Update for non-unique hash index - // ............................................................................ - else { - - // ............................................................................ - // Fill in the fields with the values from oldDoc - // ............................................................................ - - res = HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc); - - if (res == TRI_ERROR_NO_ERROR) { - - // ............................................................................ - // We must fill the hashElement with the value of the document shape -- this - // is necessary when we attempt to remove non-unique hash indexes. - // ............................................................................ - - cnv.c = newDoc; - hashElement.data = cnv.p; - - // ............................................................................ - // Remove the hash index entry and return. - // ............................................................................ 
-
-      res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
-
-      if (res != TRI_ERROR_NO_ERROR) {
-        LOG_WARNING("could not remove old document from (non-unique) hash index in UpdateHashIndex");
-      }
-    }
-
-    else {
-      LOG_WARNING("existing document was not removed from (non-unique) hash index in UpdateHashIndex");
-    }
-
-    // ............................................................................
-    // Fill the shaped json simple list from the document
-    // ............................................................................
-
-    res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL);
-
-    if (res != TRI_ERROR_NO_ERROR) {
-
-      TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
-
-      if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
-
-        // ........................................................................
-        // probably fields do not match -- report internal error for now
-        // ........................................................................
-
-        return TRI_ERROR_NO_ERROR;
-      }
-
-      return res;
-    }
-
-    // ............................................................................
-    // Attempt to add the hash entry from the new doc
-    // ............................................................................
-
     res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
   }
-
-
+
   // ............................................................................
   // Deallocate memory given to hashElement.fields
   // ............................................................................
 
-  TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+  TRI_Free(TRI_CORE_MEM_ZONE, hashElement.fields);
 
   return res;
 }
@@ -1941,7 +1794,7 @@ void TRI_FreeHashIndex (TRI_index_t* idx) {
 /// HashIndexElements* results
 ////////////////////////////////////////////////////////////////////////////////
 
-HashIndexElements* TRI_LookupHashIndex(TRI_index_t* idx, TRI_json_t* parameterList) {
+HashIndexElements* TRI_LookupHashIndex (TRI_index_t* idx, TRI_json_t* parameterList) {
   TRI_hash_index_t* hashIndex;
   HashIndexElements* result;
   HashIndexElement element;
diff --git a/js/actions/system/api-index.js b/js/actions/system/api-index.js
index 8d7b3a494f..41155e4fe9 100644
--- a/js/actions/system/api-index.js
+++ b/js/actions/system/api-index.js
@@ -337,6 +337,39 @@ function POST_api_index_geo (req, res, collection, body) {
 
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief creates a hash index
+///
+/// @REST{POST /_api/index?collection=@FA{collection-identifier}}
+///
+/// Creates a hash index for the collection @FA{collection-identifier}, if it
+/// does not already exist. The call expects an object containing the index
+/// details.
+///
+/// - @LIT{type}: must be equal to @LIT{"hash"}.
+///
+/// - @LIT{fields}: A list of attribute paths.
+///
+/// - @LIT{unique}: If @LIT{true}, then create a unique index.
+///
+/// If the index does not already exist and can be created, then a @LIT{HTTP
+/// 201} is returned. If the index already exists, then a @LIT{HTTP 200} is
+/// returned.
+///
+/// If the @FA{collection-identifier} is unknown, then a @LIT{HTTP 404} is
+/// returned. It is possible to specify a name instead of an identifier.
+///
+/// If the collection already contains documents and you try to create a unique
+/// hash index in such a way that there are documents violating the uniqueness,
+/// then a @LIT{HTTP 400} is returned.
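The payload and status codes described above can be summarised in a short sketch. The collection identifier and field names below are illustrative; the errorNum 1203 for the violation case comes from the accompanying tests, and "isNewlyCreated" : false for the 200 case is an assumption extrapolated from the 201 example output:

    // hypothetical payload for POST /_api/index?collection=161039
    var body = { "type" : "hash", "unique" : true, "fields" : [ "a", "b" ] };

    // HTTP 201: index was newly created; the response contains "isNewlyCreated" : true
    // HTTP 200: index already existed (assumed: "isNewlyCreated" : false)
    // HTTP 400: unique index requested, but existing documents violate uniqueness
    //           (the spec in this patch expects "errorNum" : 1203)
    // HTTP 404: the collection identifier or name is unknown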
+///
+/// @EXAMPLES
+///
+/// Creating a unique constraint:
+///
+/// @verbinclude api-index-create-new-unique-constraint
+///
+/// Creating a hash index:
+///
+/// @verbinclude api-index-create-new-hash-index
 ////////////////////////////////////////////////////////////////////////////////
 
 function POST_api_index_hash (req, res, collection, body) {
diff --git a/js/actions/system/api-system.js b/js/actions/system/api-system.js
index 1b3164a69f..8a89dac696 100644
--- a/js/actions/system/api-system.js
+++ b/js/actions/system/api-system.js
@@ -37,10 +37,10 @@ var actions = require("actions");
 ////////////////////////////////////////////////////////////////////////////////
 
 ////////////////////////////////////////////////////////////////////////////////
-/// @fn JSF_GET_system_status
+/// @fn JSF_GET_admin_status
 /// @brief returns system status information for the server
 ///
-/// @REST{GET /_system/status}
+/// @REST{GET /_admin/status}
 ///
 /// The call returns an object with the following attributes:
 ///
@@ -70,7 +70,7 @@ var actions = require("actions");
 ////////////////////////////////////////////////////////////////////////////////
 
 actions.defineHttp({
-  url : "_system/status",
+  url : "_admin/status",
   context : "admin",
 
   callback : function (req, res) {
@@ -86,6 +86,135 @@ actions.defineHttp({
   }
 });
 
+////////////////////////////////////////////////////////////////////////////////
+/// @fn JSF_GET_admin_config_description
+/// @brief returns configuration description
+///
+/// @REST{GET /_admin/config/description}
+///
+/// The call returns an object describing the configuration.
+////////////////////////////////////////////////////////////////////////////////
+
+actions.defineHttp({
+  url : "_admin/config/description",
+  context : "admin",
+
+  callback : function (req, res) {
+    try {
+      result = {
+        database : {
+          name : "Database",
+          type : "section",
+
+          path : {
+            name : "Path",
+            type : "string",
+            readonly : true
+          },
+
+          access : {
+            name : "Combined Access",
+            type : "string",
+            readonly : true
+          }
+        },
+
+        logging : {
+          name : "Logging",
+          type : "section",
+
+          level : {
+            name : "Log Level",
+            type : "pull-down",
+            values : [ "fatal", "error", "warning", "info", "debug", "trace" ]
+          },
+
+          syslog : {
+            name : "Use Syslog",
+            type : "boolean"
+          },
+
+          bufferSize : {
+            name : "Log Buffer Size",
+            type : "integer"
+          },
+
+          output : {
+            name : "Output",
+            type : "section",
+
+            file : {
+              name : "Log File",
+              type : "string",
+              readonly : true
+            }
+          }
+        }
+      };
+
+      actions.resultOk(req, res, 200, result);
+    }
+    catch (err) {
+      actions.resultError(req, res, err);
+    }
+  }
+});
+
+////////////////////////////////////////////////////////////////////////////////
+/// @fn JSF_GET_admin_config_configuration
+/// @brief returns the current configuration
+///
+/// @REST{GET /_admin/config/configuration}
+///
+/// The call returns an object containing the configuration.
+//////////////////////////////////////////////////////////////////////////////// + +actions.defineHttp({ + url : "_admin/config/configuration", + context : "admin", + + callback : function (req, res) { + try { + result = { + database : { + path : { + value : "/tmp/emil/vocbase" + }, + + access : { + value : "localhost:8529" + } + }, + + logging : { + level : { + value : "info" + }, + + syslog : { + value : true + }, + + bufferSize : { + value : 100 + }, + + output : { + file : { + value : "/var/log/message/arango.log" + } + } + } + }; + + actions.resultOk(req, res, 200, result); + } + catch (err) { + actions.resultError(req, res, err); + } + } +}); + //////////////////////////////////////////////////////////////////////////////// /// @} ////////////////////////////////////////////////////////////////////////////////
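The two config calls mirror each other: /_admin/config/description delivers labels and types, /_admin/config/configuration the current values at the same paths. A minimal sketch of how a client might join the two objects; the function name and the traversal are illustrative assumptions, not part of the server code:

    // given the parsed responses of /_admin/config/description and
    // /_admin/config/configuration, render a single option
    function describeOption (description, configuration, section, option) {
      var desc = description[section][option];          // e.g. { name : "Log Level", type : "pull-down", ... }
      var value = configuration[section][option].value; // e.g. "info"
      return desc.name + " (" + desc.type + "): " + value;
    }

    // describeOption(desc, conf, "logging", "level") yields
    // "Log Level (pull-down): info" for the values shown above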