
Merge branch 'devel' of https://github.com/triAGENS/AvocadoDB into devel

Jan Steemann 2012-05-09 11:16:58 +02:00
commit a78053805e
15 changed files with 587 additions and 438 deletions

View File

@@ -0,0 +1,18 @@
+avocado> db.four.ensureUniqueConstraint("a", "b.c");
+{ "id" : "164405/1147445", "unique" : true, "type" : "hash", "fields" : ["a", "b.c"], "isNewlyCreated" : true }
+avocado> db.four.save({ a : 1, b : { c : 1 } });
+{ "_id" : "164405/1868341", "_rev" : 1868341 }
+avocado> db.four.save({ a : 1, b : { c : 1 } });
+JavaScript exception in file '(avocado)' at 1,9: [AvocadoError 1210: cannot save document]
+!db.four.save({ a : 1, b : { c : 1 } });
+!        ^
+stacktrace: [AvocadoError 1210: cannot save document]
+    at (avocado):1:9
+avocado> db.four.save({ a : 1, b : { c : null } });
+{ "_id" : "164405/2196021", "_rev" : 2196021 }
+avocado> db.four.save({ a : 1 });
+{ "_id" : "164405/2196023", "_rev" : 2196023 }

View File

@@ -416,22 +416,26 @@ int main (int argc, char* argv[]) {
   ParseProgramOptions(argc, argv);

   // http://www.network-science.de/ascii/ Font: ogre
+  {
+    char const* g = DEF_GREEN;
+    char const* r = DEF_RED;
+    char const* z = DEF_RESET;
+
     if (NoColors) {
-      printf("                           _ _      \n");
-      printf("  __ ___ _____ ___(_)_ __| |__  \n");
-      printf(" / _` \\ \\ / / _ \\ / __| | '__| '_ \\ \n");
-      printf(" | (_| |\\ V / (_) | (__| | | | |_) | \n");
-      printf("  \\__,_| \\_/ \\___/ \\___|_|_| |_.__/  \n");
-      printf("                                     \n");
+      g = "";
+      r = "";
+      z = "";
     }
-    else {
-      printf("%s %s            _ _      %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-      printf("%s  __ ___ _____ ___%s(_)_ __| |__  %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-      printf("%s / _` \\ \\ / / _ \\ / __%s| | '__| '_ \\ %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-      printf("%s | (_| |\\ V / (_) | (__%s| | | | |_) | %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-      printf("%s  \\__,_| \\_/ \\___/ \\___%s|_|_| |_.__/  %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-      printf("%s                      %s              %s\n", DEF_GREEN, DEF_RED, DEF_RESET);
-    }
+    printf("%s    %s                %s\n", g, r, z);
+    printf("%s   __ _ _ __ __ _ _ __   __ _  ___ %s(_)_ __| |__  %s\n", g, r, z);
+    printf("%s  / _` | '__/ _` | '_ \\ / _` |/ _ \\%s| | '__| '_ \\ %s\n", g, r, z);
+    printf("%s | (_| | | | (_| | | | | (_| | (_) %s| | | | |_) |%s\n", g, r, z);
+    printf("%s  \\__,_|_|  \\__,_|_| |_|\\__, |\\___/%s|_|_| |_.__/ %s\n", g, r, z);
+    printf("%s                        |___/      %s             %s\n", g, r, z);
+  }
+
+  printf("\n");
   printf("Welcome to avocirb %s. Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
 #ifdef TRI_V8_VERSION

View File

@@ -289,7 +289,6 @@ JAVASCRIPT_HEADER = \
     js/common/bootstrap/js-errors.h \
     js/common/bootstrap/js-modules.h \
     js/common/bootstrap/js-print.h \
-    js/common/bootstrap/js-errors.h \
     js/client/js-client.h \
     js/server/js-server.h \
     js/server/js-ahuacatl.h

View File

@@ -921,7 +921,6 @@ JAVASCRIPT_HEADER = \
     js/common/bootstrap/js-errors.h \
     js/common/bootstrap/js-modules.h \
     js/common/bootstrap/js-print.h \
-    js/common/bootstrap/js-errors.h \
     js/client/js-client.h \
     js/server/js-server.h \
     js/server/js-ahuacatl.h

View File

@@ -184,8 +184,11 @@ HttpHandler::status_e RestDocumentHandler::execute () {
 /// @REST{POST /document?collection=@FA{collection-name}&createCollection=@FA{create}}
 ///
 /// Instead of a @FA{collection-identifier}, a @FA{collection-name} can be
-/// used. If @FA{createCollection} is true, then the collection is created if it does not
-/// exists.
+/// used. If @FA{createCollection} is true, then the collection is created if it
+/// does not exist.
+///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
 ///
 /// @EXAMPLES
 ///
@@ -367,6 +370,9 @@ bool RestDocumentHandler::readDocument () {
 /// given etag. Otherwise a @LIT{HTTP 412} is returned. As an alternative
 /// you can supply the etag in an attribute @LIT{rev} in the URL.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Use a document handle:
@@ -474,6 +480,9 @@ bool RestDocumentHandler::readSingleDocument (bool generateBody) {
 ///
 /// Instead of a @FA{collection-identifier}, a collection name can be given.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// @verbinclude rest_read-document-all
@@ -577,6 +586,9 @@ bool RestDocumentHandler::readAllDocuments () {
 /// can use this call to get the current revision of a document or check if
 /// the document was deleted.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// @verbinclude rest_read-document-head
@@ -634,6 +646,9 @@ bool RestDocumentHandler::checkDocument () {
 /// header. You must never supply both the "ETag" header and the @LIT{rev}
 /// parameter.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Using document handle:
@@ -785,6 +800,9 @@ bool RestDocumentHandler::updateDocument () {
 /// "If-Match" header. You must never supply both the "If-Match" header and the
 /// @LIT{rev} parameter.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Using document handle:

View File

@@ -84,6 +84,9 @@ RestEdgeHandler::RestEdgeHandler (HttpRequest* request, TRI_vocbase_t* vocbase)
 /// If you request such an edge, the returned document will also contain the
 /// attributes @LIT{_from} and @LIT{_to}.
 ///
+/// @note If you are implementing a client API, you should use the path
+/// @LIT{/_api/document}.
+///
 /// @EXAMPLES
 ///
 /// Create an edge:

View File

@@ -642,9 +642,13 @@ int AvocadoServer::startupServer () {
     _applicationAdminServer->addBasicHandlers(factory);

     factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
-    factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);
     factory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    factory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    factory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_IMPORT_PATH, RestHandlerCreator<RestImportHandler>::createData<TRI_vocbase_t*>, _vocbase);

     if (shareAdminPort) {
       _applicationAdminServer->addHandlers(factory, "/_admin");
       _applicationUserManager->addHandlers(factory, "/_admin");
@@ -681,6 +685,10 @@ int AvocadoServer::startupServer () {
     adminFactory->addPrefixHandler(RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
     adminFactory->addPrefixHandler(RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::DOCUMENT_PATH, RestHandlerCreator<RestDocumentHandler>::createData<TRI_vocbase_t*>, _vocbase);
+    adminFactory->addPrefixHandler("/_api" + RestVocbaseBaseHandler::EDGE_PATH, RestHandlerCreator<RestEdgeHandler>::createData<TRI_vocbase_t*>, _vocbase);

     adminFactory->addPrefixHandler("/",
                                    RestHandlerCreator<RestActionHandler>::createData< pair< TRI_vocbase_t*, set<string>* >* >,
                                    (void*) &handlerDataAdmin);
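With these registrations, the same document and edge handlers now answer under both the legacy top-level prefixes and the new /_api prefix, which is what the @note additions in the REST handlers refer to. Sketch of the resulting routing (paths only, parameters elided):

    POST /document?collection=...        legacy path, still served
    POST /_api/document?collection=...   preferred path for client APIs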

View File

@@ -29,7 +29,16 @@
 /// @page IndexHashTOC
 ///
 /// <ol>
-///   <li>MISSING</li>
+///   <li>@ref IndexHashHttp
+///     <ol>
+///       <li>@ref IndexHashHttpEnsureUniqueConstraint "POST /_api/index"</li>
+///     </ol>
+///   </li>
+///   <li>@ref IndexHashShell
+///     <ol>
+///       <li>@ref IndexHashShellEnsureCapConstraint "collection.ensureUniqueConstraint"</li>
+///     </ol>
+///   </li>
 /// </ol>
 ////////////////////////////////////////////////////////////////////////////////
@@ -41,6 +50,23 @@
 /// <hr>
 /// @copydoc IndexHashTOC
 /// <hr>
+///
+/// It is possible to define a hash index on one or more attributes (or paths)
+/// of a document. This hash index is then used in queries to locate documents
+/// in O(1) operations. If the index is unique, then no two documents are
+/// allowed to have the same set of attribute values.
+///
+/// @section IndexHashHttp Accessing Hash Indexes via HTTP
+//////////////////////////////////////////////////////////
+///
+/// @anchor IndexHashHttpEnsureUniqueConstraint
+/// @copydetails JSF_POST_api_index_hash
+///
+/// @section IndexHashShell Accessing Hash Indexes from the Shell
+/////////////////////////////////////////////////////////////////
+///
+/// @anchor IndexHashShellEnsureCapConstraint
+/// @copydetails JS_EnsureUniqueConstraintVocbaseCol
 ////////////////////////////////////////////////////////////////////////////////

 // Local Variables:
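To make the O(1) claim above concrete, here is a minimal JavaScript sketch of the idea behind a (non-unique) hash index: the indexed attribute values are combined into one hash key, and a lookup is a single table probe. This models only the concept, not the server's C implementation further below, and it handles top-level attributes only (the real index also supports paths such as b.c):

    function makeHashIndex(fields) {
      var table = {};                                // hash key -> list of documents

      function keyFor(doc) {
        var values = [];
        for (var i = 0;  i < fields.length;  ++i) {
          var v = doc[fields[i]];
          values.push(v === undefined ? null : v);   // missing attribute behaves like null
        }
        return JSON.stringify(values);
      }

      return {
        insert: function (doc) {
          var k = keyFor(doc);
          (table[k] = table[k] || []).push(doc);
        },
        lookup: function (example) {                 // one hash probe, O(1) on average
          return table[keyFor(example)] || [];
        }
      };
    }

    // var idx = makeHashIndex([ "a", "b" ]);
    // idx.insert({ a : 1, b : 1 });
    // idx.lookup({ a : 1, b : 1 });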

View File

@@ -5,13 +5,58 @@ require './avocadodb.rb'

 describe AvocadoDB do
   api = "/_api/index"
-  prefix = "api-index-unique-constraint"
+  prefix = "api-index-hash"

 ################################################################################
 ## unique constraints during create
 ################################################################################

-  context "creating:" do
+  context "creating index:" do
+    context "dealing with unique constraints violation:" do
+      before do
+        @cn = "UnitTestsCollectionIndexes"
+        AvocadoDB.drop_collection(@cn)
+        @cid = AvocadoDB.create_collection(@cn)
+      end
+
+      after do
+        AvocadoDB.drop_collection(@cn)
+      end
+
+      it "does not create the index in case of violation" do
+        # create a document
+        cmd1 = "/document?collection=#{@cid}"
+        body = "{ \"a\" : 1, \"b\" : 1 }"
+        doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
+
+        doc.code.should eq(201)
+
+        # create another document
+        cmd1 = "/document?collection=#{@cid}"
+        body = "{ \"a\" : 1, \"b\" : 1 }"
+        doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)
+
+        doc.code.should eq(201)
+
+        # try to create the index
+        cmd = "/_api/index?collection=#{@cid}"
+        body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
+        doc = AvocadoDB.log_post("#{prefix}-fail", cmd, :body => body)
+
+        doc.code.should eq(400)
+        doc.parsed_response['error'].should eq(true)
+        doc.parsed_response['code'].should eq(400)
+        doc.parsed_response['errorNum'].should eq(1203)
+      end
+    end
+  end
+
+################################################################################
+## unique constraints during create
+################################################################################
+
+  context "creating documents:" do
     context "dealing with unique constraints:" do
       before do
         @cn = "UnitTestsCollectionIndexes"
@@ -24,9 +69,9 @@ describe AvocadoDB do
       end

       it "rolls back in case of violation" do
-        cmd = "/_api/index/#{@cid}"
+        cmd = "/_api/index?collection=#{@cid}"
         body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-create1", cmd, :body => body)

         doc.code.should eq(201)
         doc.parsed_response['type'].should eq("hash")
@@ -35,7 +80,7 @@ describe AvocadoDB do
         # create a document
         cmd1 = "/document?collection=#{@cid}"
         body = "{ \"a\" : 1, \"b\" : 1 }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-create2", cmd1, :body => body)

         doc.code.should eq(201)
@@ -57,7 +102,7 @@ describe AvocadoDB do
         # create a unique constraint violation
         body = "{ \"a\" : 1, \"b\" : 2 }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-create3", cmd1, :body => body)

         doc.code.should eq(409)
@@ -72,7 +117,7 @@ describe AvocadoDB do
         # third try (make sure the rollback has not destroyed anything)
         body = "{ \"a\" : 1, \"b\" : 3 }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-create4", cmd1, :body => body)

         doc.code.should eq(409)
@@ -116,7 +161,7 @@ describe AvocadoDB do
 ## unique constraints during update
 ################################################################################

-  context "updating:" do
+  context "updating documents:" do
     context "dealing with unique constraints:" do
       before do
         @cn = "UnitTestsCollectionIndexes"
@@ -129,9 +174,9 @@ describe AvocadoDB do
       end

       it "rolls back in case of violation" do
-        cmd = "/_api/index/#{@cid}"
+        cmd = "/_api/index?collection=#{@cid}"
         body = "{ \"type\" : \"hash\", \"unique\" : true, \"fields\" : [ \"a\" ] }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-update1", cmd, :body => body)

         doc.code.should eq(201)
         doc.parsed_response['type'].should eq("hash")
@@ -140,7 +185,7 @@ describe AvocadoDB do
         # create a document
         cmd1 = "/document?collection=#{@cid}"
         body = "{ \"a\" : 1, \"b\" : 1 }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-update2", cmd1, :body => body)

         doc.code.should eq(201)
@@ -162,7 +207,7 @@ describe AvocadoDB do
         # create a second document
         body = "{ \"a\" : 2, \"b\" : 2 }"
-        doc = AvocadoDB.log_post("#{prefix}", cmd1, :body => body)
+        doc = AvocadoDB.log_post("#{prefix}-update3", cmd1, :body => body)

         doc.code.should eq(201)

View File

@@ -9,5 +9,6 @@ rspec --format d \
     rest-delete-document-spec.rb \
     rest-edge-spec.rb \
     api-index-spec.rb \
+    api-index-hash-spec.rb \
     api-cursor-spec.rb \
     api-simple-spec.rb

View File

@@ -4504,17 +4504,24 @@ static v8::Handle<v8::Value> JS_EnsureGeoConstraintVocbaseCol (v8::Arguments con
 ///
 /// @FUN{ensureUniqueConstraint(@FA{field1}, @FA{field2}, ..., @FA{fieldn})}
 ///
-/// Creates a hash index on all documents using attributes as paths to the
-/// fields. At least one attribute must be given. The value of this attribute
-/// must be a list. All documents, which do not have the attribute path or where
-/// one or more values that are not suitable, are ignored.
+/// Creates a unique hash index on all documents using @FA{field1}, @FA{field2},
+/// ... as attribute paths. At least one attribute path must be given.
 ///
-/// In case that the index was successfully created, the index identifier
-/// is returned.
+/// When a unique constraint is in effect for a collection, then all documents
+/// which contain the given attributes must differ in the attribute values.
+/// Creating a new document or updating a document will fail if the uniqueness
+/// is violated. If any attribute value is null for a document, this document
+/// is ignored by the index.
+///
+/// If the index could be created successfully, the index identifier is
+/// returned.
+///
+/// Note that non-existing attribute paths in a document are treated as if the
+/// value were @LIT{null}.
 ///
 /// @EXAMPLES
 ///
-/// @verbinclude admin5
+/// @verbinclude shell-index-create-unique-constraint
 ////////////////////////////////////////////////////////////////////////////////

 static v8::Handle<v8::Value> JS_EnsureUniqueConstraintVocbaseCol (v8::Arguments const& argv) {
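The null-handling documented above matches the shell transcript added by this commit (first file of the diff); condensed:

    avocado> db.four.ensureUniqueConstraint("a", "b.c");
    avocado> db.four.save({ a : 1, b : { c : 1 } });     // first document: ok
    avocado> db.four.save({ a : 1, b : { c : 1 } });     // duplicate: AvocadoError 1210
    avocado> db.four.save({ a : 1, b : { c : null } });  // null value: ignored by the index, ok
    avocado> db.four.save({ a : 1 });                    // missing path: treated as null, ok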

View File

@@ -1227,20 +1227,26 @@ int main (int argc, char* argv[]) {
   // http://www.network-science.de/ascii/ Font: ogre
+  {
+    char const* g = DEF_GREEN;
+    char const* r = DEF_RED;
+    char const* z = DEF_RESET;
+
     if (noColors) {
-      printf("                          "            "     _     \n");
-      printf("  __ ___ _____ ___ "                   "___| |__  \n");
-      printf(" / _` \\ \\ / / _ \\ / __"             "/ __| '_ \\ \n");
-      printf(" | (_| |\\ V / (_) | (__"              "\\__ \\ | | | \n");
-      printf("  \\__,_| \\_/ \\___/ \\___"           "|___/_| |_| \n\n");
+      g = "";
+      r = "";
+      z = "";
     }
-    else {
-      printf(          "                       "     "\x1b[31m     _     \x1b[0m\n");
-      printf("\x1b[32m  __ ___ _____ ___ "           "\x1b[31m___| |__  \x1b[0m\n");
-      printf("\x1b[32m / _` \\ \\ / / _ \\ / __"     "\x1b[31m/ __| '_ \\ \x1b[0m\n");
-      printf("\x1b[32m | (_| |\\ V / (_) | (__"      "\x1b[31m\\__ \\ | | | \x1b[0m\n");
-      printf("\x1b[32m  \\__,_| \\_/ \\___/ \\___"   "\x1b[31m|___/_| |_| \x1b[0m\n\n");
-    }
+    printf("%s    %s           _     %s\n", g, r, z);
+    printf("%s   __ _ _ __ __ _ _ __   __ _  ___ %s ___| |__  %s\n", g, r, z);
+    printf("%s  / _` | '__/ _` | '_ \\ / _` |/ _ \\%s/ __| '_ \\ %s\n", g, r, z);
+    printf("%s | (_| | | | (_| | | | | (_| | (_) %s\\__ \\ | | |%s\n", g, r, z);
+    printf("%s  \\__,_|_|  \\__,_|_| |_|\\__, |\\___/%s|___/_| |_|%s\n", g, r, z);
+    printf("%s                        |___/      %s           %s\n", g, r, z);
+  }
+
+  printf("\n");
   printf("Welcome to avocsh %s. Copyright (c) 2012 triAGENS GmbH.\n", TRIAGENS_VERSION);
 #ifdef TRI_V8_VERSION

View File

@@ -1211,47 +1211,71 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t* idx,
 /// @brief helper for hashing
 ////////////////////////////////////////////////////////////////////////////////

-static int HashIndexHelper (const TRI_hash_index_t* hashIndex,
+static int HashIndexHelper (TRI_hash_index_t const* hashIndex,
                             HashIndexElement* hashElement,
-                            const TRI_doc_mptr_t* document,
-                            const TRI_shaped_json_t* shapedDoc) {
+                            TRI_doc_mptr_t const* document,
+                            TRI_shaped_json_t const* shapedDoc) {
   union { void* p; void const* c; } cnv;
-  TRI_shaped_json_t shapedObject;
   TRI_shape_access_t* acc;
+  TRI_shaped_json_t shapedObject;
+  TRI_shaper_t* shaper;
+  int res;
   size_t j;

-  if (shapedDoc != NULL) {
+  shaper = hashIndex->base._collection->_shaper;

   // .............................................................................
   // Attempting to locate a hash entry using a TRI_shaped_json_t object. Use this
   // when we wish to remove a hash entry and we only have the "keys" rather than
   // having the document (from which the keys would follow).
   // .............................................................................

+  if (shapedDoc != NULL) {
     hashElement->data = NULL;
+  }

+  // .............................................................................
+  // Assign the document to the HashIndexElement structure so that it can
+  // later be retrieved.
+  // .............................................................................

+  else if (document != NULL) {
+    cnv.c = document;
+    hashElement->data = cnv.p;
+    shapedDoc = &document->_document;
+  }

+  else {
+    return TRI_ERROR_INTERNAL;
+  }

+  // .............................................................................
+  // Extract the attribute values
+  // .............................................................................

+  res = TRI_ERROR_NO_ERROR;

   for (j = 0; j < hashIndex->_paths._length; ++j) {
-    TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j)));
+    TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths, j)));

-    // ..........................................................................
-    // Determine if document has that particular shape
-    // ..........................................................................
-    acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, shapedDoc->_sid, shape);
+    // determine if document has that particular shape
+    acc = TRI_ShapeAccessor(shaper, shapedDoc->_sid, shape);

     if (acc == NULL || acc->_shape == NULL) {
       if (acc != NULL) {
         TRI_FreeShapeAccessor(acc);
       }
-      // TRI_Free(hashElement->fields); memory deallocated in the calling procedure
-      return TRI_WARNING_AVOCADO_INDEX_HASH_UPDATE_ATTRIBUTE_MISSING;
+
+      shapedObject._sid         = shaper->_sidNull;
+      shapedObject._data.length = 0;
+      shapedObject._data.data   = NULL;
+
+      res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
     }

-    // ..........................................................................
-    // Extract the field
-    // ..........................................................................
+    else {
+      // extract the field
       if (! TRI_ExecuteShapeAccessor(acc, shapedDoc, &shapedObject)) {
         TRI_FreeShapeAccessor(acc);
         // TRI_Free(hashElement->fields); memory deallocated in the calling procedure
@@ -1259,167 +1283,17 @@ static int HashIndexHelper (const TRI_hash_index_t* hashIndex,
         return TRI_ERROR_INTERNAL;
       }

-      // ..........................................................................
-      // Store the json shaped Object -- this is what will be hashed
-      // ..........................................................................
+      TRI_FreeShapeAccessor(acc);

+      if (shapedObject._sid == shaper->_sidNull) {
+        res = TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
+      }
+    }

+    // store the json shaped object -- this is what will be hashed
     hashElement->fields[j] = shapedObject;
-    TRI_FreeShapeAccessor(acc);
-  } // end of for loop
   }
-  else if (document != NULL) {
-
-    // ..........................................................................
-    // Assign the document to the HashIndexElement structure - so that it can later
-    // be retrieved.
-    // ..........................................................................
-
-    cnv.c = document;
-    hashElement->data = cnv.p;
-
-    for (j = 0; j < hashIndex->_paths._length; ++j) {
-      TRI_shape_pid_t shape = *((TRI_shape_pid_t*)(TRI_AtVector(&hashIndex->_paths,j)));
-
-      // ..........................................................................
-      // Determine if document has that particular shape
-      // It is not an error if the document DOES NOT have the particular shape
-      // ..........................................................................
-
-      acc = TRI_ShapeAccessor(hashIndex->base._collection->_shaper, document->_document._sid, shape);
-
-      if (acc == NULL || acc->_shape == NULL) {
-        if (acc != NULL) {
-          TRI_FreeShapeAccessor(acc);
-        }
-        // TRI_Free(hashElement->fields); memory deallocated in the calling procedure
-        return TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING;
-      }
-
-      // ..........................................................................
-      // Extract the field
-      // ..........................................................................
-
-      if (! TRI_ExecuteShapeAccessor(acc, &(document->_document), &shapedObject)) {
-        TRI_FreeShapeAccessor(acc);
-        // TRI_Free(hashElement->fields); memory deallocated in the calling procedure
-        return TRI_ERROR_INTERNAL;
-      }
-
-      // ..........................................................................
-      // Store the field
-      // ..........................................................................
-
-      hashElement->fields[j] = shapedObject;
-      TRI_FreeShapeAccessor(acc);
-    } // end of for loop
-  }
-  else {
-    return TRI_ERROR_INTERNAL;
-  }
-
-  return TRI_ERROR_NO_ERROR;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-/// @brief hash indexes a document
-////////////////////////////////////////////////////////////////////////////////
-
-static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
-  HashIndexElement hashElement;
-  TRI_hash_index_t* hashIndex;
-  int res;
-
-  // ............................................................................
-  // Obtain the hash index structure
-  // ............................................................................
-
-  hashIndex = (TRI_hash_index_t*) idx;
-
-  if (idx == NULL) {
-    LOG_WARNING("internal error in InsertHashIndex");
-    return TRI_set_errno(TRI_ERROR_INTERNAL);
-  }
-
-  // ............................................................................
-  // Allocate storage to shaped json objects stored as a simple list.
-  // These will be used for hashing.
-  // ............................................................................
-
-  hashElement.numFields = hashIndex->_paths._length;
-  hashElement.fields    = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
-
-  if (hashElement.fields == NULL) {
-    LOG_WARNING("out-of-memory in InsertHashIndex");
-    return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
-  }
-
-  res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
-
-  // ............................................................................
-  // It is possible that this document does not have the necessary attributes
-  // (keys) to participate in this index.
-  // ............................................................................
-
-  // ............................................................................
-  // If an error occurred in the called procedure HashIndexHelper, we must
-  // now exit -- and deallocate memory assigned to hashElement.
-  // ............................................................................
-
-  if (res != TRI_ERROR_NO_ERROR) {  // some sort of error occurred
-
-    // ..........................................................................
-    // Deallocate the memory already allocated to hashElement.fields
-    // ..........................................................................
-
-    TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
-
-    // ..........................................................................
-    // It may happen that the document does not have the necessary attributes to
-    // be included within the hash index, in this case do not report back an error.
-    // ..........................................................................
-
-    if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
-      return TRI_ERROR_NO_ERROR;
-    }
-
-    return res;
-  }
-
-  // ............................................................................
-  // Fill the json field list from the document for unique hash index
-  // ............................................................................
-
-  if (hashIndex->base._unique) {
-    res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
-  }
-
-  // ............................................................................
-  // Fill the json field list from the document for non-unique hash index
-  // ............................................................................
-
-  else {
-    res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
-  }
-
-  // ............................................................................
-  // Memory which has been allocated to hashElement.fields remains allocated;
-  // its contents are stored in the hash array.
-  // ............................................................................
-
-  TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);

   return res;
 }
@@ -1510,71 +1384,52 @@ static void RemoveIndexHashIndex (TRI_index_t* idx, TRI_doc_collection_t* collec
 }

 ////////////////////////////////////////////////////////////////////////////////
-/// @brief removes a document from a hash index
+/// @brief hash indexes a document
 ////////////////////////////////////////////////////////////////////////////////

-static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
+static int InsertHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
   HashIndexElement hashElement;
   TRI_hash_index_t* hashIndex;
   int res;

   // .............................................................................
   // Obtain the hash index structure
   // .............................................................................

   hashIndex = (TRI_hash_index_t*) idx;

   if (idx == NULL) {
-    LOG_WARNING("internal error in RemoveHashIndex");
+    LOG_WARNING("internal error in InsertHashIndex");
     return TRI_set_errno(TRI_ERROR_INTERNAL);
   }

   // .............................................................................
-  // Allocate some memory for the HashIndexElement structure
+  // Allocate storage to shaped json objects stored as a simple list.
+  // These will be used for hashing.
   // .............................................................................

   hashElement.numFields = hashIndex->_paths._length;
-  hashElement.fields    = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
+  hashElement.fields    = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);

+  if (hashElement.fields == NULL) {
+    LOG_WARNING("out-of-memory in InsertHashIndex");
+    return TRI_set_errno(TRI_ERROR_OUT_OF_MEMORY);
+  }

-  // ..........................................................................
-  // Fill the json field list from the document
-  // ..........................................................................

   res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);

-  // ..........................................................................
-  // It may happen that the document does not have attributes which match.
-  // For now return internal error, there needs to be its own error number
-  // and the appropriate action needs to be taken by the calling function in
-  // such cases.
-  // ..........................................................................
+  // .............................................................................
+  // It is possible that this document does not have the necessary attributes
+  // (keys) to participate in this index.
+  //
+  // If an error occurred in the called procedure HashIndexHelper, we must
+  // now exit -- and deallocate memory assigned to hashElement.
+  // .............................................................................

   if (res != TRI_ERROR_NO_ERROR) {
-    // ........................................................................
-    // Deallocate memory allocated to hashElement.fields above
-    // ........................................................................
+    // Deallocate the memory already allocated to hashElement.fields
     TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);

-    // ........................................................................
-    // It may happen that the document does not have the necessary attributes
-    // to have participated within the hash index. In this case, we do not
-    // report an error to the calling procedure.
-    // ........................................................................
-    // ........................................................................
-    // -1 from the called procedure HashIndexHelper implies that we do not
-    // propagate the error to the parent function. However for removal
-    // we advise the parent function. TODO: return a proper error code.
-    // ........................................................................
+    // .............................................................................
+    // It may happen that the document does not have the necessary attributes to
+    // be included within the hash index, in this case do not report back an error.
+    // .............................................................................

     if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
       return TRI_ERROR_NO_ERROR;
@@ -1583,25 +1438,104 @@ static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
     return res;
   }

   // .............................................................................
-  // Attempt the removal for unique hash indexes
+  // Fill the json field list from the document for unique or non-unique index
   // .............................................................................

+  if (hashIndex->base._unique) {
+    res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
+  }
+  else {
+    res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
+  }
+
+  // .............................................................................
+  // Memory which has been allocated to hashElement.fields remains allocated;
+  // its contents are stored in the hash array.
+  // .............................................................................
+
+  TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+
+  return res;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+/// @brief removes a document from a hash index
+////////////////////////////////////////////////////////////////////////////////
+
+static int RemoveHashIndex (TRI_index_t* idx, TRI_doc_mptr_t const* doc) {
+  HashIndexElement hashElement;
+  TRI_hash_index_t* hashIndex;
+  int res;
+
+  // .............................................................................
+  // Obtain the hash index structure
+  // .............................................................................
+
+  hashIndex = (TRI_hash_index_t*) idx;
+
+  if (idx == NULL) {
+    LOG_WARNING("internal error in RemoveHashIndex");
+    return TRI_set_errno(TRI_ERROR_INTERNAL);
+  }
+
+  // .............................................................................
+  // Allocate some memory for the HashIndexElement structure
+  // .............................................................................
+
+  hashElement.numFields = hashIndex->_paths._length;
+  hashElement.fields    = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
+
+  // .............................................................................
+  // Fill the json field list from the document
+  // .............................................................................
+
+  res = HashIndexHelper(hashIndex, &hashElement, doc, NULL);
+
+  // .............................................................................
+  // It may happen that the document does not have attributes which match.
+  // For now return internal error, there needs to be its own error number
+  // and the appropriate action needs to be taken by the calling function in
+  // such cases.
+  // .............................................................................
+
+  if (res != TRI_ERROR_NO_ERROR) {
+    // Deallocate memory allocated to hashElement.fields above
+    TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+
+    // .............................................................................
+    // It may happen that the document does not have the necessary attributes to
+    // have participated within the hash index. In this case, we do not report an
+    // error to the calling procedure.
+    //
+    // TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING from the called
+    // procedure HashIndexHelper implies that we do not propagate the error to
+    // the parent function. However for removal we advise the parent
+    // function. TODO: return a proper error code.
+    // .............................................................................
+
+    if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
+      return TRI_ERROR_NO_ERROR;
+    }
+
+    return res;
+  }
+
+  // .............................................................................
+  // Attempt the removal for unique or non-unique hash indexes
+  // .............................................................................
+
   if (hashIndex->base._unique) {
     res = HashIndex_remove(hashIndex->_hashIndex, &hashElement);
   }
-
-  // ............................................................................
-  // Attempt the removal for non-unique hash indexes
-  // ............................................................................
-
   else {
     res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
   }

   // .............................................................................
   // Deallocate memory allocated to hashElement.fields above
   // .............................................................................

   TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
@@ -1616,20 +1550,20 @@ static int UpdateHashIndex (TRI_index_t* idx,
                             const TRI_doc_mptr_t* newDoc,
                             const TRI_shaped_json_t* oldDoc) {

   // .............................................................................
   // Note: The oldDoc is represented by the TRI_shaped_json_t rather than by a
   //       TRI_doc_mptr_t object. However for non-unique indexes we must pass the
   //       document shape to the hash remove function.
   // .............................................................................

   union { void* p; void const* c; } cnv;
   HashIndexElement hashElement;
   TRI_hash_index_t* hashIndex;
   int res;

   // .............................................................................
   // Obtain the hash index structure
   // .............................................................................

   hashIndex = (TRI_hash_index_t*) idx;
@@ -1638,27 +1572,18 @@ static int UpdateHashIndex (TRI_index_t* idx,
     return TRI_ERROR_INTERNAL;
   }

   // .............................................................................
   // Allocate some memory for the HashIndexElement structure
   // .............................................................................

   hashElement.numFields = hashIndex->_paths._length;
-  hashElement.fields    = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);
-
-  if (hashElement.fields == NULL) {
-    LOG_WARNING("out-of-memory in UpdateHashIndex");
-    return TRI_ERROR_OUT_OF_MEMORY;
-  }
+  hashElement.fields    = TRI_Allocate(TRI_CORE_MEM_ZONE, sizeof(TRI_shaped_json_t) * hashElement.numFields, false);

   // .............................................................................
   // Update for unique hash index
-  // ............................................................................
-
-  // ............................................................................
+  //
   // Fill in the fields with the values from oldDoc
   // .............................................................................

-  if (hashIndex->base._unique) {
     assert(oldDoc != NULL);
@@ -1675,12 +1600,15 @@ static int UpdateHashIndex (TRI_index_t* idx,
       hashElement.data = cnv.p;

       // ............................................................................
-      // Remove the hash index entry and return.
+      // Remove the old hash index entry
       // ............................................................................

+      if (hashIndex->base._unique) {
         res = HashIndex_remove(hashIndex->_hashIndex, &hashElement);
+      }
+      else {
+        res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
+      }

-      if (res != TRI_ERROR_NO_ERROR) {

       // ..........................................................................
       // This error is common, when a document 'update' occurs, but fails
@@ -1688,44 +1616,32 @@ static int UpdateHashIndex (TRI_index_t* idx,
       // is applied, there is no document to remove -- so we get this error.
       // ..........................................................................

-      LOG_WARNING("could not remove existing document from hash index in UpdateHashIndex");
+      if (res != TRI_ERROR_NO_ERROR) {
+        LOG_DEBUG("could not remove existing document from hash index in UpdateHashIndex");
       }
     }

-  // ..............................................................................
-  // Here we are assuming that the existing document could not be removed, because
-  // the doc did not have the correct attributes. TODO: do not make this assumption.
-  // ..............................................................................
-
-  else {
+  else if (res != TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
     LOG_WARNING("existing document was not removed from hash index in UpdateHashIndex");
   }

   // ............................................................................
   // Fill the json simple list from the document
   // ............................................................................

   res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL);

   // ............................................................................
   // Deal with any errors reported back.
   // ............................................................................

   if (res != TRI_ERROR_NO_ERROR) {
-    // ..........................................................................
     // Deallocate memory given to hashElement.fields
-    // ..........................................................................
     TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);

-    if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
-      // ........................................................................
       // probably fields do not match.
-      // ........................................................................
+    if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
       return TRI_ERROR_NO_ERROR;
     }
@@ -1736,81 +1652,18 @@ static int UpdateHashIndex (TRI_index_t* idx,
   // ............................................................................
   // Attempt to add the hash entry from the new doc
   // ............................................................................

   if (hashIndex->base._unique) {
     res = HashIndex_insert(hashIndex->_hashIndex, &hashElement);
   }
-
-  // ............................................................................
-  // Update for non-unique hash index
-  // ............................................................................
-
   else {
-
-    // ............................................................................
-    // Fill in the fields with the values from oldDoc
-    // ............................................................................
-
-    res = HashIndexHelper(hashIndex, &hashElement, NULL, oldDoc);
-
-    if (res == TRI_ERROR_NO_ERROR) {
-
-      // ............................................................................
-      // We must fill the hashElement with the value of the document shape -- this
-      // is necessary when we attempt to remove non-unique hash indexes.
-      // ............................................................................
-
-      cnv.c = newDoc;
-      hashElement.data = cnv.p;
-
-      // ............................................................................
-      // Remove the hash index entry and return.
-      // ............................................................................
-
-      res = MultiHashIndex_remove(hashIndex->_hashIndex, &hashElement);
-
-      if (res != TRI_ERROR_NO_ERROR) {
-        LOG_WARNING("could not remove old document from (non-unique) hash index in UpdateHashIndex");
-      }
-    }
-    else {
-      LOG_WARNING("existing document was not removed from (non-unique) hash index in UpdateHashIndex");
-    }
-
-    // ............................................................................
-    // Fill the shaped json simple list from the document
-    // ............................................................................
-
-    res = HashIndexHelper(hashIndex, &hashElement, newDoc, NULL);
-
-    if (res != TRI_ERROR_NO_ERROR) {
-      TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
-
-      if (res == TRI_WARNING_AVOCADO_INDEX_HASH_DOCUMENT_ATTRIBUTE_MISSING) {
-        // ........................................................................
-        // probably fields do not match -- report internal error for now
-        // ........................................................................
-        return TRI_ERROR_NO_ERROR;
-      }
-
-      return res;
-    }
-
-    // ............................................................................
-    // Attempt to add the hash entry from the new doc
-    // ............................................................................
-
     res = MultiHashIndex_insert(hashIndex->_hashIndex, &hashElement);
   }

   // ............................................................................
   // Deallocate memory given to hashElement.fields
   // ............................................................................

-  TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashElement.fields);
+  TRI_Free(TRI_CORE_MEM_ZONE, hashElement.fields);

   return res;
 }
@@ -1941,7 +1794,7 @@ void TRI_FreeHashIndex (TRI_index_t* idx) {
 ///   HashIndexElements* results
 ////////////////////////////////////////////////////////////////////////////////

-HashIndexElements* TRI_LookupHashIndex(TRI_index_t* idx, TRI_json_t* parameterList) {
+HashIndexElements* TRI_LookupHashIndex (TRI_index_t* idx, TRI_json_t* parameterList) {
   TRI_hash_index_t* hashIndex;
   HashIndexElements* result;
   HashIndexElement element;
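For orientation, the control flow that the refactored HashIndexHelper converges on can be sketched in JavaScript (simplified names; extract() is a stand-in for the shape-accessor machinery, and the string error codes are placeholders for the TRI_* constants):

    function extract(shapedDoc, path) {
      return shapedDoc[path];                 // real code walks shaped JSON, this is a stand-in
    }

    function hashIndexHelper(index, element, document, shapedDoc) {
      if (shapedDoc !== null) {
        element.data = null;                  // keys-only: used when removing by key values
      }
      else if (document !== null) {
        element.data = document;              // keep the document so it can be retrieved later
        shapedDoc = document.shaped;
      }
      else {
        return "ERROR_INTERNAL";
      }

      var res = "NO_ERROR";

      element.fields = index.paths.map(function (path) {
        var value = extract(shapedDoc, path);
        if (value === undefined) {            // missing attribute: hash it as null instead of failing
          res = "WARNING_ATTRIBUTE_MISSING";
          return null;
        }
        return value;
      });

      return res;
    }

The design point of the refactor is visible here: one code path now serves both the document case and the keys-only case, and a missing attribute downgrades from a hard error to a warning code that callers may ignore.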

View File

@@ -337,6 +337,39 @@ function POST_api_index_geo (req, res, collection, body) {
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief creates a hash index
+///
+/// @REST{POST /_api/index?collection=@FA{collection-identifier}}
+///
+/// Creates a hash index for the collection @FA{collection-identifier}, if it
+/// does not already exist. The call expects an object containing the index
+/// details.
+///
+/// - @LIT{type}: must be equal to @LIT{"hash"}.
+///
+/// - @LIT{fields}: a list of attribute paths.
+///
+/// - @LIT{unique}: if @LIT{true}, then a unique index is created.
+///
+/// If the index does not already exist and could be created, then a @LIT{HTTP
+/// 201} is returned. If the index already exists, then a @LIT{HTTP 200} is
+/// returned.
+///
+/// If the @FA{collection-identifier} is unknown, then a @LIT{HTTP 404} is
+/// returned. It is possible to specify a name instead of an identifier.
+///
+/// If the collection already contains documents and you try to create a unique
+/// hash index in such a way that there are documents violating the uniqueness,
+/// then a @LIT{HTTP 400} is returned.
+///
+/// @EXAMPLES
+///
+/// Creating a unique constraint:
+///
+/// @verbinclude api-index-create-new-unique-constraint
+///
+/// Creating a hash index:
+///
+/// @verbinclude api-index-create-new-hash-index
 ////////////////////////////////////////////////////////////////////////////////

 function POST_api_index_hash (req, res, collection, body) {
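Putting the status codes above together, the failure case over a collection that already contains duplicates would look roughly like this (request body as in the new api-index-hash spec; the response fields are the ones that spec asserts, other fields elided):

    POST /_api/index?collection=UnitTestsCollectionIndexes
    { "type" : "hash", "unique" : true, "fields" : [ "a" ] }

    HTTP/1.1 400 Bad Request
    { "error" : true, "code" : 400, "errorNum" : 1203, ... }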

View File

@@ -37,10 +37,10 @@ var actions = require("actions");
 ////////////////////////////////////////////////////////////////////////////////
 ////////////////////////////////////////////////////////////////////////////////
-/// @fn JSF_GET_system_status
+/// @fn JSF_GET_admin_status
 /// @brief returns system status information for the server
 ///
-/// @REST{GET /_system/status}
+/// @REST{GET /_admin/status}
 ///
 /// The call returns an object with the following attributes:
 ///
@@ -70,7 +70,7 @@ var actions = require("actions");
 ////////////////////////////////////////////////////////////////////////////////

 actions.defineHttp({
-  url : "_system/status",
+  url : "_admin/status",
   context : "admin",

   callback : function (req, res) {
@@ -86,6 +86,135 @@ actions.defineHttp({
   }
 });

+////////////////////////////////////////////////////////////////////////////////
+/// @fn JSF_GET_admin_config_description
+/// @brief returns the configuration description
+///
+/// @REST{GET /_admin/config/description}
+///
+/// The call returns an object describing the configuration.
+////////////////////////////////////////////////////////////////////////////////
+
+actions.defineHttp({
+  url : "_admin/config/description",
+  context : "admin",
+
+  callback : function (req, res) {
+    try {
+      var result = {
+        database : {
+          name : "Database",
+          type : "section",
+
+          path : {
+            name : "Path",
+            type : "string",
+            readonly : true
+          },
+
+          access : {
+            name : "Combined Access",
+            type : "string",
+            readonly : true
+          }
+        },
+
+        logging : {
+          name : "Logging",
+          type : "section",
+
+          level : {
+            name : "Log Level",
+            type : "pull-down",
+            values : [ "fatal", "error", "warning", "info", "debug", "trace" ]
+          },
+
+          syslog : {
+            name : "Use Syslog",
+            type : "boolean"
+          },
+
+          bufferSize : {
+            name : "Log Buffer Size",
+            type : "integer"
+          },
+
+          output : {
+            name : "Output",
+            type : "section",
+
+            file : {
+              name : "Log File",
+              type : "string",
+              readonly : true
+            }
+          }
+        }
+      };
+
+      actions.resultOk(req, res, 200, result);
+    }
+    catch (err) {
+      actions.resultError(req, res, err);
+    }
+  }
+});
+
+////////////////////////////////////////////////////////////////////////////////
+/// @fn JSF_GET_admin_config_configuration
+/// @brief returns the current configuration
+///
+/// @REST{GET /_admin/config/configuration}
+///
+/// The call returns an object containing the current configuration, using the
+/// same keys as the configuration description.
+////////////////////////////////////////////////////////////////////////////////
+
+actions.defineHttp({
+  url : "_admin/config/configuration",
+  context : "admin",
+
+  callback : function (req, res) {
+    try {
+      var result = {
+        database : {
+          path : {
+            value : "/tmp/emil/vocbase"
+          },
+
+          access : {
+            value : "localhost:8529"
+          }
+        },
+
+        logging : {
+          level : {
+            value : "info"
+          },
+
+          syslog : {
+            value : true
+          },
+
+          bufferSize : {
+            value : 100
+          },
+
+          output : {
+            file : {
+              value : "/var/log/message/arango.log"
+            }
+          }
+        }
+      };
+
+      actions.resultOk(req, res, 200, result);
+    }
+    catch (err) {
+      actions.resultError(req, res, err);
+    }
+  }
+});
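The two endpoints are designed as a pair: /_admin/config/description supplies the schema (display names, types, read-only flags) and /_admin/config/configuration supplies the values under the same keys. A small sketch of how a client could walk both objects together; the print() helper exists in the shell bootstrap, but the recursion and the two input objects being fetched beforehand are assumptions, not part of this commit:

    function renderSection(desc, conf) {
      for (var key in desc) {
        if (key === "name" || key === "type") {
          continue;                                // metadata of the section itself
        }
        if (desc[key].type === "section") {
          renderSection(desc[key], conf[key]);     // recurse into nested sections
        }
        else {
          print(desc[key].name + " = " + conf[key].value
                + (desc[key].readonly ? " (read-only)" : ""));
        }
      }
    }

    // e.g. renderSection(description.logging, configuration.logging);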
 ////////////////////////////////////////////////////////////////////////////////
 /// @}
 ////////////////////////////////////////////////////////////////////////////////