From 96dd32cdd3b88329a2b09792bf97fa277dd25fda Mon Sep 17 00:00:00 2001 From: Esteban Lombeyda Date: Mon, 23 Jun 2014 09:47:44 +0200 Subject: [PATCH 01/13] bug fix: autocompletion and tab --- 3rdParty/linenoise/linenoise.c | 64 ++++++++++++++++++++++++++++++---- 1 file changed, 57 insertions(+), 7 deletions(-) diff --git a/3rdParty/linenoise/linenoise.c b/3rdParty/linenoise/linenoise.c index c9795331bc..1da2958ddb 100644 --- a/3rdParty/linenoise/linenoise.c +++ b/3rdParty/linenoise/linenoise.c @@ -981,6 +981,44 @@ static void displayItems(const struct linenoiseCompletions * lc, struct current newLine(current); } +char * append (char * l, size_t ls, char *m, size_t ms, char *r, size_t rs) { + size_t size = 0; + if(l) { + size += ls; + } + if(m) { + size += ms; + } + if(r) { + size += rs; + } + + if(!size) { + return NULL; + } + char * new_buf = malloc(size + 1); + if(!new_buf) { + printf("Error out of memory\n"); + return NULL; + } + memcpy(new_buf, l, ls); + memcpy(new_buf + ls, m, ms); + memcpy(new_buf + ls + ms, r, rs); + new_buf[size] = '\0'; + return new_buf; +} + +char * BC(struct current const * current, char const * completion_buf, size_t completion_buf_size) { + char * buf = current->buf + current->len-1; + size_t bytes_length = 0; + while((buf > current->buf) && ((*buf != ';') && (*buf != ' '))) { + buf--; + bytes_length++; + } + memset(current->buf + (current->len - bytes_length ), '\0', bytes_length-1); + return append(current->buf, (current->len - bytes_length), completion_buf, completion_buf_size, NULL, 0); + +} static void refreshPage(const struct linenoiseCompletions * lc, struct current *current) { size_t j; @@ -1015,9 +1053,11 @@ static void refreshPage(const struct linenoiseCompletions * lc, struct current * newLine(current); if(min_chars!=NULL) { // char * new_buf = strndup(min_chars, common_min_len); - char * new_buf = malloc(common_min_len + 1); - memcpy(new_buf, min_chars, common_min_len); - new_buf[common_min_len] = '\0'; + char * new_buf = 
BC(current, min_chars, common_min_len); + if(!new_buf) { + printf(" Out of memory "); + return; + } set_current(current, new_buf); // this is posible because set_current copies the given pointer free(new_buf); @@ -1411,11 +1451,21 @@ static void freeCompletions(linenoiseCompletions *lc) { free(lc->cvec); } + static int completeLine(struct current *current) { linenoiseCompletions lc = { 0, NULL, 0 }; int c = 0; + char * buf = current->buf + current->pos; - completionCallback(current->buf,&lc); + while((buf > current->buf)) { + if((*buf == ';') || (*buf == ' ')) { + buf++; + break; + } + buf--; + } + + completionCallback(buf,&lc); if (lc.len == 0) { beep(); } else { @@ -1423,7 +1473,7 @@ static int completeLine(struct current *current) { if(lc.len>1 && lc.multiLine) { refreshPage(&lc, current); freeCompletions(&lc); - return c; + return c; } stop = 0, i = 0; @@ -1830,8 +1880,8 @@ history_navigation: refreshLine(current->prompt, current); break; default: - /* Only tab is allowed without ^V */ - if (c == '\t' || c >= ' ') { + /* Only characters greater than white space are allowed */ + if (c >= ' ') { eraseEol(current); // if (insert_char(current, current->pos, c) == 1) { insert_char(current, current->pos, c); From d429e850ab3eaf457883c09fc709db90d0052438 Mon Sep 17 00:00:00 2001 From: Esteban Lombeyda Date: Mon, 23 Jun 2014 13:48:21 +0200 Subject: [PATCH 02/13] cleaning compilere warnings --- 3rdParty/linenoise/linenoise.c | 109 +++++++++++++++++---------------- 1 file changed, 55 insertions(+), 54 deletions(-) diff --git a/3rdParty/linenoise/linenoise.c b/3rdParty/linenoise/linenoise.c index 1da2958ddb..cc116ace8f 100644 --- a/3rdParty/linenoise/linenoise.c +++ b/3rdParty/linenoise/linenoise.c @@ -949,7 +949,7 @@ static int utf8_getchars(char *buf, int c) */ static int get_char(struct current *current, size_t pos) { - if (pos >= 0 && pos < current->chars) { + if (/*pos >= 0 &&*/pos < current->chars) { int c; int i = utf8_index(current->buf, pos); 
(void)utf8_tounicode(current->buf + i, &c); @@ -960,7 +960,7 @@ static int get_char(struct current *current, size_t pos) static void displayItems(const struct linenoiseCompletions * lc, struct current *current, int max_len) { size_t wcols = current->cols; - size_t cols = max_len > wcols ? 1 : wcols/(max_len+2); + size_t cols = (size_t)max_len > wcols ? 1 : wcols/((size_t)max_len+2); size_t rows = (int)ceil((float)lc->len/cols); size_t i, j; size_t idx; @@ -981,43 +981,52 @@ static void displayItems(const struct linenoiseCompletions * lc, struct current newLine(current); } -char * append (char * l, size_t ls, char *m, size_t ms, char *r, size_t rs) { - size_t size = 0; - if(l) { - size += ls; - } - if(m) { - size += ms; - } - if(r) { - size += rs; +char * update_current_buffer(struct current * current, size_t tmp_buf_len, char const * completion_buf, size_t completion_buf_size) { + size_t bytes_length = 0; + bytes_length = current->len - tmp_buf_len; + if(tmp_buf_len + completion_buf_size >= (size_t)current->bufmax) { + // at moment buffer is not modified + return NULL; } + memset(current->buf + tmp_buf_len, '\0', bytes_length); + memcpy(current->buf + tmp_buf_len, completion_buf, completion_buf_size); - if(!size) { - return NULL; - } - char * new_buf = malloc(size + 1); - if(!new_buf) { - printf("Error out of memory\n"); - return NULL; - } - memcpy(new_buf, l, ls); - memcpy(new_buf + ls, m, ms); - memcpy(new_buf + ls + ms, r, rs); - new_buf[size] = '\0'; - return new_buf; + current->len = strlen(current->buf); + current->buf[current->bufmax - 1] = 0; + current->len = strlen(current->buf); + current->pos = current->chars = utf8_strlen(current->buf, current->len); + return current->buf; } - -char * BC(struct current const * current, char const * completion_buf, size_t completion_buf_size) { +/** + * computes the last string after a semicolon or a white space (toke separators) + * when the string does not contain token separators the quite string buffer + * of the 
current struct is returned + * @param current is the current edited buffer + * @param token_buf will point to the begin of the token + * @param token_length is optional, when the pointer is not null will contain the + * length of the token + */ +void last_token(struct current const * current, char ** token_buf, size_t * token_length) { char * buf = current->buf + current->len-1; size_t bytes_length = 0; while((buf > current->buf) && ((*buf != ';') && (*buf != ' '))) { buf--; bytes_length++; } - memset(current->buf + (current->len - bytes_length ), '\0', bytes_length-1); - return append(current->buf, (current->len - bytes_length), completion_buf, completion_buf_size, NULL, 0); + if(buf == current->buf) { + bytes_length++; + } + if(token_length) { + *token_length = bytes_length; + } + *token_buf = buf; +} +char * update_completion_buffer(struct current * current, char const * completion_buf, size_t completion_buf_size) { + char * buf = current->buf + current->len-1; + size_t bytes_length = 0; + last_token(current, &buf, &bytes_length); + return update_current_buffer(current, current->len - bytes_length, completion_buf, completion_buf_size); } static void refreshPage(const struct linenoiseCompletions * lc, struct current *current) { @@ -1026,7 +1035,7 @@ static void refreshPage(const struct linenoiseCompletions * lc, struct current * size_t max_len = 0; char * min_chars = NULL; for(j=0; jlen; j++) { - size_t j_len = utf8_strlen(lc->cvec[j], (int)strlen(lc->cvec[j])); + size_t j_len = utf8_strlen(lc->cvec[j], (int)strlen(lc->cvec[j])); if(min_chars == NULL) { min_chars = lc->cvec[j]; common_min_len = j_len; @@ -1053,14 +1062,11 @@ static void refreshPage(const struct linenoiseCompletions * lc, struct current * newLine(current); if(min_chars!=NULL) { // char * new_buf = strndup(min_chars, common_min_len); - char * new_buf = BC(current, min_chars, common_min_len); - if(!new_buf) { + char * updated_buf = update_completion_buffer(current, min_chars, common_min_len); + 
if(!updated_buf) { printf(" Out of memory "); return; } - set_current(current, new_buf); - // this is posible because set_current copies the given pointer - free(new_buf); } initLinenoiseLine(current); refreshLine(current->prompt, current); @@ -1141,7 +1147,7 @@ static size_t new_line_numbers(size_t pos, struct current * current) static int next_allowed_x(size_t pos, int cols, int pchars) { - if (pos < cols - pchars) + if ((int)pos < cols - pchars) { return pos + pchars; } @@ -1292,7 +1298,7 @@ static void set_current(struct current *current, const char *str) static int has_room(struct current *current, int bytes) { - return current->len + bytes < current->bufmax - 1; + return current->len + (size_t)bytes < (size_t)current->bufmax - 1; } /** @@ -1303,7 +1309,7 @@ static int has_room(struct current *current, int bytes) */ static int remove_char(struct current *current, size_t pos) { - if (pos >= 0 && pos < current->chars) { + if (/*pos >= 0 &&*/ pos < (size_t)current->chars) { int p1, p2; int ret = 1; p1 = utf8_index(current->buf, pos); @@ -1312,7 +1318,7 @@ static int remove_char(struct current *current, size_t pos) #ifdef USE_TERMIOS /* optimise remove char in the case of removing the last char */ if (current->pos == pos + 1 && current->pos == current->chars) { - if (current->buf[pos] >= ' ' && utf8_strlen(current->prompt, strlen(current->prompt)) + utf8_strlen(current->buf, current->len) < current->cols - 1) { + if (current->buf[pos] >= ' ' && utf8_strlen(current->prompt, strlen(current->prompt)) + utf8_strlen(current->buf, current->len) < (size_t)current->cols - 1) { ret = 2; fd_printf(current->fd, "\b \b"); } @@ -1338,12 +1344,12 @@ static int remove_char(struct current *current, size_t pos) * Returns 1 if the line needs to be refreshed, 2 if not * and 0 if nothing was inserted (no room) */ -static int insert_char(struct current *current, size_t pos, int ch) +static int insert_char(struct current *current, int pos, int ch) { char buf[3] = {'\0','\0','\0'}; int 
n = utf8_getchars(buf, ch); - if (has_room(current, n) && pos >= 0 && pos <= current->chars) { + if ((size_t)has_room(current, n) && (pos >= 0) && (size_t)pos <= current->chars) { int p1, p2; int ret = 1; p1 = utf8_index(current->buf, pos); @@ -1366,7 +1372,7 @@ static int insert_char(struct current *current, size_t pos, int ch) current->len += n; current->chars++; - if (current->pos >= pos) { + if ((int)current->pos >= pos) { current->pos++; } return ret; @@ -1379,7 +1385,7 @@ static int insert_char(struct current *current, size_t pos, int ch) * * This replaces any existing characters in the cut buffer. */ -static void capture_chars(struct current *current, size_t pos, size_t n) +static void capture_chars(struct current *current, int pos, size_t n) { if (pos >= 0 && (pos + n - 1) < current->chars) { int p1 = utf8_index(current->buf, pos); @@ -1455,22 +1461,17 @@ static void freeCompletions(linenoiseCompletions *lc) { static int completeLine(struct current *current) { linenoiseCompletions lc = { 0, NULL, 0 }; int c = 0; - char * buf = current->buf + current->pos; - - while((buf > current->buf)) { - if((*buf == ';') || (*buf == ' ')) { - buf++; - break; - } - buf--; + char * buf = NULL; + last_token(current, &buf, NULL); + if(buf && (buf != current->buf)) { + buf++; } - completionCallback(buf,&lc); if (lc.len == 0) { beep(); } else { size_t stop = 0, i = 0; - if(lc.len>1 && lc.multiLine) { + if(lc.len>=1 && lc.multiLine) { refreshPage(&lc, current); freeCompletions(&lc); return c; @@ -1904,7 +1905,7 @@ int linenoiseColumns(void) } char *linenoise(const char *prompt) { - size_t count; + int count; struct current current; char buf[LINENOISE_MAX_LINE]; From 38e4853a999f4a5a786238d37322d64f622b579f Mon Sep 17 00:00:00 2001 From: gschwab Date: Mon, 23 Jun 2014 14:12:19 +0200 Subject: [PATCH 03/13] deleted tmp file --- .../Users/General-Graphs/Management.mdpp | 1 - .../Users/General-Graphs/Management.orig.mdpp | 141 ------------------ 2 files changed, 142 deletions(-) 
delete mode 100644 Documentation/Books/Users/General-Graphs/Management.orig.mdpp diff --git a/Documentation/Books/Users/General-Graphs/Management.mdpp b/Documentation/Books/Users/General-Graphs/Management.mdpp index 6fb800f303..260eea8912 100644 --- a/Documentation/Books/Users/General-Graphs/Management.mdpp +++ b/Documentation/Books/Users/General-Graphs/Management.mdpp @@ -50,7 +50,6 @@ After having introduced edge definitions and orphan collections a graph can be c @startDocuBlock JSF_general_graph_create - !SUBSUBSECTION Complete Example to create a graph Example Call: diff --git a/Documentation/Books/Users/General-Graphs/Management.orig.mdpp b/Documentation/Books/Users/General-Graphs/Management.orig.mdpp deleted file mode 100644 index f41a68fe77..0000000000 --- a/Documentation/Books/Users/General-Graphs/Management.orig.mdpp +++ /dev/null @@ -1,141 +0,0 @@ -!CHAPTER Graph Management - -In order to create a graph the philosophy of handling the graph content has to introduced. -A graph contains a set of edge definitions each referring to one edge collection and -defining constraints on the vertex collections used as start and end points of the edges. -Furthermore a graph can contain an arbitrary amount of vertex collections, called orphan collections, that are not used in any edge definition but should be managed by the graph. -In order to create a graph the functionality to create edge definitions has to be introduced first: - -!SECTION Edge Definitions - -The edge definitions for a graph is an Array containing arbitrary many directed and/or undirected relations as defined below. - -!SUBSECTION Initialize the list - - - -!SUBSECTION Extend the list - - - -!SUBSUBSECTION Undirected Relation - - - -!SUBSUBSECTION Directed Relation - - - -!SUBSECTION Orphan Collections - -Each graph has an orphan collection. It consists of arbitrary many vertex collection (type *document*), that are not -used in an edge definition of the graph. 
If the graph is extended with an edge definition using one of the orphans, -it will be removed from the orphan collection automatically. - -!SUBSUBSECTION Add - - - -!SUBSUBSECTION Read - - - -!SUBSUBSECTION Remove - - - -!SECTION Create a graph - -After having introduced edge definitions and orphan collections a graph can be created. - - - - -!SUBSUBSECTION Complete Example to create a graph - -Example Call: - -```js -> var graph = require("org/arangodb/graph"); -> var edgeDefinitions = graph._edgeDefinitions(); -> graph._extendEdgeDefinitions(edgeDefinitions, graph._undirectedRelation("friend_of", ["Customer"])); -> graph._extendEdgeDefinitions(edgeDefinitions, graph._directedRelation("has_bought", ["Customer", "Company"], ["Groceries", "Electronics"])); -> graph._create("myStore", edgeDefinitions); -{ - _id: "_graphs/123", - _rev: "123", - _key: "123" -} -``` - -alternative call: - -```js -> var graph = require("org/arangodb/graph"); -> var edgeDefinitions = graph._edgeDefinitions(graph._undirectedRelation("friend_of", ["Customer"]), graph._directedRelation("has_bought", ["Customer", "Company"], ["Groceries", "Electronics"])); -> graph._create("myStore", edgeDefinitions); -{ - _id: "_graphs/123", - _rev: "123", - _key: "123" -}; -``` - -!SUBSECTION List available graphs - - -`general-graph._list()` *List all graphs.* -
-
- - - -
-@EXAMPLES -
- - - -!SUBSECTION Load a graph - - - -!SUBSECTION Remove a graph - - - -!SECTION Edge - -!SUBSECTION Save - - - -!SUBSECTION Replace - - - -!SUBSECTION Update - - - -!SUBSECTION Remove - - - -!SECTION Edge - -!SUBSECTION Save - - - -!SUBSECTION Replace - - - -!SUBSECTION Update - - - -!SUBSECTION Remove - - From f62ce6cd7fc6d486a419262d29455178b61364ce Mon Sep 17 00:00:00 2001 From: Lucas Dohmen Date: Mon, 23 Jun 2014 14:14:22 +0200 Subject: [PATCH 04/13] Foxx: Fix error for `body()` when body is empty --- js/server/modules/org/arangodb/foxx/base_middleware.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/js/server/modules/org/arangodb/foxx/base_middleware.js b/js/server/modules/org/arangodb/foxx/base_middleware.js index cfa3549083..f282db0090 100644 --- a/js/server/modules/org/arangodb/foxx/base_middleware.js +++ b/js/server/modules/org/arangodb/foxx/base_middleware.js @@ -61,7 +61,8 @@ BaseMiddleware = function () { //////////////////////////////////////////////////////////////////////////////// body: function () { - return JSON.parse(this.requestBody); + var requestBody = this.requestBody || '{}'; + return JSON.parse(requestBody); }, //////////////////////////////////////////////////////////////////////////////// From b31c2330d43ff4bccf145a8b9741d392a8b0e46c Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Mon, 23 Jun 2014 14:24:12 +0200 Subject: [PATCH 05/13] exclude collections --- .../RestHandler/RestReplicationHandler.cpp | 126 +++--- arangod/VocBase/collection.h | 6 + arangod/VocBase/replication-common.cpp | 6 +- arangod/VocBase/replication-dump.cpp | 366 ++++++++++-------- arangod/VocBase/replication-dump.h | 62 +-- arangod/VocBase/vocbase.cpp | 4 +- arangod/Wal/Marker.h | 6 +- 7 files changed, 303 insertions(+), 273 deletions(-) diff --git a/arangod/RestHandler/RestReplicationHandler.cpp b/arangod/RestHandler/RestReplicationHandler.cpp index a79d0a5ca3..b743bbabb7 100644 --- a/arangod/RestHandler/RestReplicationHandler.cpp +++ 
b/arangod/RestHandler/RestReplicationHandler.cpp @@ -1331,64 +1331,68 @@ void RestReplicationHandler::handleCommandLoggerFollow () { return; } - uint64_t const chunkSize = determineChunkSize(); + int res = TRI_ERROR_NO_ERROR; - // initialise the dump container - TRI_replication_dump_t dump; - if (TRI_InitDumpReplication(&dump, _vocbase, (size_t) defaultChunkSize) != TRI_ERROR_NO_ERROR) { - generateError(HttpResponse::SERVER_ERROR, TRI_ERROR_OUT_OF_MEMORY); - return; + try { + // initialise the dump container + TRI_replication_dump_t dump(_vocbase, (size_t) determineChunkSize()); + + // and dump + res = TRI_DumpLogReplication(_vocbase, &dump, tickStart, tickEnd); + + if (res == TRI_ERROR_NO_ERROR) { + bool const checkMore = (dump._lastFoundTick > 0 && dump._lastFoundTick != state.lastDataTick); + + // generate the result + size_t const length = TRI_LengthStringBuffer(dump._buffer); + + if (length == 0) { + _response = createResponse(HttpResponse::NO_CONTENT); + } + else { + _response = createResponse(HttpResponse::OK); + } + + _response->setContentType("application/x-arango-dump; charset=utf-8"); + + // set headers + _response->setHeader(TRI_REPLICATION_HEADER_CHECKMORE, + strlen(TRI_REPLICATION_HEADER_CHECKMORE), + checkMore ? "true" : "false"); + + _response->setHeader(TRI_REPLICATION_HEADER_LASTINCLUDED, + strlen(TRI_REPLICATION_HEADER_LASTINCLUDED), + StringUtils::itoa(dump._lastFoundTick)); + + _response->setHeader(TRI_REPLICATION_HEADER_LASTTICK, + strlen(TRI_REPLICATION_HEADER_LASTTICK), + StringUtils::itoa(state.lastTick)); + + _response->setHeader(TRI_REPLICATION_HEADER_ACTIVE, + strlen(TRI_REPLICATION_HEADER_ACTIVE), + "true"); + + if (length > 0) { + // transfer ownership of the buffer contents + _response->body().set(dump._buffer); + + // to avoid double freeing + TRI_StealStringBuffer(dump._buffer); + } + + insertClient(dump._lastFoundTick); + } + } + catch (triagens::arango::Exception const& ex) { + res = ex.code(); + } + catch (...) 
{ + res = TRI_ERROR_INTERNAL; } - int res = TRI_DumpLogReplication(_vocbase, &dump, tickStart, tickEnd, chunkSize); - - if (res == TRI_ERROR_NO_ERROR) { - bool const checkMore = (dump._lastFoundTick > 0 && dump._lastFoundTick != state.lastDataTick); - - // generate the result - size_t const length = TRI_LengthStringBuffer(dump._buffer); - - if (length == 0) { - _response = createResponse(HttpResponse::NO_CONTENT); - } - else { - _response = createResponse(HttpResponse::OK); - } - - _response->setContentType("application/x-arango-dump; charset=utf-8"); - - // set headers - _response->setHeader(TRI_REPLICATION_HEADER_CHECKMORE, - strlen(TRI_REPLICATION_HEADER_CHECKMORE), - checkMore ? "true" : "false"); - - _response->setHeader(TRI_REPLICATION_HEADER_LASTINCLUDED, - strlen(TRI_REPLICATION_HEADER_LASTINCLUDED), - StringUtils::itoa(dump._lastFoundTick)); - - _response->setHeader(TRI_REPLICATION_HEADER_LASTTICK, - strlen(TRI_REPLICATION_HEADER_LASTTICK), - StringUtils::itoa(state.lastTick)); - - _response->setHeader(TRI_REPLICATION_HEADER_ACTIVE, - strlen(TRI_REPLICATION_HEADER_ACTIVE), - "true"); - - if (length > 0) { - // transfer ownership of the buffer contents - _response->body().set(dump._buffer); - - // avoid double freeing - TRI_StealStringBuffer(dump._buffer); - } - - insertClient(dump._lastFoundTick); - } - else { + if (res != TRI_ERROR_NO_ERROR) { generateError(HttpResponse::SERVER_ERROR, res); } - - TRI_DestroyDumpReplication(&dump); } //////////////////////////////////////////////////////////////////////////////// @@ -3177,8 +3181,6 @@ void RestReplicationHandler::handleCommandDump () { translateCollectionIds = StringUtils::boolean(value); } - uint64_t const chunkSize = determineChunkSize(); - TRI_vocbase_col_t* c = TRI_LookupCollectionByNameVocBase(_vocbase, collection); if (c == nullptr) { @@ -3204,19 +3206,13 @@ void RestReplicationHandler::handleCommandDump () { TRI_ASSERT(col != nullptr); // initialise the dump container - TRI_replication_dump_t dump; 
- res = TRI_InitDumpReplication(&dump, _vocbase, (size_t) defaultChunkSize); + TRI_replication_dump_t dump(_vocbase, (size_t) determineChunkSize()); + + res = TRI_DumpCollectionReplication(&dump, col, tickStart, tickEnd, withTicks, translateCollectionIds); if (res != TRI_ERROR_NO_ERROR) { THROW_ARANGO_EXCEPTION(res); } - - res = TRI_DumpCollectionReplication(&dump, col, tickStart, tickEnd, chunkSize, withTicks, translateCollectionIds); - - if (res != TRI_ERROR_NO_ERROR) { - TRI_DestroyDumpReplication(&dump); - THROW_ARANGO_EXCEPTION(res); - } // generate the result size_t const length = TRI_LengthStringBuffer(dump._buffer); @@ -3244,8 +3240,6 @@ void RestReplicationHandler::handleCommandDump () { // avoid double freeing TRI_StealStringBuffer(dump._buffer); - - TRI_DestroyDumpReplication(&dump); } catch (triagens::arango::Exception const& ex) { res = ex.code(); diff --git a/arangod/VocBase/collection.h b/arangod/VocBase/collection.h index 9c15143e77..57e3f2b7b9 100644 --- a/arangod/VocBase/collection.h +++ b/arangod/VocBase/collection.h @@ -100,6 +100,12 @@ struct TRI_vocbase_col_s; #define TRI_COL_NAME_TRANSACTION "_trx" +//////////////////////////////////////////////////////////////////////////////// +/// @brief predefined system collection name for replication +//////////////////////////////////////////////////////////////////////////////// + +#define TRI_COL_NAME_REPLICATION "_replication" + //////////////////////////////////////////////////////////////////////////////// /// @brief predefined collection name for users //////////////////////////////////////////////////////////////////////////////// diff --git a/arangod/VocBase/replication-common.cpp b/arangod/VocBase/replication-common.cpp index 4157b8e288..4af2b62125 100644 --- a/arangod/VocBase/replication-common.cpp +++ b/arangod/VocBase/replication-common.cpp @@ -62,7 +62,7 @@ void TRI_GetTimeStampReplication (char* dst, //////////////////////////////////////////////////////////////////////////////// bool 
TRI_ExcludeCollectionReplication (const char* name) { - if (name == NULL) { + if (name == nullptr) { // name invalid return true; } @@ -72,7 +72,9 @@ bool TRI_ExcludeCollectionReplication (const char* name) { return false; } - if (TRI_EqualString(name, TRI_COL_NAME_USERS) || + if (TRI_EqualString(name, TRI_COL_NAME_REPLICATION) || + TRI_EqualString(name, TRI_COL_NAME_TRANSACTION) || + TRI_EqualString(name, TRI_COL_NAME_USERS) || TRI_IsPrefixString(name, TRI_COL_NAME_STATISTICS) || TRI_EqualString(name, "_aal") || TRI_EqualString(name, "_fishbowl") || diff --git a/arangod/VocBase/replication-dump.cpp b/arangod/VocBase/replication-dump.cpp index 00acf05de8..bcce01c2a3 100644 --- a/arangod/VocBase/replication-dump.cpp +++ b/arangod/VocBase/replication-dump.cpp @@ -33,7 +33,6 @@ #include "BasicsC/files.h" #include "BasicsC/json.h" #include "BasicsC/logging.h" -#include "BasicsC/string-buffer.h" #include "BasicsC/tri-strings.h" #include "VocBase/collection.h" @@ -42,8 +41,6 @@ #include "VocBase/transaction.h" #include "VocBase/vocbase.h" #include "VocBase/voc-shaper.h" -#include "Utils/Exception.h" -#include "Utils/transactions.h" #include "Wal/Logfile.h" #include "Wal/LogfileManager.h" #include "Wal/Marker.h" @@ -108,86 +105,36 @@ typedef struct df_entry_s { } df_entry_t; -//////////////////////////////////////////////////////////////////////////////// -/// @brief container for a resolved collection name (cid => name) -//////////////////////////////////////////////////////////////////////////////// - -typedef struct resolved_name_s { - TRI_voc_cid_t _cid; - char* _name; -} -resolved_name_t; - // ----------------------------------------------------------------------------- // --SECTION-- private functions // ----------------------------------------------------------------------------- //////////////////////////////////////////////////////////////////////////////// -/// @brief hashes a collection id +/// @brief translate a collection id into a collection name 
//////////////////////////////////////////////////////////////////////////////// + +char const* NameFromCid (TRI_replication_dump_t* dump, + TRI_voc_cid_t cid) { + auto it = dump->_collectionNames.find(cid); -static uint64_t HashKeyCid (TRI_associative_pointer_t* array, - void const* key) { - TRI_voc_cid_t const* k = static_cast(key); + if (it != dump->_collectionNames.end()) { + // collection name is in cache already + return (*it).second.c_str(); + } + + // collection name not in cache yet + char* name = TRI_GetCollectionNameByIdVocBase(dump->_vocbase, cid); - return *k; -} + if (name != nullptr) { + // insert into cache + dump->_collectionNames.insert(it, std::make_pair(cid, std::string(name))); + TRI_FreeString(TRI_UNKNOWN_MEM_ZONE, name); -//////////////////////////////////////////////////////////////////////////////// -/// @brief hashes a collection name -//////////////////////////////////////////////////////////////////////////////// - -static uint64_t HashElementCid (TRI_associative_pointer_t* array, - void const* element) { - resolved_name_t const* e = static_cast(element); - - return e->_cid; -} - -//////////////////////////////////////////////////////////////////////////////// -/// @brief compares a collection -//////////////////////////////////////////////////////////////////////////////// - -static bool IsEqualKeyElementCid (TRI_associative_pointer_t* array, - void const* key, - void const* element) { - TRI_voc_cid_t const* k = static_cast(key); - resolved_name_t const* e = static_cast(element); - - return *k == e->_cid; -} - -//////////////////////////////////////////////////////////////////////////////// -/// @brief lookup a collection name -//////////////////////////////////////////////////////////////////////////////// - -static bool LookupCollectionName (TRI_replication_dump_t* dump, - TRI_voc_cid_t cid, - char** result) { - - TRI_ASSERT(cid > 0); - - resolved_name_t* found = static_cast(TRI_LookupByKeyAssociativePointer(&dump->_collectionNames, 
&cid)); - - if (found == NULL) { - found = static_cast(TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(resolved_name_t), false)); - - if (found == NULL) { - // out of memory; - return false; - } - - found->_cid = cid; - // name can be NULL if collection is not found. - // but we will still cache a NULL result! - found->_name = TRI_GetCollectionNameByIdVocBase(dump->_vocbase, cid); - - TRI_InsertKeyAssociativePointer(&dump->_collectionNames, &found->_cid, found, false); + // and look it up again + return NameFromCid(dump, cid); } - *result = found->_name; - - return true; + return nullptr; } //////////////////////////////////////////////////////////////////////////////// @@ -199,13 +146,9 @@ static bool AppendCollection (TRI_replication_dump_t* dump, bool translateCollectionIds) { if (translateCollectionIds) { if (cid > 0) { - char* name; + char const* name = NameFromCid(dump, cid); - if (! LookupCollectionName(dump, cid, &name)) { - return false; - } - - if (name != NULL) { + if (name != nullptr) { APPEND_STRING(dump->_buffer, name); return true; } @@ -306,6 +249,21 @@ static TRI_vector_t GetRangeDatafiles (TRI_document_collection_t* document, return datafiles; } +//////////////////////////////////////////////////////////////////////////////// +/// @brief append database id plus collection id +//////////////////////////////////////////////////////////////////////////////// + +static bool AppendContext (TRI_replication_dump_t* dump, + TRI_voc_tick_t databaseId, + TRI_voc_cid_t collectionId) { + APPEND_STRING(dump->_buffer, "\"database\":\""); + APPEND_UINT64(dump->_buffer, databaseId); + APPEND_STRING(dump->_buffer, "\",\"cid\":\""); + APPEND_UINT64(dump->_buffer, collectionId); + APPEND_STRING(dump->_buffer, "\","); + return true; +} + //////////////////////////////////////////////////////////////////////////////// /// @brief stringify a raw marker from a datafile for a collection dump //////////////////////////////////////////////////////////////////////////////// @@ 
-487,12 +445,12 @@ static bool StringifyMarkerDump (TRI_replication_dump_t* dump, static bool StringifyWalMarkerDocument (TRI_replication_dump_t* dump, TRI_df_marker_t const* marker) { auto m = reinterpret_cast(marker); + + if (! AppendContext(dump, m->_databaseId, m->_collectionId)) { + return false; + } - APPEND_STRING(dump->_buffer, "\"database\":\""); - APPEND_UINT64(dump->_buffer, m->_databaseId); - APPEND_STRING(dump->_buffer, "\",\"cid\":\""); - APPEND_UINT64(dump->_buffer, m->_collectionId); - APPEND_STRING(dump->_buffer, "\",\"tid\":\""); + APPEND_STRING(dump->_buffer, "\"tid\":\""); APPEND_UINT64(dump->_buffer, m->_transactionId); APPEND_STRING(dump->_buffer, "\",\"key\":\""); APPEND_STRING(dump->_buffer, (char const*) m + m->_offsetKey); @@ -529,12 +487,12 @@ static bool StringifyWalMarkerDocument (TRI_replication_dump_t* dump, static bool StringifyWalMarkerEdge (TRI_replication_dump_t* dump, TRI_df_marker_t const* marker) { auto m = reinterpret_cast(marker); + + if (! AppendContext(dump, m->_databaseId, m->_collectionId)) { + return false; + } - APPEND_STRING(dump->_buffer, "\"database\":\""); - APPEND_UINT64(dump->_buffer, m->_databaseId); - APPEND_STRING(dump->_buffer, "\",\"cid\":\""); - APPEND_UINT64(dump->_buffer, m->_collectionId); - APPEND_STRING(dump->_buffer, "\",\"tid\":\""); + APPEND_STRING(dump->_buffer, "\"tid\":\""); APPEND_UINT64(dump->_buffer, m->_transactionId); APPEND_STRING(dump->_buffer, "\",\"key\":\""); APPEND_STRING(dump->_buffer, (char const*) m + m->_offsetKey); @@ -550,13 +508,13 @@ static bool StringifyWalMarkerEdge (TRI_replication_dump_t* dump, // from APPEND_STRING(dump->_buffer, ",\"" TRI_VOC_ATTRIBUTE_FROM "\":\""); - APPEND_UINT64(dump->_buffer, m->_fromCid); + APPEND_UINT64(dump->_buffer, (uint64_t) m->_fromCid); APPEND_STRING(dump->_buffer, "\\/"); APPEND_STRING(dump->_buffer, (char const*) m + m->_offsetFromKey); // to APPEND_STRING(dump->_buffer, "\",\"" TRI_VOC_ATTRIBUTE_TO "\":\""); - APPEND_UINT64(dump->_buffer, 
m->_toCid); + APPEND_UINT64(dump->_buffer, (uint64_t) m->_toCid); APPEND_STRING(dump->_buffer, "\\/"); APPEND_STRING(dump->_buffer, (char const*) m + m->_offsetFromKey); APPEND_STRING(dump->_buffer, "\""); @@ -584,11 +542,10 @@ static bool StringifyWalMarkerRemove (TRI_replication_dump_t* dump, TRI_df_marker_t const* marker) { auto m = reinterpret_cast(marker); - APPEND_STRING(dump->_buffer, "\"database\":\""); - APPEND_UINT64(dump->_buffer, m->_databaseId); - APPEND_STRING(dump->_buffer, "\",\"cid\":\""); - APPEND_UINT64(dump->_buffer, m->_collectionId); - APPEND_STRING(dump->_buffer, "\",\"tid\":\""); + if (! AppendContext(dump, m->_databaseId, m->_collectionId)) { + return false; + } + APPEND_STRING(dump->_buffer, "\"tid\":\""); APPEND_UINT64(dump->_buffer, m->_transactionId); APPEND_STRING(dump->_buffer, "\",\"key\":\""); APPEND_STRING(dump->_buffer, (char const*) m + sizeof(triagens::wal::remove_marker_t)); @@ -845,10 +802,108 @@ static bool StringifyWalMarker (TRI_replication_dump_t* dump, } //////////////////////////////////////////////////////////////////////////////// -/// @brief whether or not a marker is replicated +/// @brief helper function to extract a database id from a marker +//////////////////////////////////////////////////////////////////////////////// + +template +static TRI_voc_tick_t GetDatabaseId (TRI_df_marker_t const* marker) { + T const* m = reinterpret_cast(marker); + return m->_databaseId; +} + +//////////////////////////////////////////////////////////////////////////////// +/// @brief get the database id from a marker +//////////////////////////////////////////////////////////////////////////////// + +static TRI_voc_tick_t GetDatabaseFromWalMarker (TRI_df_marker_t const* marker) { + switch (marker->_type) { + case TRI_WAL_MARKER_ATTRIBUTE: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_SHAPE: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_DOCUMENT: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_EDGE: + return 
GetDatabaseId(marker); + case TRI_WAL_MARKER_REMOVE: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_BEGIN_TRANSACTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_COMMIT_TRANSACTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_ABORT_TRANSACTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_CREATE_COLLECTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_DROP_COLLECTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_RENAME_COLLECTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_CHANGE_COLLECTION: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_CREATE_INDEX: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_DROP_INDEX: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_CREATE_DATABASE: + return GetDatabaseId(marker); + case TRI_WAL_MARKER_DROP_DATABASE: + return GetDatabaseId(marker); + default: { + return 0; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// +/// @brief helper function to extract a collection id from a marker +//////////////////////////////////////////////////////////////////////////////// + +template +static TRI_voc_tick_t GetCollectionId (TRI_df_marker_t const* marker) { + T const* m = reinterpret_cast(marker); + return m->_collectionId; +} + +//////////////////////////////////////////////////////////////////////////////// +/// @brief get the collection id from a marker +//////////////////////////////////////////////////////////////////////////////// + +static TRI_voc_tick_t GetCollectionFromWalMarker (TRI_df_marker_t const* marker) { + switch (marker->_type) { + case TRI_WAL_MARKER_ATTRIBUTE: + return GetCollectionId(marker); + case TRI_WAL_MARKER_SHAPE: + return GetCollectionId(marker); + case TRI_WAL_MARKER_DOCUMENT: + return GetCollectionId(marker); + case TRI_WAL_MARKER_EDGE: + return GetCollectionId(marker); + case TRI_WAL_MARKER_REMOVE: + return GetCollectionId(marker); + case TRI_WAL_MARKER_CREATE_COLLECTION: + return 
GetCollectionId(marker); + case TRI_WAL_MARKER_DROP_COLLECTION: + return GetCollectionId(marker); + case TRI_WAL_MARKER_RENAME_COLLECTION: + return GetCollectionId(marker); + case TRI_WAL_MARKER_CHANGE_COLLECTION: + return GetCollectionId(marker); + case TRI_WAL_MARKER_CREATE_INDEX: + return GetCollectionId(marker); + case TRI_WAL_MARKER_DROP_INDEX: + return GetCollectionId(marker); + default: { + return 0; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// +/// @brief whether or not a marker should be replicated //////////////////////////////////////////////////////////////////////////////// -static inline bool MustReplicateMarker (TRI_df_marker_t const* marker) { +static inline bool MustReplicateWalMarkerType (TRI_df_marker_t const* marker) { return (marker->_type == TRI_WAL_MARKER_DOCUMENT || marker->_type == TRI_WAL_MARKER_EDGE || marker->_type == TRI_WAL_MARKER_REMOVE || @@ -863,6 +918,38 @@ static inline bool MustReplicateMarker (TRI_df_marker_t const* marker) { marker->_type == TRI_WAL_MARKER_DROP_INDEX); } +//////////////////////////////////////////////////////////////////////////////// +/// @brief whether or not a marker is replicated +//////////////////////////////////////////////////////////////////////////////// + +static bool MustReplicateWalMarker (TRI_replication_dump_t* dump, + TRI_df_marker_t const* marker) { + // first check the marker type + if (! 
MustReplicateWalMarkerType(marker)) { + return false; + } + + // then check if the marker belongs to the "correct" database + if (dump->_vocbase->_id != GetDatabaseFromWalMarker(marker)) { + return false; + } + + // finally check if the marker is for a collection that we want to ignore + TRI_voc_cid_t cid = GetCollectionFromWalMarker(marker); + if (cid != 0) { + auto it = dump->_collectionNames.find(cid); + + if (it != dump->_collectionNames.end()) { + char const* name = NameFromCid(dump, cid); + if (TRI_ExcludeCollectionReplication(name)) { + return false; + } + } + } + + return true; +} + //////////////////////////////////////////////////////////////////////////////// /// @brief dump data from a collection //////////////////////////////////////////////////////////////////////////////// @@ -871,7 +958,6 @@ static int DumpCollection (TRI_replication_dump_t* dump, TRI_document_collection_t* document, TRI_voc_tick_t dataMin, TRI_voc_tick_t dataMax, - uint64_t chunkSize, bool withTicks, bool translateCollectionIds) { TRI_vector_t datafiles; @@ -892,11 +978,10 @@ static int DumpCollection (TRI_replication_dump_t* dump, // until a certain tick. 
triagens::arango::TransactionBase trx(true); - LOG_TRACE("dumping collection %llu, tick range %llu - %llu, chunk size %llu", + LOG_TRACE("dumping collection %llu, tick range %llu - %llu", (unsigned long long) document->_info._cid, (unsigned long long) dataMin, - (unsigned long long) dataMax, - (unsigned long long) chunkSize); + (unsigned long long) dataMax); buffer = dump->_buffer; datafiles = GetRangeDatafiles(document, dataMin, dataMax); @@ -1034,7 +1119,7 @@ static int DumpCollection (TRI_replication_dump_t* dump, goto NEXT_DF; } - if ((uint64_t) TRI_LengthStringBuffer(buffer) > chunkSize) { + if ((uint64_t) TRI_LengthStringBuffer(buffer) > dump->_chunkSize) { // abort the iteration bufferFull = true; @@ -1085,7 +1170,6 @@ int TRI_DumpCollectionReplication (TRI_replication_dump_t* dump, TRI_vocbase_col_t* col, TRI_voc_tick_t dataMin, TRI_voc_tick_t dataMax, - uint64_t chunkSize, bool withTicks, bool translateCollectionIds) { TRI_ASSERT(col != nullptr); @@ -1103,7 +1187,7 @@ int TRI_DumpCollectionReplication (TRI_replication_dump_t* dump, // block compaction TRI_ReadLockReadWriteLock(&document->_compactionLock); - int res = DumpCollection(dump, document, dataMin, dataMax, chunkSize, withTicks, translateCollectionIds); + int res = DumpCollection(dump, document, dataMin, dataMax, withTicks, translateCollectionIds); TRI_ReadUnlockReadWriteLock(&document->_compactionLock); @@ -1119,12 +1203,10 @@ int TRI_DumpCollectionReplication (TRI_replication_dump_t* dump, int TRI_DumpLogReplication (TRI_vocbase_t* vocbase, TRI_replication_dump_t* dump, TRI_voc_tick_t tickMin, - TRI_voc_tick_t tickMax, - uint64_t chunkSize) { - LOG_TRACE("dumping log, tick range %llu - %llu, chunk size %llu", + TRI_voc_tick_t tickMax) { + LOG_TRACE("dumping log, tick range %llu - %llu", (unsigned long long) tickMin, - (unsigned long long) tickMax, - (unsigned long long) chunkSize); + (unsigned long long) tickMax); // ask the logfile manager which datafiles qualify std::vector logfiles = 
triagens::wal::LogfileManager::instance()->getLogfilesForTickRange(tickMin, tickMax); @@ -1172,8 +1254,8 @@ int TRI_DumpLogReplication (TRI_vocbase_t* vocbase, // marker too new break; } - - if (! MustReplicateMarker(marker)) { + + if (! MustReplicateWalMarker(dump, marker)) { // check if we can abort searching continue; } @@ -1185,7 +1267,7 @@ int TRI_DumpLogReplication (TRI_vocbase_t* vocbase, THROW_ARANGO_EXCEPTION(TRI_ERROR_INTERNAL); } - if ((uint64_t) TRI_LengthStringBuffer(dump->_buffer) > chunkSize) { + if ((uint64_t) TRI_LengthStringBuffer(dump->_buffer) > dump->_chunkSize) { // abort the iteration bufferFull = true; break; @@ -1227,66 +1309,6 @@ int TRI_DumpLogReplication (TRI_vocbase_t* vocbase, return res; } -//////////////////////////////////////////////////////////////////////////////// -/// @brief initialise a replication dump container -//////////////////////////////////////////////////////////////////////////////// - -int TRI_InitDumpReplication (TRI_replication_dump_t* dump, - TRI_vocbase_t* vocbase, - size_t bufferSize) { - int res; - - TRI_ASSERT(vocbase != nullptr); - - dump->_vocbase = vocbase; - dump->_lastFoundTick = 0; - dump->_lastSid = 0; - dump->_lastShape = nullptr; - dump->_failed = false; - dump->_bufferFull = false; - dump->_hasMore = false; - - dump->_buffer = TRI_CreateSizedStringBuffer(TRI_CORE_MEM_ZONE, bufferSize); - - if (dump->_buffer == nullptr) { - return TRI_ERROR_OUT_OF_MEMORY; - } - - res = TRI_InitAssociativePointer(&dump->_collectionNames, - TRI_UNKNOWN_MEM_ZONE, - HashKeyCid, - HashElementCid, - IsEqualKeyElementCid, - nullptr); - - if (res != TRI_ERROR_NO_ERROR) { - TRI_FreeStringBuffer(TRI_CORE_MEM_ZONE, dump->_buffer); - } - - return res; -} - -//////////////////////////////////////////////////////////////////////////////// -/// @brief destroy a replication dump container -//////////////////////////////////////////////////////////////////////////////// - -void TRI_DestroyDumpReplication (TRI_replication_dump_t* 
dump) { - for (size_t i = 0; i < dump->_collectionNames._nrAlloc; ++i) { - resolved_name_t* found = static_cast(dump->_collectionNames._table[i]); - - if (found != nullptr) { - if (found->_name != nullptr) { - // name can be NULL - TRI_Free(TRI_UNKNOWN_MEM_ZONE, found->_name); - } - TRI_Free(TRI_UNKNOWN_MEM_ZONE, found); - } - } - - TRI_DestroyAssociativePointer(&dump->_collectionNames); - TRI_FreeStringBuffer(TRI_CORE_MEM_ZONE, dump->_buffer); -} - // ----------------------------------------------------------------------------- // --SECTION-- END-OF-FILE // ----------------------------------------------------------------------------- diff --git a/arangod/VocBase/replication-dump.h b/arangod/VocBase/replication-dump.h index e68a262451..7aac53a863 100644 --- a/arangod/VocBase/replication-dump.h +++ b/arangod/VocBase/replication-dump.h @@ -33,18 +33,19 @@ #include "Basics/Common.h" #include "BasicsC/associative.h" +#include "BasicsC/string-buffer.h" #include "ShapedJson/shaped-json.h" +#include "Utils/Exception.h" #include "VocBase/replication-common.h" #include "VocBase/voc-types.h" +#include "VocBase/vocbase.h" // ----------------------------------------------------------------------------- // --SECTION-- forward declarations // ----------------------------------------------------------------------------- struct TRI_shape_s; -struct TRI_string_buffer_s; struct TRI_vocbase_col_s; -struct TRI_vocbase_s; // ----------------------------------------------------------------------------- // --SECTION-- REPLICATION LOGGER @@ -58,18 +59,43 @@ struct TRI_vocbase_s; /// @brief replication dump container //////////////////////////////////////////////////////////////////////////////// -typedef struct TRI_replication_dump_s { - struct TRI_string_buffer_s* _buffer; +struct TRI_replication_dump_t { + TRI_replication_dump_t (TRI_vocbase_t* vocbase, + size_t chunkSize) + : _vocbase(vocbase), + _buffer(nullptr), + _chunkSize(chunkSize), + _lastFoundTick(0), + _lastSid(0), + 
_lastShape(nullptr), + _collectionNames(), + _failed(false), + _bufferFull(false), + _hasMore(false) { + + _buffer = TRI_CreateSizedStringBuffer(TRI_UNKNOWN_MEM_ZONE, chunkSize); + + if (_buffer == nullptr) { + THROW_ARANGO_EXCEPTION(TRI_ERROR_OUT_OF_MEMORY); + } + } + + ~TRI_replication_dump_t () { + TRI_FreeStringBuffer(TRI_UNKNOWN_MEM_ZONE, _buffer); + _buffer = nullptr; + } + + TRI_vocbase_t* _vocbase; + TRI_string_buffer_t* _buffer; + size_t _chunkSize; TRI_voc_tick_t _lastFoundTick; TRI_shape_sid_t _lastSid; struct TRI_shape_s const* _lastShape; - struct TRI_vocbase_s* _vocbase; - TRI_associative_pointer_t _collectionNames; + std::unordered_map _collectionNames; bool _failed; - bool _hasMore; bool _bufferFull; -} -TRI_replication_dump_t; + bool _hasMore; +}; // ----------------------------------------------------------------------------- // --SECTION-- public functions @@ -83,7 +109,6 @@ int TRI_DumpCollectionReplication (TRI_replication_dump_t*, struct TRI_vocbase_col_s*, TRI_voc_tick_t, TRI_voc_tick_t, - uint64_t, bool, bool); @@ -94,22 +119,7 @@ int TRI_DumpCollectionReplication (TRI_replication_dump_t*, int TRI_DumpLogReplication (struct TRI_vocbase_s*, TRI_replication_dump_t*, TRI_voc_tick_t, - TRI_voc_tick_t, - uint64_t); - -//////////////////////////////////////////////////////////////////////////////// -/// @brief initialise a replication dump container -//////////////////////////////////////////////////////////////////////////////// - -int TRI_InitDumpReplication (TRI_replication_dump_t*, - struct TRI_vocbase_s*, - size_t); - -//////////////////////////////////////////////////////////////////////////////// -/// @brief destroy a replication dump container -//////////////////////////////////////////////////////////////////////////////// - -void TRI_DestroyDumpReplication (TRI_replication_dump_t*); + TRI_voc_tick_t); #endif diff --git a/arangod/VocBase/vocbase.cpp b/arangod/VocBase/vocbase.cpp index 689bec5612..c7279fad17 100644 --- 
a/arangod/VocBase/vocbase.cpp +++ b/arangod/VocBase/vocbase.cpp @@ -1839,8 +1839,8 @@ char* TRI_GetCollectionNameByIdVocBase (TRI_vocbase_t* vocbase, TRI_vocbase_col_t* found = static_cast(TRI_LookupByKeyAssociativePointer(&vocbase->_collectionsById, &id)); - if (found == NULL) { - name = NULL; + if (found == nullptr) { + name = nullptr; } else { name = TRI_DuplicateStringZ(TRI_UNKNOWN_MEM_ZONE, found->_name); diff --git a/arangod/Wal/Marker.h b/arangod/Wal/Marker.h index 67911c567d..fc023ecbaf 100644 --- a/arangod/Wal/Marker.h +++ b/arangod/Wal/Marker.h @@ -275,11 +275,7 @@ namespace triagens { inline char* end () const { return _buffer + _size; } - /* - inline char* payload () const { - return begin() + sizeof(TRI_df_marker_t); - } -*/ + //////////////////////////////////////////////////////////////////////////////// /// @brief return the size of the marker //////////////////////////////////////////////////////////////////////////////// From 82bd150a11f4d8c643e48cc8da2af30e8d17b4d2 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Mon, 23 Jun 2014 14:32:06 +0200 Subject: [PATCH 06/13] dump --- arangod/VocBase/replication-dump.cpp | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/arangod/VocBase/replication-dump.cpp b/arangod/VocBase/replication-dump.cpp index bcce01c2a3..84db1e383d 100644 --- a/arangod/VocBase/replication-dump.cpp +++ b/arangod/VocBase/replication-dump.cpp @@ -471,7 +471,7 @@ static bool StringifyWalMarkerDocument (TRI_replication_dump_t* dump, shaped._data.data = (char*) m + m->_offsetJson; triagens::basics::LegendReader lr((char const*) m + m->_offsetLegend); - if (! TRI_StringifyArrayShapedJson(&lr, dump->_buffer, &shaped, false)) { + if (! 
TRI_StringifyArrayShapedJson(&lr, dump->_buffer, &shaped, true)) { return false; } @@ -525,7 +525,7 @@ static bool StringifyWalMarkerEdge (TRI_replication_dump_t* dump, shaped._data.data = (char*) m + m->_offsetJson; triagens::basics::LegendReader lr((char const*) m + m->_offsetLegend); - if (! TRI_StringifyArrayShapedJson(&lr, dump->_buffer, &shaped, false)) { + if (! TRI_StringifyArrayShapedJson(&lr, dump->_buffer, &shaped, true)) { return false; } @@ -937,13 +937,10 @@ static bool MustReplicateWalMarker (TRI_replication_dump_t* dump, // finally check if the marker is for a collection that we want to ignore TRI_voc_cid_t cid = GetCollectionFromWalMarker(marker); if (cid != 0) { - auto it = dump->_collectionNames.find(cid); - - if (it != dump->_collectionNames.end()) { - char const* name = NameFromCid(dump, cid); - if (TRI_ExcludeCollectionReplication(name)) { - return false; - } + char const* name = NameFromCid(dump, cid); + + if (name != nullptr && TRI_ExcludeCollectionReplication(name)) { + return false; } } From da1cb2e62d27e18f6afdf6939c60daa7c2755b7c Mon Sep 17 00:00:00 2001 From: scottashton Date: Mon, 23 Jun 2014 14:42:38 +0200 Subject: [PATCH 07/13] minor docu fixes --- Documentation/Books/Users/General-Graphs/Functions.mdpp | 5 +++++ Documentation/Books/Users/General-Graphs/Management.mdpp | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Documentation/Books/Users/General-Graphs/Functions.mdpp b/Documentation/Books/Users/General-Graphs/Functions.mdpp index f9fa1b1da9..7977ea2fa0 100644 --- a/Documentation/Books/Users/General-Graphs/Functions.mdpp +++ b/Documentation/Books/Users/General-Graphs/Functions.mdpp @@ -1,6 +1,11 @@ !CHAPTER Graph Functions This chapter describes various functions on a graph. +A lot of these accept a vertex (or edge) example as parameter as defined in the next section. + +!SECTION Definition of examples + +@startDocuBlock JSF_general_graph_example_description !SECTION Get vertices from edges. 
diff --git a/Documentation/Books/Users/General-Graphs/Management.mdpp b/Documentation/Books/Users/General-Graphs/Management.mdpp index 6fb800f303..83b29cdb26 100644 --- a/Documentation/Books/Users/General-Graphs/Management.mdpp +++ b/Documentation/Books/Users/General-Graphs/Management.mdpp @@ -1,6 +1,6 @@ !CHAPTER Graph Management -In order to create a graph the philosophy of handling the graph content has to introduced. +In order to create a graph the philosophy of handling the graph content has to be introduced. A graph contains a set of edge definitions each referring to one edge collection and defining constraints on the vertex collections used as start and end points of the edges. Furthermore a graph can contain an arbitrary amount of vertex collections, called orphan collections, that are not used in any edge definition but should be managed by the graph. @@ -28,7 +28,7 @@ The edge definitions for a graph is an Array containing arbitrary many directed !SUBSECTION Orphan Collections -Each graph has an orphan collection. It consists of arbitrary many vertex collection (type *document*), that are not +Each graph can have an arbitrary amount of orphan collections. These are vertex collections (type *document*), that are not used in an edge definition of the graph. If the graph is extended with an edge definition using one of the orphans, it will be removed from the orphan collection automatically. 
From c67b1d9d5a215b54a789050992697b56d6d4db7b Mon Sep 17 00:00:00 2001 From: Lucas Dohmen Date: Mon, 23 Jun 2014 14:43:28 +0200 Subject: [PATCH 08/13] Foxx: Fix Model.fromClient --- js/server/modules/org/arangodb/foxx/model.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/js/server/modules/org/arangodb/foxx/model.js b/js/server/modules/org/arangodb/foxx/model.js index e1bfecacc9..d13d5135ed 100644 --- a/js/server/modules/org/arangodb/foxx/model.js +++ b/js/server/modules/org/arangodb/foxx/model.js @@ -108,9 +108,10 @@ Model = function (attributes) { Model.fromClient = function (attributes) { 'use strict'; - var instance = new this(); + var instance = new this(attributes); instance.attributes = whitelistProperties(attributes, this.attributes, false); instance.attributes = fillInDefaults(instance.attributes, this.attributes); + instance.whitelistedAttributes = whitelistProperties(instance.attributes, this.attributes); return instance; }; From b69b016f875c30c98956e882a1f1ccb8c1321279 Mon Sep 17 00:00:00 2001 From: gschwab Date: Mon, 23 Jun 2014 14:44:07 +0200 Subject: [PATCH 09/13] layout --- js/common/modules/org/arangodb/general-graph.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/common/modules/org/arangodb/general-graph.js b/js/common/modules/org/arangodb/general-graph.js index 76e892883c..3fb0fa4821 100644 --- a/js/common/modules/org/arangodb/general-graph.js +++ b/js/common/modules/org/arangodb/general-graph.js @@ -3078,7 +3078,7 @@ Graph.prototype._eccentricity = function(options) { /// *of the vertices defined by the examples.* /// /// The function accepts an id, an example, a list of examples or even an empty -/// example as parameter for vertexExample. +/// example as parameter for *vertexExample*. 
/// /// *Parameter* /// From 34a80c08b11206f5ce520276f1913ed1533976c0 Mon Sep 17 00:00:00 2001 From: Jan Steemann Date: Mon, 23 Jun 2014 14:53:23 +0200 Subject: [PATCH 10/13] fixed assertion --- arangod/Wal/Slot.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/arangod/Wal/Slot.h b/arangod/Wal/Slot.h index 92548ceaf0..78875b41cf 100644 --- a/arangod/Wal/Slot.h +++ b/arangod/Wal/Slot.h @@ -220,6 +220,10 @@ namespace triagens { void* _mem; +#ifdef TRI_PADDING_32 + char _padding[4]; +#endif + //////////////////////////////////////////////////////////////////////////////// /// @brief slot raw memory size //////////////////////////////////////////////////////////////////////////////// From 10a5b782fd1a54ef9c5386421b14092d6eb66b25 Mon Sep 17 00:00:00 2001 From: Thomas Schmidts Date: Mon, 23 Jun 2014 14:55:14 +0200 Subject: [PATCH 11/13] Some changes in the documentation code --- .../Users/Collections/CollectionMethods.mdpp | 85 ++------- .../Books/Users/Databases/WorkingWith.mdpp | 16 +- arangod/V8Server/v8-query.cpp | 56 +++--- arangod/V8Server/v8-vocbase.cpp | 79 ++++---- .../modules/org/arangodb/arango-collection.js | 176 +++++++++--------- 5 files changed, 181 insertions(+), 231 deletions(-) diff --git a/Documentation/Books/Users/Collections/CollectionMethods.mdpp b/Documentation/Books/Users/Collections/CollectionMethods.mdpp index 8820e3bad5..4345e04c82 100644 --- a/Documentation/Books/Users/Collections/CollectionMethods.mdpp +++ b/Documentation/Books/Users/Collections/CollectionMethods.mdpp @@ -1,33 +1,16 @@ !CHAPTER Collection Methods -@startDocuBlock collection_drop +@startDocuBlock collectionDrop -`collection.truncate()` - -Truncates a collection, removing all documents but keeping all its indexes. 
- -*Examples* - -Truncates a collection: - -``` -arango> col = db.examples; -[ArangoCollection 91022, "examples" (status new born)] -arango> col.save({ "Hello" : "World" }); -{ "_id" : "91022/1532814", "_rev" : 1532814 } -arango> col.count(); -1 -arango> col.truncate(); -arango> col.count(); -0 -``` + +@startDocuBlock collectionTruncate -@startDocuBlock collection_properties +@startDocuBlock collectionProperties -@startDocuBlock collection_figures +@startDocuBlock collectionFigures `collection.reserve( number)` @@ -38,62 +21,16 @@ The reserve hint can be sent before a mass insertion into the collection is star Not all indexes implement the reserve function at the moment. The indexes that don't implement it will simply ignore the request. returns the revision id of a collection -@startDocuBlock collection_revision +@startDocuBlock collectionRevision -`collection.checksum( withRevisions, withData)` - -The checksum operation calculates a CRC32 checksum of the keys contained in collection collection. - -If the optional argument withRevisions is set to true, then the revision ids of the documents are also included in the checksumming. - -If the optional argument withData is set to true, then the actual document data is also checksummed. Including the document data in checksumming will make the calculation slower, but is more accurate. + +@startDocuBlock collectionChecksum -@startDocuBlock collection_unload +@startDocuBlock collectionUnload -@startDocuBlock collection_rename +@startDocuBlock collectionRename -@startDocuBlock collection_rotate - - \ No newline at end of file +@startDocuBlock collectionRotate \ No newline at end of file diff --git a/Documentation/Books/Users/Databases/WorkingWith.mdpp b/Documentation/Books/Users/Databases/WorkingWith.mdpp index 45d8513c12..031b0c7e47 100644 --- a/Documentation/Books/Users/Databases/WorkingWith.mdpp +++ b/Documentation/Books/Users/Databases/WorkingWith.mdpp @@ -8,32 +8,32 @@ database only. 
-@startDocuBlock database_name +@startDocuBlock databaseName -@startDocuBlock database_id +@startDocuBlock databaseId -@startDocuBlock database_path +@startDocuBlock databasePath -@startDocuBlock database_isSystem +@startDocuBlock databaseIsSystem -@startDocuBlock database_useDatabase +@startDocuBlock databaseUseDatabase -@startDocuBlock database_listDatabase +@startDocuBlock databaseListDatabase -@startDocuBlock database_createDatabase +@startDocuBlock databaseCreateDatabase -@startDocuBlock database_dropDatabase \ No newline at end of file +@startDocuBlock databaseDropDatabase \ No newline at end of file diff --git a/arangod/V8Server/v8-query.cpp b/arangod/V8Server/v8-query.cpp index cb9a08548c..22a2b006b5 100644 --- a/arangod/V8Server/v8-query.cpp +++ b/arangod/V8Server/v8-query.cpp @@ -2216,7 +2216,7 @@ template static bool ChecksumCalculator (TRI_doc_mptr_t const* //////////////////////////////////////////////////////////////////////////////// /// @brief calculates a checksum for the data in a collection -/// @startDocuBlock collection_checksum +/// @startDocuBlock collectionChecksum /// `collection.checksum(withRevisions, withData)` /// /// The *checksum* operation calculates a CRC32 checksum of the keys @@ -2229,7 +2229,7 @@ template static bool ChecksumCalculator (TRI_doc_mptr_t const* /// actual document data is also checksummed. Including the document data in /// checksumming will make the calculation slower, but is more accurate. /// -/// Note: this method is not available in a cluster. +/// **Note**: this method is not available in a cluster. 
/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// @@ -2320,21 +2320,22 @@ static v8::Handle JS_ChecksumCollection (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief selects all edges for a set of vertices +/// @startDocuBlock edgeCollectionEdges +/// `edge-collection.edges(vertex)` /// -/// @FUN{@FA{edge-collection}.edges(@FA{vertex})} +/// The *edges* operator finds all edges starting from (outbound) or ending +/// in (inbound) *vertex*. /// -/// The @FN{edges} operator finds all edges starting from (outbound) or ending -/// in (inbound) @FA{vertex}. +/// `edge-collection.edges(vertices)` /// -/// @FUN{@FA{edge-collection}.edges(@FA{vertices})} -/// -/// The @FN{edges} operator finds all edges starting from (outbound) or ending -/// in (inbound) a document from @FA{vertices}, which must a list of documents +/// The *edges* operator finds all edges starting from (outbound) or ending +/// in (inbound) a document from *vertices*, which must a list of documents /// or document handles. /// /// @EXAMPLES /// /// @verbinclude shell-edge-edges +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// static v8::Handle JS_EdgesQuery (v8::Arguments const& argv) { @@ -2343,7 +2344,7 @@ static v8::Handle JS_EdgesQuery (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief selects all inbound edges -/// +/// @startDocuBlock edgeCollectionInEdges /// @FUN{@FA{edge-collection}.inEdges(@FA{vertex})} /// /// The @FN{edges} operator finds all edges ending in (inbound) @FA{vertex}. 
@@ -2356,6 +2357,7 @@ static v8::Handle JS_EdgesQuery (v8::Arguments const& argv) { /// @EXAMPLES /// /// @verbinclude shell-edge-in-edges +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// static v8::Handle JS_InEdgesQuery (v8::Arguments const& argv) { @@ -2535,24 +2537,25 @@ static v8::Handle FulltextQuery (V8ReadTransaction& trx, //////////////////////////////////////////////////////////////////////////////// /// @brief queries the fulltext index +/// @startDocuBlock collectionFulltext +/// `collection.FULLTEXT(index-handle, query)` /// -/// @FUN{@FA{collection}.FULLTEXT(@FA{index-handle}, @FA{query})} +/// The *FULLTEXT* operator performs a fulltext search using the specified +/// index and the specified *query*. /// -/// The @FN{FULLTEXT} operator performs a fulltext search using the specified -/// index and the specified @FA{query}. -/// -/// @FA{query} must contain a comma-separated list of words to look for. +/// *query* must contain a comma-separated list of words to look for. /// Each word can optionally be prefixed with one of the following command /// literals: -/// - @LIT{prefix}: perform a prefix-search for the word following -/// - @LIT{substring}: perform substring-matching for the word following. This +/// - *prefix*: perform a prefix-search for the word following +/// - *substring*: perform substring-matching for the word following. 
This /// option is only supported for fulltext indexes that have been created with -/// the @LIT{indexSubstrings} option -/// - @LIT{complete}: only match the complete following word (this is the default) +/// the *indexSubstrings* option +/// - *complete*: only match the complete following word (this is the default) /// /// @EXAMPLES /// /// @verbinclude shell-simple-fulltext +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// static v8::Handle JS_FulltextQuery (v8::Arguments const& argv) { @@ -2779,20 +2782,21 @@ static v8::Handle JS_NearQuery (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief selects all outbound edges +/// @startDocuBlock edgeCollectionOutEdges +/// `edge-collection.outEdges(vertex)` /// -/// @FUN{@FA{edge-collection}.outEdges(@FA{vertex})} +/// The *edges* operator finds all edges starting from (outbound) +/// *vertices*. /// -/// The @FN{edges} operator finds all edges starting from (outbound) -/// @FA{vertices}. +/// `edge-collection.outEdges(vertices)` /// -/// @FUN{@FA{edge-collection}.outEdges(@FA{vertices})} -/// -/// The @FN{edges} operator finds all edges starting from (outbound) a document -/// from @FA{vertices}, which must a list of documents or document handles. +/// The *edges* operator finds all edges starting from (outbound) a document +/// from *vertices*, which must a list of documents or document handles. 
/// /// @EXAMPLES /// /// @verbinclude shell-edge-out-edges +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// static v8::Handle JS_OutEdgesQuery (v8::Arguments const& argv) { diff --git a/arangod/V8Server/v8-vocbase.cpp b/arangod/V8Server/v8-vocbase.cpp index a685aef759..3f176b3fe6 100644 --- a/arangod/V8Server/v8-vocbase.cpp +++ b/arangod/V8Server/v8-vocbase.cpp @@ -5494,7 +5494,7 @@ static v8::Handle JS_DatafileScanVocbaseCol (v8::Arguments const& arg //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that an index exists -/// @startDocuBlock col_ensureIndex +/// @startDocuBlock collectionEnsureIndex /// `collection.ensureIndex(index-description) /// /// Ensures that an index according to the *index-description* exists. A @@ -5553,7 +5553,7 @@ static v8::Handle JS_LookupIndexVocbaseCol (v8::Arguments const& argv //////////////////////////////////////////////////////////////////////////////// /// @brief counts the number of documents in a result set -/// @startDocuBlock col_count +/// @startDocuBlock colllectionCount /// `collection.count()` /// /// Returns the number of living documents in the collection. @@ -5617,7 +5617,7 @@ static v8::Handle JS_CountVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief returns information about the datafiles -/// @startDocuBlock col_datafiles +/// @startDocuBlock collectionDatafiles /// `collection.datafiles()` /// /// Returns information about the datafiles. The collection must be unloaded. 
@@ -5683,7 +5683,7 @@ static v8::Handle JS_DatafilesVocbaseCol (v8::Arguments const& argv) //////////////////////////////////////////////////////////////////////////////// /// @brief looks up a document -/// @startDocuBlock documents_collectionName +/// @startDocuBlock documentsCollectionName /// `collection.document(document)` /// /// The *document* method finds a document given its identifier or a document @@ -5771,7 +5771,7 @@ static v8::Handle DropVocbaseColCoordinator (TRI_vocbase_col_t* colle //////////////////////////////////////////////////////////////////////////////// /// @brief drops a collection -/// @startDocuBlock collection_drop +/// @startDocuBlock collectionDrop /// `collection.drop()` /// /// Drops a *collection* and all its indexes. @@ -5779,6 +5779,7 @@ static v8::Handle DropVocbaseColCoordinator (TRI_vocbase_col_t* colle /// @EXAMPLES /// /// @verbinclude shell_collection-drop +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// static v8::Handle JS_DropVocbaseCol (v8::Arguments const& argv) { @@ -5939,7 +5940,7 @@ static v8::Handle JS_DropIndexVocbaseCol (v8::Arguments const& argv) //////////////////////////////////////////////////////////////////////////////// /// @brief checks whether a document exists -/// @startDocuBlock documents_collectionExists +/// @startDocuBlock documentsCollectionExists /// `collection.exists(document)` /// /// The *exists* method determines whether a document exists given its @@ -6021,7 +6022,7 @@ static TRI_doc_collection_info_t* GetFigures (TRI_vocbase_col_t* collection) { //////////////////////////////////////////////////////////////////////////////// /// @brief returns the figures of a collection -/// @startDocuBlock collection_figures +/// @startDocuBlock collectionFigures /// `collection.figures()` /// /// Returns an object containing all collection figures. 
@@ -6206,7 +6207,7 @@ static v8::Handle GetIndexesCoordinator (TRI_vocbase_col_t const* col //////////////////////////////////////////////////////////////////////////////// /// @brief returns information about the indexes -/// @startDocuBlock col_getIndexes +/// @startDocuBlock collectionGetIndexes /// @FUN{getIndexes()} /// /// Returns a list of all indexes defined for the collection. @@ -6274,7 +6275,7 @@ static v8::Handle JS_GetIndexesVocbaseCol (v8::Arguments const& argv) //////////////////////////////////////////////////////////////////////////////// /// @brief loads a collection -/// @startDocuBlock collection_load +/// @startDocuBlock collectionLoad /// `collection.load()` /// /// Loads a collection into memory. @@ -6375,7 +6376,7 @@ static v8::Handle JS_PlanIdVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief gets or sets the properties of a collection -/// @startDocuBlock collection_properties +/// @startDocuBlock collectionProperties /// `collection.properties()` /// /// Returns an object containing all collection properties. @@ -6665,7 +6666,7 @@ static v8::Handle JS_PropertiesVocbaseCol (v8::Arguments const& argv) //////////////////////////////////////////////////////////////////////////////// /// @brief removes a document -/// @startDocuBlock documents_documentRemove +/// @startDocuBlock documentsDocumentRemove /// `collection.remove(document)` /// /// Removes a document. If there is revision mismatch, then an error is thrown. @@ -6739,7 +6740,7 @@ static v8::Handle JS_RemoveVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief renames a collection -/// @startDocuBlock collection_rename +/// @startDocuBlock collectionRename /// `collection.rename(new-name)` /// /// Renames a collection using the *new-name*. 
The *new-name* must not @@ -6811,7 +6812,7 @@ static v8::Handle JS_RenameVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief replaces a document -/// @startDocuBlock documents_collectionReplace +/// @startDocuBlock documentsCollectionReplace /// `collection.replace(document, data)` /// /// Replaces an existing *document*. The *document* must be a document in @@ -6911,7 +6912,7 @@ static int GetRevisionCoordinator (TRI_vocbase_col_t* collection, //////////////////////////////////////////////////////////////////////////////// /// @brief returns the revision id of a collection -/// @startDocuBlock collection_load +/// @startDocuBlock collectionLoad /// `collection.revision()` /// /// Returns the revision id of the collection @@ -6956,7 +6957,7 @@ static v8::Handle JS_RevisionVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief rotates the current journal of a collection -/// @startDocuBlock collection_rotate +/// @startDocuBlock collectionRotate /// `collection.rotate()` /// /// Rotates the current journal of a collection (i.e. 
makes the journal a @@ -7002,7 +7003,7 @@ static v8::Handle JS_RotateVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief updates a document -/// @startDocuBlock documents_collectionUpdate +/// @startDocuBlock documentsCollectionUpdate /// `collection.update(document, data, overwrite, keepNull, waitForSync)` or /// `collection.update(document, data, /// overwrite: true or false, keepNull: true or false, waitForSync: true or false)` @@ -7238,7 +7239,7 @@ static v8::Handle SaveEdgeColCoordinator (TRI_vocbase_col_t* collecti //////////////////////////////////////////////////////////////////////////////// /// @brief saves a new document -/// @startDocuBlock documents_collectionSave +/// @startDocuBlock documentsCollectionSave /// `collection.save(data)` /// /// Creates a new document in the *collection* from the given *data*. The @@ -7422,7 +7423,7 @@ static v8::Handle JS_TruncateDatafileVocbaseCol (v8::Arguments const& //////////////////////////////////////////////////////////////////////////////// /// @brief returns the type of a collection -/// @startDocuBlock col_type +/// @startDocuBlock collectionType /// `collection.type()` /// /// Returns the type of a collection. Possible values are: @@ -7463,7 +7464,7 @@ static v8::Handle JS_TypeVocbaseCol (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief unloads a collection -/// @startDocuBlock collection_unload +/// @startDocuBlock collectionUnload /// `collection.unload()` /// /// Starts unloading a collection from memory. 
Note that unloading is deferred @@ -7560,7 +7561,7 @@ static v8::Handle WrapVocBase (TRI_vocbase_t const* database) { //////////////////////////////////////////////////////////////////////////////// /// @brief selects a collection from the vocbase -/// @startDocuBlock collection_databaseCollectionName +/// @startDocuBlock collectionDatabaseCollectionName /// `db.collection-name` /// /// Returns the collection with the given *collection-name*. If no such @@ -7796,7 +7797,7 @@ static TRI_vocbase_col_t* GetCollectionFromArgument (TRI_vocbase_t* vocbase, //////////////////////////////////////////////////////////////////////////////// /// @brief returns a single collection or null -/// @startDocuBlock collection_databaseName +/// @startDocuBlock collectionDatabaseName /// `db._collection(collection-name)` /// /// Returns the collection with the given name or null if no such collection @@ -7872,7 +7873,7 @@ static v8::Handle JS_CollectionVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief returns all collections -/// @startDocuBlock collections_databaseName +/// @startDocuBlock collectionsDatabaseName /// `db._collections()` /// /// Returns all collections of the given database. @@ -8001,7 +8002,7 @@ static v8::Handle JS_CompletionsVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief creates a new document or edge collection -/// @startDocuBlock collection_databaseCreate +/// @startDocuBlock collectionDatabaseCreate /// `db._create(collection-name)` /// /// Creates a new document collection named *collection-name*. 
@@ -8104,7 +8105,7 @@ static v8::Handle JS_CreateVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief creates a new document collection -/// @startDocuBlock col_createDocumentaion +/// @startDocuBlock collectionCreateDocumentaion /// `db._createDocumentCollection(collection-name)` /// /// `db._createDocumentCollection(collection-name, properties)` @@ -8121,7 +8122,7 @@ static v8::Handle JS_CreateDocumentCollectionVocbase (v8::Arguments c //////////////////////////////////////////////////////////////////////////////// /// @brief creates a new edge collection -/// @startDocuBlock col_createEdgeCollection +/// @startDocuBlock collectionCreateEdgeCollection /// `db._createEdgeCollection(collection-name)` /// /// Creates a new edge collection named *collection-name*. If the @@ -8151,7 +8152,7 @@ static v8::Handle JS_CreateEdgeCollectionVocbase (v8::Arguments const //////////////////////////////////////////////////////////////////////////////// /// @brief removes a document -/// @startDocuBlock documents_collectionRemove +/// @startDocuBlock documentsCollectionRemove /// `db._remove(document)` /// /// Removes a document. If there is revision mismatch, then an error is thrown. @@ -8238,7 +8239,7 @@ static v8::Handle JS_RemoveVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief looks up a document and returns it -/// @startDocuBlock documents_documentName +/// @startDocuBlock documentsDocumentName /// `db._document(document)` /// /// This method finds a document given its identifier. 
It returns the document @@ -8273,7 +8274,7 @@ static v8::Handle JS_DocumentVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief checks whether a document exists -/// @startDocuBlock documents_documentExists +/// @startDocuBlock documentsDocumentExists /// `db._exists(document)` /// /// This method determines whether a document exists given its identifier. @@ -8298,7 +8299,7 @@ static v8::Handle JS_ExistsVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief replaces a document -/// @startDocuBlock documents_documentReplace +/// @startDocuBlock documentsDocumentReplace /// `db._replace(document, data)` /// /// The method returns a document with the attributes *_id*, *_rev* and @@ -8347,7 +8348,7 @@ static v8::Handle JS_ReplaceVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief update a document -/// @startDocuBlock documents_documentUpdate +/// @startDocuBlock documentsDocumentUpdate /// `db._update(document, data, overwrite, keepNull, waitForSync)` /// /// Updates an existing *document*. The *document* must be a document in @@ -8404,7 +8405,7 @@ static v8::Handle JS_UpdateVocbase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief return the server version string -/// @startDocuBlock +/// @startDocuBlock databaseVersion /// `db._version()` /// /// Returns the server version string. @@ -8419,7 +8420,7 @@ static v8::Handle JS_VersionServer (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief return the path to database files -/// @startDocuBlock database_path +/// @startDocuBlock databasePath /// `db._path()` /// /// Returns the filesystem path of the current database as a string. 
@@ -8440,7 +8441,7 @@ static v8::Handle JS_PathDatabase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief return the database id -/// @startDocuBlock database_id +/// @startDocuBlock databaseId /// `db._id()` /// /// Returns the id of the current database as a string. @@ -8461,7 +8462,7 @@ static v8::Handle JS_IdDatabase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief return the database name -/// @startDocuBlock database_name +/// @startDocuBlock databaseName /// `db._name()` /// /// Returns the name of the current database as a string. @@ -8482,7 +8483,7 @@ static v8::Handle JS_NameDatabase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief return the database type -/// @startDocuBlock database_isSystem +/// @startDocuBlock databaseIsSystem /// `db._isSystem()` /// /// Returns whether the currently used database is the *_system* database. @@ -8507,7 +8508,7 @@ static v8::Handle JS_IsSystemDatabase (v8::Arguments const& argv) { //////////////////////////////////////////////////////////////////////////////// /// @brief change the current database -/// @startDocuBlock database_useDatabase +/// @startDocuBlock databaseUseDatabase /// `db._useDatabase(name)` /// /// Changes the current database to the database specified by *name*. Note @@ -8650,7 +8651,7 @@ static v8::Handle ListDatabasesCoordinator (v8::Arguments const& argv //////////////////////////////////////////////////////////////////////////////// /// @brief return the list of all existing databases -/// @startDocuBlock database_listDatabase +/// @startDocuBlock databaseListDatabase /// `db._listDatabases()` /// /// Returns the list of all databases. 
This method can only be used from within @@ -8814,7 +8815,7 @@ static v8::Handle CreateDatabaseCoordinator (v8::Arguments const& arg //////////////////////////////////////////////////////////////////////////////// /// @brief create a new database -/// @startDocuBlock database_createDatabase +/// @startDocuBlock databaseCreateDatabase /// `db._createDatabase(name, options, users)` /// /// Creates a new database with the name specified by *name*. @@ -9010,7 +9011,7 @@ static v8::Handle DropDatabaseCoordinator (v8::Arguments const& argv) //////////////////////////////////////////////////////////////////////////////// /// @brief drop an existing database -/// @startDocuBlock database_dropDatabase +/// @startDocuBlock databaseDropDatabase /// `db._dropDatabase(name)` /// /// Drops the database specified by *name*. The database specified by diff --git a/js/server/modules/org/arangodb/arango-collection.js b/js/server/modules/org/arangodb/arango-collection.js index dad73282a1..cd02f3ace8 100644 --- a/js/server/modules/org/arangodb/arango-collection.js +++ b/js/server/modules/org/arangodb/arango-collection.js @@ -83,7 +83,7 @@ ArangoCollection.prototype.toArray = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief truncates a collection -/// @startDocuBlock collection_truncates +/// @startDocuBlock collectionTruncate /// `collection.truncate()` /// /// Truncates a *collection*, removing all documents but keeping all its @@ -93,15 +93,13 @@ ArangoCollection.prototype.toArray = function () { /// /// Truncates a collection: /// -/// arango> col = db.examples; -/// [ArangoCollection 91022, "examples" (status new born)] -/// arango> col.save({ "Hello" : "World" }); -/// { "_id" : "91022/1532814", "_rev" : 1532814 } -/// arango> col.count(); -/// 1 -/// arango> col.truncate(); -/// arango> col.count(); -/// 0 +/// @EXAMPLE_ARANGOSH_OUTPUT{collectionTruncate} +/// col = db.examples; +/// col.save({ "Hello" : "World" }); +/// 
col.count(); +/// col.truncate(); +/// col.count(); +/// @END_EXAMPLE_ARANGOSH_OUTPUT /// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// @@ -337,7 +335,7 @@ ArangoCollection.prototype.any = function () { /// @fn JSF_ArangoCollection_prototype_first /// /// @brief selects the n first documents in the collection -/// @startDocuBlock documents_collectionFirst +/// @startDocuBlock documentsCollectionFirst /// `collection.first(count)` /// /// The *first* method returns the n first documents from the collection, in /// @@ -419,7 +417,7 @@ ArangoCollection.prototype.first = function (count) { /// @fn JSF_ArangoCollection_prototype_last /// /// @brief selects the n last documents in the collection -/// @startDocuBlock documents_collectionLast +/// @startDocuBlock documentsCollectionLast /// `collection.last(count)` /// /// The *last* method returns the n last documents from the collection, in /// @@ -884,18 +882,18 @@ ArangoCollection.prototype.updateByExample = function (example, //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a bitarray index exists -/// -/// @FUN{@FA{collection}.ensureBitarray(@FA{field1}, @FA{value1}, ..., @FA{fieldn}, @FA{valuen})} +/// @startDocuBlock collectionEnsureBitArray +/// `collection.ensureBitarray(field*1*, value*1*, ..., field*n*, value*n*)` /// /// Creates a bitarray index on documents using attributes as paths to the -/// fields (@FA{field1},..., @FA{fieldn}). A value (@FA{value1},...,@FA{valuen}) +/// fields (*field1*,..., *fieldn*). A value (*value1*,...,*valuen*) /// consists of an array of possible values that the field can take. At least /// one field and one set of possible values must be given. /// /// All documents, which do not have *all* of the attribute paths are ignored /// (that is, are not part of the bitarray index, they are however stored within /// the collection). 
A document which contains all of the attribute paths yet -/// has one or more values which are *not* part of the defined range of values +/// has one or more values which are **not** part of the defined range of values /// will be rejected and the document will not inserted within the /// collection. Note that, if a bitarray index is created subsequent to /// any documents inserted in the given collection, then the creation of the @@ -906,10 +904,10 @@ ArangoCollection.prototype.updateByExample = function (example, /// returned. /// /// In the example below we create a bitarray index with one field and that -/// field can have the values of either `0` or `1`. Any document which has the -/// attribute `x` defined and does not have a value of `0` or `1` will be +/// field can have the values of either *0* or *1*. Any document which has the +/// attribute *x* defined and does not have a value of *0* or *1* will be /// rejected and therefore not inserted within the collection. Documents without -/// the attribute `x` defined will not take part in the index. +/// the attribute *x* defined will not take part in the index. /// /// @code /// arango> arangod> db.example.ensureBitarray("x", [0,1]); @@ -924,9 +922,9 @@ ArangoCollection.prototype.updateByExample = function (example, /// @endcode /// /// In the example below we create a bitarray index with one field and that -/// field can have the values of either `0`, `1` or *other* (indicated by -/// `[]`). Any document which has the attribute `x` defined will take part in -/// the index. Documents without the attribute `x` defined will not take part in +/// field can have the values of either *0*, *1* or *other* (indicated by +/// *[]*). Any document which has the attribute *x* defined will take part in +/// the index. Documents without the attribute *x* defined will not take part in /// the index. 
/// /// @code @@ -941,12 +939,12 @@ ArangoCollection.prototype.updateByExample = function (example, /// } /// @endcode /// -/// In the example below we create a bitarray index with two fields. Field `x` -/// can have the values of either `0` or `1`; while field `y` can have the values -/// of `2` or `"a"`. A document which does not have *both* attributes `x` and `y` +/// In the example below we create a bitarray index with two fields. Field *x* +/// can have the values of either *0* or *1*; while field *y* can have the values +/// of *2* or *"a"*. A document which does not have *both* attributes *x* and *y* /// will not take part within the index. A document which does have both attributes -/// `x` and `y` defined must have the values `0` or `1` for attribute `x` and -/// `2` or `a` for attribute `y`, otherwise the document will not be inserted +/// *x* and *y* defined must have the values *0* or *1* for attribute *x* and +/// *2* or *1* for attribute *y*, otherwise the document will not be inserted /// within the collection. /// /// @code @@ -961,12 +959,12 @@ ArangoCollection.prototype.updateByExample = function (example, /// } /// @endcode /// -/// In the example below we create a bitarray index with two fields. Field `x` -/// can have the values of either `0` or `1`; while field `y` can have the -/// values of `2`, `"a"` or *other* . A document which does not have *both* -/// attributes `x` and `y` will not take part within the index. A document -/// which does have both attributes `x` and `y` defined must have the values `0` -/// or `1` for attribute `x` and any value for attribute `y` will be acceptable, +/// In the example below we create a bitarray index with two fields. Field *x* +/// can have the values of either *0* or *1*; while field *y* can have the +/// values of *2*, *"a"* or *other* . A document which does not have *both* +/// attributes *x* and *y* will not take part within the index. 
A document +/// which does have both attributes *x* and *y* defined must have the values *0* +/// or *1* for attribute *x* and any value for attribute *y* will be acceptable, /// otherwise the document will not be inserted within the collection. /// /// @code @@ -980,6 +978,7 @@ ArangoCollection.prototype.updateByExample = function (example, /// "isNewlyCreated" : true /// } /// @endcode +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureBitarray = function () { @@ -1000,8 +999,8 @@ ArangoCollection.prototype.ensureBitarray = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a bitarray index exists -/// -/// @FUN{@FA{collection}.ensureUndefBitarray(@FA{field1}, @FA{value1}, ..., @FA{fieldn}, @FA{valuen})} +/// @startDocuBlock collectionEnsureUndefBitArray +/// `collection.ensureUndefBitarray(field*1*, value*1*, ..., field*n*, value*n*)` /// /// Creates a bitarray index on all documents using attributes as paths to /// the fields. At least one attribute and one set of possible values must be given. @@ -1012,6 +1011,7 @@ ArangoCollection.prototype.ensureBitarray = function () { /// is returned. /// /// @verbinclude fluent14 +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureUndefBitarray = function () { @@ -1033,17 +1033,17 @@ ArangoCollection.prototype.ensureUndefBitarray = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a cap constraint exists +/// @startDocuBlock collectionEnsureCapConstraint +/// `collection.ensureCapConstraint(size, byteSize)` /// -/// @FUN{@FA{collection}.ensureCapConstraint(@FA{size}, {byteSize})} -/// -/// Creates a size restriction aka cap for the collection of @FA{size} -/// documents and/or @FA{byteSize} data size. 
If the restriction is in place -/// and the (@FA{size} plus one) document is added to the collection, or the -/// total active data size in the collection exceeds @FA{byteSize}, then the +/// Creates a size restriction aka cap for the collection of *size* +/// documents and/or *byteSize* data size. If the restriction is in place +/// and the (*size* plus one) document is added to the collection, or the +/// total active data size in the collection exceeds *byteSize*, then the /// least recently created or updated documents are removed until all /// constraints are satisfied. /// -/// It is allowed to specify either @FA{size} or @FA{byteSize}, or both at +/// It is allowed to specify either *size* or *byteSize*, or both at /// the same time. If both are specified, then the automatic document removal /// will be triggered by the first non-met constraint. /// @@ -1055,11 +1055,12 @@ ArangoCollection.prototype.ensureUndefBitarray = function () { /// Note that this does not imply any restriction of the number of revisions /// of documents. /// -/// *Examples* +/// @EXAMPLES /// /// Restrict the number of document to at most 10 documents: /// /// @verbinclude ensure-cap-constraint +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureCapConstraint = function (size, byteSize) { @@ -1074,8 +1075,8 @@ ArangoCollection.prototype.ensureCapConstraint = function (size, byteSize) { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a skiplist index exists -/// -/// @FUN{ensureUniqueSkiplist(@FA{field1}, @FA{field2}, ...,@FA{fieldn})} +/// @startDocuBlock ensureUniqueSkiplist +/// `ensureUniqueSkiplist(field*1*, field*2*, ...,field*n*)` /// /// Creates a skiplist index on all documents using attributes as paths to /// the fields. At least one attribute must be given. 
@@ -1086,6 +1087,7 @@ ArangoCollection.prototype.ensureCapConstraint = function (size, byteSize) { /// is returned. /// /// @verbinclude unique-skiplist +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureUniqueSkiplist = function () { @@ -1100,8 +1102,8 @@ ArangoCollection.prototype.ensureUniqueSkiplist = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a multi skiplist index exists -/// -/// @FUN{ensureSkiplist(@FA{field1}, @FA{field2}, ...,@FA{fieldn})} +/// @startDocuBlock ensureSkiplist +/// `ensureSkiplist(field*1*, field*2*, ...,field*n*)` /// /// Creates a multi skiplist index on all documents using attributes as paths to /// the fields. At least one attribute must be given. @@ -1112,6 +1114,7 @@ ArangoCollection.prototype.ensureUniqueSkiplist = function () { /// is returned. /// /// @verbinclude multi-skiplist +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureSkiplist = function () { @@ -1125,16 +1128,16 @@ ArangoCollection.prototype.ensureSkiplist = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a fulltext index exists +/// @startDocuBlock ensureFulltextIndex +/// `ensureFulltextIndex(field, minWordLength)` /// -/// @FUN{ensureFulltextIndex(@FA{field}, @FA{minWordLength})} -/// -/// Creates a fulltext index on all documents on attribute @FA{field}. -/// All documents, which do not have the attribute @FA{field} or that have a -/// non-textual value inside their @FA{field} attribute are ignored. +/// Creates a fulltext index on all documents on attribute *field*. +/// All documents, which do not have the attribute *field* or that have a +/// non-textual value inside their *field* attribute are ignored. 
/// /// The minimum length of words that are indexed can be specified with the -/// @FA{minWordLength} parameter. Words shorter than @FA{minWordLength} -/// characters will not be indexed. @FA{minWordLength} has a default value of 2, +/// @FA{minWordLength} parameter. Words shorter than *minWordLength* +/// characters will not be indexed. *minWordLength* has a default value of 2, /// but this value might be changed in future versions of ArangoDB. It is thus /// recommended to explicitly specify this value /// @@ -1142,6 +1145,7 @@ ArangoCollection.prototype.ensureSkiplist = function () { /// is returned. /// /// @verbinclude fulltext +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureFulltextIndex = function (field, minLength) { @@ -1160,10 +1164,10 @@ ArangoCollection.prototype.ensureFulltextIndex = function (field, minLength) { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a unique constraint exists +/// @startDocuBlock ensureUniqueConstraint +/// `ensureUniqueConstraint(field*1*, field*2*, ...,field*n*)` /// -/// @FUN{ensureUniqueConstraint(@FA{field1}, @FA{field2}, ...,@FA{fieldn})} -/// -/// Creates a unique hash index on all documents using @FA{field1}, @FA{field2}, +/// Creates a unique hash index on all documents using *field1*, *field2*, /// ... as attribute paths. At least one attribute path must be given. /// /// When a unique constraint is in effect for a collection, then all documents @@ -1173,7 +1177,7 @@ ArangoCollection.prototype.ensureFulltextIndex = function (field, minLength) { /// document is ignored by the index. /// /// Note that non-existing attribute paths in a document are treated as if the -/// value were @LIT{null}. +/// value were *null*. /// /// In case that the index was successfully created, the index identifier is /// returned. 
@@ -1181,6 +1185,7 @@ ArangoCollection.prototype.ensureFulltextIndex = function (field, minLength) { /// *Examples* /// /// @verbinclude shell-index-create-unique-constraint +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureUniqueConstraint = function () { @@ -1195,14 +1200,14 @@ ArangoCollection.prototype.ensureUniqueConstraint = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a hash index exists +/// @startDocuBlock ensureHashIndex +/// `ensureHashIndex(field*1*, field*2*, ...,field*n*)` /// -/// @FUN{ensureHashIndex(@FA{field1}, @FA{field2}, ...,@FA{fieldn})} -/// -/// Creates a non-unique hash index on all documents using @FA{field1}, @FA{field2}, +/// Creates a non-unique hash index on all documents using *field1*, *field2*, /// ... as attribute paths. At least one attribute path must be given. /// /// Note that non-existing attribute paths in a document are treated as if the -/// value were @LIT{null}. +/// value were *null*. /// /// In case that the index was successfully created, the index identifier /// is returned. 
@@ -1210,6 +1215,7 @@ ArangoCollection.prototype.ensureUniqueConstraint = function () { /// *Examples* /// /// @verbinclude shell-index-create-hash-index +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureHashIndex = function () { @@ -1223,10 +1229,10 @@ ArangoCollection.prototype.ensureHashIndex = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief ensures that a geo index exists +/// @startDocuBlock collectionEnsureGeoIndex +/// `collection.ensureGeoIndex(location)` /// -/// @FUN{@FA{collection}.ensureGeoIndex(@FA{location})} -/// -/// Creates a geo-spatial index on all documents using @FA{location} as path to +/// Creates a geo-spatial index on all documents using *location* as path to /// the coordinates. The value of the attribute must be a list with at least two /// double values. The list must contain the latitude (first value) and the /// longitude (second value). All documents, which do not have the attribute @@ -1235,25 +1241,24 @@ ArangoCollection.prototype.ensureHashIndex = function () { /// In case that the index was successfully created, the index identifier is /// returned. /// -/// @FUN{@FA{collection}.ensureGeoIndex(@FA{location}, @LIT{true})} +/// `collection.ensureGeoIndex(location, true)` /// /// As above which the exception, that the order within the list is longitude /// followed by latitude. This corresponds to the format described in +/// [positions](http://geojson.org/geojson-spec.html) /// -/// http://geojson.org/geojson-spec.html#positions +/// `collection.ensureGeoIndex(latitude, longitude)` /// -/// @FUN{@FA{collection}.ensureGeoIndex(@FA{latitude}, @FA{longitude})} -/// -/// Creates a geo-spatial index on all documents using @FA{latitude} and -/// @FA{longitude} as paths the latitude and the longitude. 
The value of the
-/// attribute @FA{latitude} and of the attribute @FA{longitude} must a
+/// Creates a geo-spatial index on all documents using *latitude* and
+/// *longitude* as paths to the latitude and the longitude. The value of the
+/// attribute *latitude* and of the attribute *longitude* must be a
/// double. All documents, which do not have the attribute paths or which values
/// are not suitable, are ignored.
///
/// In case that the index was successfully created, the index identifier
/// is returned.
///
-/// *Examples*
+/// @EXAMPLES
///
/// Create an geo index for a list attribute:
///
/// @verbinclude ensure-geo-index-list
@@ -1262,6 +1267,7 @@ ArangoCollection.prototype.ensureHashIndex = function () {
/// Create an geo index for a hash array attribute:
///
/// @verbinclude ensure-geo-index-array
+/// @endDocuBlock
////////////////////////////////////////////////////////////////////////////////

ArangoCollection.prototype.ensureGeoIndex = function (lat, lon) {
@@ -1295,17 +1301,18 @@ ArangoCollection.prototype.ensureGeoIndex = function (lat, lon) {

////////////////////////////////////////////////////////////////////////////////
/// @brief ensures that a geo constraint exists
+/// @startDocuBlock collectionEnsureGeoConstraint
+/// `collection.ensureGeoConstraint(location, ignore-null)`
///
-/// @FUN{@FA{collection}.ensureGeoConstraint(@FA{location}, @FA{ignore-null})}
+/// `collection.ensureGeoConstraint(location, true, ignore-null)`
///
-/// @FUN{@FA{collection}.ensureGeoConstraint(@FA{location}, @LIT{true}, @FA{ignore-null})}
+/// `collection.ensureGeoConstraint(latitude, longitude, ignore-null)`
///
-/// @FUN{@FA{collection}.ensureGeoConstraint(@FA{latitude}, @FA{longitude}, @FA{ignore-null})}
-///
-/// Works like @FN{ensureGeoIndex} but requires that the documents contain
-/// a valid geo definition. If @FA{ignore-null} is true, then documents with
-/// a null in @FA{location} or at least one null in @FA{latitude} or
-/// @FA{longitude} are ignored. 
+/// Works like *ensureGeoIndex* but requires that the documents contain +/// a valid geo definition. If *ignore-null* is true, then documents with +/// a null in *location* or at least one null in *latitude* or +/// *longitude* are ignored. +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.ensureGeoConstraint = function (lat, lon, ignoreNull) { @@ -1404,10 +1411,11 @@ ArangoCollection.prototype.lookupSkiplist = function () { //////////////////////////////////////////////////////////////////////////////// /// @brief looks up a fulltext index +/// @startDocuBlock lookUpFulltextIndex +/// `lookupFulltextIndex(field, minLength)` /// -/// @FUN{lookupFulltextIndex(@FA{field}, @FA{minLength}} -/// -/// Checks whether a fulltext index on the given attribute @FA{field} exists. +/// Checks whether a fulltext index on the given attribute *field* exists. +/// @endDocuBlock //////////////////////////////////////////////////////////////////////////////// ArangoCollection.prototype.lookupFulltextIndex = function (field, minLength) { From e2fc348467f445fc4ee1029428437f38bbc0f9c6 Mon Sep 17 00:00:00 2001 From: scottashton Date: Mon, 23 Jun 2014 15:11:36 +0200 Subject: [PATCH 12/13] fixed broken links and typos in general-graph --- js/common/modules/org/arangodb/general-graph.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/js/common/modules/org/arangodb/general-graph.js b/js/common/modules/org/arangodb/general-graph.js index 76e892883c..1304951712 100644 --- a/js/common/modules/org/arangodb/general-graph.js +++ b/js/common/modules/org/arangodb/general-graph.js @@ -2998,7 +2998,7 @@ Graph.prototype._countCommonProperties = function(vertex1Example, vertex2Example /// ~ var db = require("internal").db; /// var examples = require("org/arangodb/graph-examples/example-graph.js"); /// var g = examples.loadGraph("routeplanner"); -/// | g._absoluteEccentricity({}, 
{startVertexCollectionRestriction : 'city',
+/// | g._absoluteEccentricity({}, {startVertexCollectionRestriction : 'germanCity',
///                       direction : 'outbound', weight : 'distance'});
/// @END_EXAMPLE_ARANGOSH_OUTPUT
///
@@ -3029,7 +3029,7 @@ Graph.prototype._absoluteEccentricity = function(vertexExample, options) {
/// [eccentricity](http://en.wikipedia.org/wiki/Distance_%28graph_theory%29)
/// *of the vertices defined by the examples.*
///
-/// Similar to [_absoluteEccentricity](#_absoluteEccentricity) but returns a normalized result.
+/// Similar to [_absoluteEccentricity](#_absoluteeccentricity) but returns a normalized result.
///
/// @EXAMPLES
///
@@ -3160,7 +3160,7 @@ Graph.prototype._absoluteCloseness = function(vertexExample, options) {
/// [closeness](http://en.wikipedia.org/wiki/Centrality#Closeness_centrality)
/// *of graphs vertices.*
///
-/// Similar to [_absoluteCloseness](#_absoluteCloseness) but returns a normalized value.
+/// Similar to [_absoluteCloseness](#_absolutecloseness) but returns a normalized value.
///
/// @EXAMPLES
///
@@ -3252,7 +3252,7 @@ Graph.prototype._closeness = function(options) {
/// g._absoluteBetweenness({weight : 'distance'});
/// @END_EXAMPLE_ARANGOSH_OUTPUT
///
-/// A route planner example, the absolute closeness of all cities regarding only
+/// A route planner example, the absolute betweenness of all cities regarding only
/// outbound paths.
///
/// @EXAMPLE_ARANGOSH_OUTPUT{generalGraphModuleAbsBetweenness3}
@@ -3288,7 +3288,7 @@ Graph.prototype._absoluteBetweenness = function(options) {
/// [betweenness](http://en.wikipedia.org/wiki/Betweenness_centrality)
/// *of graphs vertices.*
///
-/// Similar to [_absoluteBetweeness](#_absoluteBetweeness) but returns normalized values.
+/// Similar to [_absoluteBetweenness](#_absolutebetweenness) but returns normalized values. 
/// /// @EXAMPLES /// @@ -3301,7 +3301,7 @@ Graph.prototype._absoluteBetweenness = function(options) { /// g._betweenness(); /// @END_EXAMPLE_ARANGOSH_OUTPUT /// -/// A route planner example, the closeness of all locations. +/// A route planner example, the betweenness of all locations. /// This considers the actual distances. /// /// @EXAMPLE_ARANGOSH_OUTPUT{generalGraphModuleBetweenness2} @@ -3311,7 +3311,7 @@ Graph.prototype._absoluteBetweenness = function(options) { /// g._betweenness({weight : 'distance'}); /// @END_EXAMPLE_ARANGOSH_OUTPUT /// -/// A route planner example, the closeness of all cities regarding only +/// A route planner example, the betweenness of all cities regarding only /// outbound paths. /// /// @EXAMPLE_ARANGOSH_OUTPUT{generalGraphModuleBetweenness3} From 56d8db54aaf08c09c527f84862d1e9398e19ed1c Mon Sep 17 00:00:00 2001 From: gschwab Date: Mon, 23 Jun 2014 15:14:16 +0200 Subject: [PATCH 13/13] docu --- Documentation/Books/Users/General-Graphs/Management.mdpp | 2 +- .../frontend/js/modules/org/arangodb/general-graph.js | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Documentation/Books/Users/General-Graphs/Management.mdpp b/Documentation/Books/Users/General-Graphs/Management.mdpp index 25cac50b44..f6885032cb 100644 --- a/Documentation/Books/Users/General-Graphs/Management.mdpp +++ b/Documentation/Books/Users/General-Graphs/Management.mdpp @@ -30,7 +30,7 @@ The edge definitions for a graph is an Array containing arbitrary many directed Each graph can have an arbitrary amount of orphan collections. These are vertex collections (type *document*), that are not used in an edge definition of the graph. If the graph is extended with an edge definition using one of the orphans, -it will be removed from the orphan collection automatically. +it will be removed from the set of orphan collection automatically. 
!SUBSUBSECTION Add diff --git a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/general-graph.js b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/general-graph.js index 0072197e63..81fc22d68f 100644 --- a/js/apps/system/aardvark/frontend/js/modules/org/arangodb/general-graph.js +++ b/js/apps/system/aardvark/frontend/js/modules/org/arangodb/general-graph.js @@ -1556,7 +1556,7 @@ var _extendEdgeDefinitions = function (edgeDefinition) { /// /// * *graphName*: Unique identifier of the graph /// * *edgeDefinitions* (optional): List of relation definition objects -/// * *orphanCollections* (optional): List of additonal vertex collection names +/// * *orphanCollections* (optional): List of additional vertex collection names /// /// *Examples* /// @@ -3079,7 +3079,7 @@ Graph.prototype._eccentricity = function(options) { /// *of the vertices defined by the examples.* /// /// The function accepts an id, an example, a list of examples or even an empty -/// example as parameter for vertexExample. +/// example as parameter for *vertexExample*. /// /// *Parameter* /// @@ -3123,7 +3123,7 @@ Graph.prototype._eccentricity = function(options) { /// g._absoluteCloseness({}, {weight : 'distance'}); /// @END_EXAMPLE_ARANGOSH_OUTPUT /// -/// A route planner example, the absolute closeness of all germanCities regarding only +/// A route planner example, the absolute closeness of all german Cities regarding only /// outbound paths. /// /// @EXAMPLE_ARANGOSH_OUTPUT{generalGraphModuleAbsCloseness3}