mirror of https://gitee.com/bigwinds/arangodb
Merge branch '1.1' of github.com:triAGENS/ArangoDB into 1.1
Conflicts: Documentation/Makefile.files
This commit is contained in: commit 3aebed3020

CHANGELOG: 36 changed lines
@@ -8,10 +8,38 @@ v1.1.beta1 (2012-XX-XX)

- In 1.1, we have introduced types for collections: regular documents go into document
  collections, and edges go into edge collections. The prefixing (db.xxx vs. edges.xxx)
  is gone in 1.1. edges.xxx can still be used to access collections, however, it will
  not determine the collection type anymore. To create an edge collection in 1.1, you can
  use db._createEdgeCollection(). And there's also db._createDocumentCollection().
  db._create() is also still there and will create a document collection.

  works slightly differently in 1.1: edges.xxx can still be used to access collections,
  however, it will not determine the type of existing collections anymore. To create an
  edge collection in 1.1, you can use db._createEdgeCollection() or edges._create().
  And there's of course also db._createDocumentCollection().
  db._create() is also still there and will create a document collection by default,
  whereas edges._create() will create an edge collection.

* the server now handles requests with invalid Content-Length header values as follows:

  - if Content-Length is negative, the server will respond instantly with HTTP 411
    (length required)

  - if Content-Length is positive but shorter than the supplied body, the server will
    respond with HTTP 400 (bad request)

  - if Content-Length is positive but longer than the supplied body, the server will
    wait for the client to send the missing bytes. The server allows 90 seconds for this
    and will close the connection if the client does not send the remaining data

  - if Content-Length is bigger than the maximum allowed size (512 MB), the server will
    fail with HTTP 413 (request entity too large).

  - if the length of the HTTP headers is greater than the maximum allowed size (1 MB),
    the server will fail with HTTP 431 (request header fields too large)

* issue #247: added AQL function MERGE_RECURSIVE

* issue #246: added clear() function in arangosh

* issue #245: Documentation: Central place for naming rules/limits inside ArangoDB

* reduced size of hash index elements by 50 %, allowing more index elements to fit in
  memory

* issue #235: GUI Shell throws Error:ReferenceError: db is not defined
@@ -35,6 +35,7 @@ WIKI = \
    ArangoErrors \
    CommandLine \
    Compiling \
    Communication \
    DbaManual \
    DbaManualBasics \
    DbaManualAuthentication \

@@ -74,6 +75,7 @@ WIKI = \
    JSModuleInternal \
    JSModules \
    Key-Value \
    NamingConventions \
    RefManual \
    RestDocument \
    RestEdge \
@@ -28,7 +28,7 @@ case "$1" in
        log_daemon_msg "Starting $DESC" "$NAME"

        test -d $PIDDIR || mkdir $PIDDIR
        chown arango $PIDDIR
        chown arangodb $PIDDIR

        $DAEMON -c $CONF --pid-file "$PIDFILE" --supervisor --uid arangodb
        log_end_msg $?
@@ -120,6 +120,10 @@ case $TRI_OS_LONG in
    LDFLAGS='-L/usr/lib -L/opt/local/lib' # need to use OpenSSL from system
    OPTIONS="$OPTIONS --enable-all-in-one-libev --enable-all-in-one-v8 --enable-all-in-one-icu --disable-mruby"
    RESULTS="$RESULTS arangoirb"
    if [ "${TRI_MACH}" == "x86_64" ]; then
      X=$(uname -r)
      OPTIONS="$OPTIONS --build x86_64-apple-darwin${X}"
    fi
    ;;

  *)
@@ -403,13 +403,13 @@ BOOST_AUTO_TEST_CASE (EndpointHost) {
  CHECK_ENDPOINT_FEATURE(client, "tcp://localhost:8529", Host, "localhost");
  CHECK_ENDPOINT_FEATURE(client, "tcp://www.arangodb.org:8529", Host, "www.arangodb.org");
  CHECK_ENDPOINT_FEATURE(client, "tcp://arangodb.org:8529", Host, "arangodb.org");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[127.0.0.1]", Host, "[127.0.0.1]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[::]", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[127.0.0.1]:8529", Host, "[127.0.0.1]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[::]:8529", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[2001:0db8:0000:0000:0000:ff00:0042:8329]", Host, "[2001:0db8:0000:0000:0000:ff00:0042:8329]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[2001:0db8:0000:0000:0000:ff00:0042:8329]:8529", Host, "[2001:0db8:0000:0000:0000:ff00:0042:8329]");
  CHECK_ENDPOINT_FEATURE(client, "http@tcp://[::]:8529", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[127.0.0.1]", Host, "127.0.0.1");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[::]", Host, "::");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[127.0.0.1]:8529", Host, "127.0.0.1");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[::]:8529", Host, "::");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[2001:0db8:0000:0000:0000:ff00:0042:8329]", Host, "2001:0db8:0000:0000:0000:ff00:0042:8329");
  CHECK_ENDPOINT_FEATURE(client, "tcp://[2001:0db8:0000:0000:0000:ff00:0042:8329]:8529", Host, "2001:0db8:0000:0000:0000:ff00:0042:8329");
  CHECK_ENDPOINT_FEATURE(client, "http@tcp://[::]:8529", Host, "::");

  CHECK_ENDPOINT_FEATURE(client, "ssl://127.0.0.1", Host, "127.0.0.1");
  CHECK_ENDPOINT_FEATURE(client, "ssl://localhost", Host, "localhost");

@@ -419,13 +419,13 @@ BOOST_AUTO_TEST_CASE (EndpointHost) {
  CHECK_ENDPOINT_FEATURE(client, "ssl://192.168.173.13:8529", Host, "192.168.173.13");
  CHECK_ENDPOINT_FEATURE(client, "ssl://localhost:8529", Host, "localhost");
  CHECK_ENDPOINT_FEATURE(client, "ssl://www.arangodb.org:8529", Host, "www.arangodb.org");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[127.0.0.1]", Host, "[127.0.0.1]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[::]", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[127.0.0.1]:8529", Host, "[127.0.0.1]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[::]:8529", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[2001:0db8:0000:0000:0000:ff00:0042:8329]", Host, "[2001:0db8:0000:0000:0000:ff00:0042:8329]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[2001:0db8:0000:0000:0000:ff00:0042:8329]:8529", Host, "[2001:0db8:0000:0000:0000:ff00:0042:8329]");
  CHECK_ENDPOINT_FEATURE(client, "http@ssl://[::]:8529", Host, "[::]");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[127.0.0.1]", Host, "127.0.0.1");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[::]", Host, "::");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[127.0.0.1]:8529", Host, "127.0.0.1");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[::]:8529", Host, "::");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[2001:0db8:0000:0000:0000:ff00:0042:8329]", Host, "2001:0db8:0000:0000:0000:ff00:0042:8329");
  CHECK_ENDPOINT_FEATURE(client, "ssl://[2001:0db8:0000:0000:0000:ff00:0042:8329]:8529", Host, "2001:0db8:0000:0000:0000:ff00:0042:8329");
  CHECK_ENDPOINT_FEATURE(client, "http@ssl://[::]:8529", Host, "::");

  CHECK_ENDPOINT_FEATURE(client, "unix:///tmp/socket", Host, "localhost");
  CHECK_ENDPOINT_FEATURE(client, "unix:///tmp/socket/arango.sock", Host, "localhost");
@@ -620,7 +620,7 @@ BOOST_AUTO_TEST_CASE (EndpointClientSslIpV6WithPortHttp) {
  BOOST_CHECK_EQUAL(Endpoint::PROTOCOL_HTTP, e->getProtocol());
  BOOST_CHECK_EQUAL(Endpoint::ENCRYPTION_SSL, e->getEncryption());
  BOOST_CHECK_EQUAL(AF_INET6, e->getDomain());
  BOOST_CHECK_EQUAL("[0001:0002:0003:0004:0005:0006:0007:0008]", e->getHost());
  BOOST_CHECK_EQUAL("0001:0002:0003:0004:0005:0006:0007:0008", e->getHost());
  BOOST_CHECK_EQUAL(43425, e->getPort());
  BOOST_CHECK_EQUAL("[0001:0002:0003:0004:0005:0006:0007:0008]:43425", e->getHostString());
  BOOST_CHECK_EQUAL(false, e->isConnected());

@@ -641,7 +641,7 @@ BOOST_AUTO_TEST_CASE (EndpointClientTcpIpv6WithoutPort) {
  BOOST_CHECK_EQUAL(Endpoint::PROTOCOL_HTTP, e->getProtocol());
  BOOST_CHECK_EQUAL(Endpoint::ENCRYPTION_NONE, e->getEncryption());
  BOOST_CHECK_EQUAL(AF_INET6, e->getDomain());
  BOOST_CHECK_EQUAL("[::]", e->getHost());
  BOOST_CHECK_EQUAL("::", e->getHost());
  BOOST_CHECK_EQUAL(8529, e->getPort());
  BOOST_CHECK_EQUAL("[::]:8529", e->getHostString());
  BOOST_CHECK_EQUAL(false, e->isConnected());
@@ -114,7 +114,7 @@ start-server:
        ($(VALGRIND) @builddir@/bin/arangod "$(VOCDIR)" $(SERVER_OPT) --pid-file $(PIDFILE) --watch-process $(PID) && rm -rf "$(VOCDIR)") &

        @test "$(PROTO)" == "unix" || (rm -f "$(STARTFILE)"; while [ ! -s "$(STARTFILE)" ]; do $(CURL) $(CURL_OPT) --insecure -X GET -s "$(PROTO)://$(VOCHOST):$(VOCPORT)/_api/version" > "$(STARTFILE)" || sleep 2; done)
        @(test "$(PROTO)" == "unix" && sleep 2) || true
        @(test "$(PROTO)" == "unix" && sleep 5) || true
        @rm -f "$(STARTFILE)"
        @echo "server has been started."
        @if [ "$(VALGRIND)" != "" ]; then echo "adding valgrind memorial time..."; sleep 75; else sleep 2; fi
@@ -381,8 +381,19 @@ TRI_aql_node_t* TRI_CreateNodeCollectionAql (TRI_aql_context_t* const context,

  if (strlen(name) == 0) {
    TRI_SetErrorContextAql(context, TRI_ERROR_QUERY_COLLECTION_NOT_FOUND, name);

    return NULL;
  }
  else {
    TRI_col_parameter_t parameters;

    parameters._isSystem = true;
    if (! TRI_IsAllowedCollectionName(&parameters, name)) {
      TRI_SetErrorContextAql(context, TRI_ERROR_ARANGO_ILLEGAL_NAME, name);

      return NULL;
    }
  }

  {
    TRI_aql_collection_hint_t* hint;
@@ -397,6 +397,7 @@ TRI_associative_pointer_t* TRI_InitialiseFunctionsAql (void) {
  // document functions
  REGISTER_FUNCTION("HAS", "HAS", true, false, "az,s");
  REGISTER_FUNCTION("MERGE", "MERGE", true, false, "a,a|+");
  REGISTER_FUNCTION("MERGE_RECURSIVE", "MERGE_RECURSIVE", true, false, "a,a|+");

  // geo functions
  REGISTER_FUNCTION("NEAR", "GEO_NEAR", false, false, "h,n,n,n|s");
@@ -322,18 +322,19 @@
/// happens to have the same name as a keyword, the name must be enclosed in
/// backticks.
///
/// Allowed characters in collection names are the letters @LIT{a} to @LIT{z}
/// (both in lower and upper case) and the numbers @LIT{0} to @LIT{9} and the
/// underscore (@LIT{_}) symbol. A collection name must start with either
/// a letter or a number, but not with an underscore.
/// Please refer to the @ref NamingConventions about collection naming
/// conventions.
///
/// @subsubsection AqlAttributeNames Attribute names
///
/// When referring to attributes of documents from a collection, the fully
/// qualified attribute name must be used. This is because multiple
/// collections with ambigious attribute names might be used in a query.
/// collections with ambiguous attribute names might be used in a query.
/// To avoid any ambiguity, it is not allowed to refer to an unqualified
/// attribute name.
///
/// Please refer to the @ref NamingConventions for more information about the
/// attribute naming conventions.
///
/// @EXAMPLE{aqlattributenamesvalid,active user with active friends}
///
@@ -562,8 +563,8 @@
/// Finally, the attribute value of both documents is compared using the aforementioned
/// data type and value comparison.
/// The comparisons are performed for all document attributes until there is an
/// unambigious comparison result. If an unambigious comparison result is found, the
/// comparison is finished. If there is no unambigious comparison result, the two
/// unambiguous comparison result. If an unambiguous comparison result is found, the
/// comparison is finished. If there is no unambiguous comparison result, the two
/// compared documents are considered equal.
///
/// @verbinclude aqlcompareexamples2
@@ -862,10 +863,46 @@
///
/// - @FN{MERGE(@FA{document1}\, @FA{document2}\, ... @FA{documentn})}: merges the documents
///   in @FA{document1} to @FA{documentn} into a single document. If document attribute
///   keys are ambigious, the merged result will contain the values of the documents
///   keys are ambiguous, the merged result will contain the values of the documents
///   contained later in the argument list.
/// - @FN{HAS(@FA{document}\, @FA{attributename})}: returns true if @FA{document} has an
///   attribute named @FA{attributename}, and false otherwise.
///
///   For example, two documents with distinct attribute names can easily be merged into one:
///   @code
///   RETURN MERGE(
///     { "user1" : { "name" : "J" } },
///     { "user2" : { "name" : "T" } }
///   )
///   [ { "user1" : { "name" : "J" },
///       "user2" : { "name" : "T" } } ]
///   @endcode
///   When merging documents with identical attribute names, the attribute values of the
///   latter documents will be used in the end result:
///   @code
///   RETURN MERGE(
///     { "users" : { "name" : "J" } },
///     { "users" : { "name" : "T" } }
///   )
///   [ { "users" : { "name" : "T" } } ]
///   @endcode
///   Please note that merging will only be done for top-level attributes. If you wish to
///   merge sub-attributes, you should consider using @LIT{MERGE_RECURSIVE} instead.
///
/// - @FN{MERGE_RECURSIVE(@FA{document1}\, @FA{document2}\, ... @FA{documentn})}: recursively
///   merges the documents in @FA{document1} to @FA{documentn} into a single document. If
///   document attribute keys are ambiguous, the merged result will contain the values of the
///   documents contained later in the argument list.
///
///   For example, two documents with distinct attribute names can easily be merged into one:
///   @code
///   RETURN MERGE_RECURSIVE(
///     { "user-1" : { "name" : "J", "livesIn" : { "city" : "LA" } } },
///     { "user-1" : { "age" : 42, "livesIn" : { "state" : "CA" } } }
///   )
///   [ { "user-1" : { "name" : "J", "livesIn" : { "city" : "LA", "state" : "CA" }, "age" : 42 } } ]
///   @endcode
///
/// - @FN{HAS(@FA{document}\, @FA{attributename})}: returns @LIT{true} if @FA{document} has an
///   attribute named @FA{attributename}, and @LIT{false} otherwise.
///
/// @subsubsection AqlFunctionsGeo Geo functions
///
@@ -0,0 +1,92 @@
////////////////////////////////////////////////////////////////////////////////
/// @brief explanation for ArangoDB's HTTP handling
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @page Communication HTTP Handling in ArangoDB
///
/// ArangoDB will always respond to client requests with HTTP 1.1. Clients should
/// therefore support HTTP version 1.1.
///
/// ArangoDB supports HTTP keep-alive. If the client does not send a @LIT{Connection}
/// header in its request, ArangoDB will assume the client wants to keep alive the
/// connection. If clients do not wish to use the keep-alive feature, they should
/// explicitly indicate that by sending a @LIT{Connection: Close} HTTP header in
/// the request.
///
/// Client authentication is done by using the @LIT{Authorization} HTTP header.
/// ArangoDB supports Basic authentication.
///
/// Authentication is optional if the server has been started with the option
/// @LIT{--server.disable-authentication}.
///
/// The following should be noted about how ArangoDB handles client errors in its
/// HTTP layer:
///
/// - ArangoDB will reject client requests with a negative value in the @LIT{Content-Length}
///   request header with @LIT{HTTP 411} (Length Required).
///
/// - if the client sends a @LIT{Content-Length} header with a value bigger than 0
///   for an HTTP GET, HEAD, or DELETE request, ArangoDB will process the request,
///   but will write a warning to its log file.
///
/// - when the client sends a @LIT{Content-Length} header that has a value that
///   is lower than the actual size of the body sent, ArangoDB will respond with
///   @LIT{HTTP 400} (Bad Request).
///
/// - if clients send a @LIT{Content-Length} value bigger than the actual size of the
///   body of the request, ArangoDB will wait for about 90 seconds for the client to
///   complete its request. If the client does not send the remaining body data within
///   this time, ArangoDB will close the connection.
///
/// - when clients send a body or a @LIT{Content-Length} value bigger than the maximum
///   allowed value (512 MB), ArangoDB will respond with @LIT{HTTP 413} (Request Entity
///   Too Large).
///
/// - if the overall length of the HTTP headers a client sends for one request exceeds
///   the maximum allowed size (1 MB), the server will fail with @LIT{HTTP 431}
///   (Request Header Fields Too Large).
///
/// - if clients request an HTTP method that is not supported by the server, ArangoDB
///   will return with @LIT{HTTP 405} (Method Not Allowed). Generally supported methods
///   are:
///   - GET
///   - POST
///   - PUT
///   - DELETE
///   - HEAD
///   - PATCH
///
/// Please note that not all server actions allow using all of these HTTP methods.
/// You should look up the supported methods for each method you intend to use
/// in the manual.
////////////////////////////////////////////////////////////////////////////////

// Local Variables:
// mode: c++
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @\\}\\)"
// End:
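The bullet list above fully specifies the server's Content-Length behaviour. The snippet below is only a minimal, self-contained C sketch of that decision logic; the constant, the function name and the plain integer status codes are illustrative stand-ins, not ArangoDB's actual symbols.

    #include <stdint.h>

    #define MAX_BODY_SIZE (512LL * 1024 * 1024)   /* 512 MB body limit from the text above */

    /* Returns the HTTP error status suggested by the rules above, or 0 if the
       request is (so far) acceptable. A body shorter than Content-Length is not
       an error yet: the server waits up to ~90 seconds for the remaining bytes. */
    int checkContentLength (int64_t contentLength, int64_t bytesReceived) {
      if (contentLength < 0) {
        return 411;                      /* length required */
      }
      if (contentLength > MAX_BODY_SIZE) {
        return 413;                      /* request entity too large */
      }
      if (bytesReceived > contentLength) {
        return 400;                      /* body exceeds the announced length */
      }
      return 0;
    }

The 1 MB header limit (HTTP 431) would be checked separately, before the body is read at all.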
@@ -81,14 +81,14 @@
/// database. It is a string and is unique within the database. Unlike the
/// collection identifier it is supplied by the creator of the collection. The
/// collection name can consist of letters, digits and the characters @LIT{_}
/// (underscore) and @LIT{-} (dash). However, the first character must be a
/// letter.
/// (underscore) and @LIT{-} (dash). Please refer to @ref NamingConventions for more
/// information on valid collection names.
///
/// @page GlossaryDocument
//////////////////////////
///
/// @GE{Document}: Documents in ArangoDB are JSON objects. These objects can be
/// nested (to any depth) and may contain lists. Each document is unique identified
/// nested (to any depth) and may contain lists. Each document is uniquely identified
/// by its document handle.
///
/// @page GlossaryDocumentEtag
@@ -44,6 +44,8 @@
///   <li>@ref HttpMisc</li>
///   <li>@ref HttpBatch</li>
///   <li>@ref HttpImport</li>
///   <li>@ref Communication</li>
///   <li>@ref NamingConventions</li>
///   <li>@ref ArangoErrors</li>
///   <li>@ref Glossary</li>
/// </ul>

@@ -76,6 +78,8 @@
///   </li>
///   <li>Advanced Topics
///     <ul>
///       <li>@ref Communication</li>
///       <li>@ref NamingConventions</li>
///       <li>@ref ArangoErrors</li>
///     </ul>
///   </li>
@@ -0,0 +1,99 @@
////////////////////////////////////////////////////////////////////////////////
/// @brief naming conventions in ArangoDB
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @page NamingConventions Naming Conventions in ArangoDB
///
/// The following naming conventions should be followed by users when creating
/// collections and documents in ArangoDB.
///
/// @section CollectionNames Collection names
///
/// Users can pick names for their collections as desired, provided the following
/// naming constraints are not violated:
///
/// - Collection names must only consist of the letters @LIT{a} to @LIT{z}
///   (both in lower and upper case), the numbers @LIT{0} to @LIT{9}, the
///   underscore (@LIT{_}), or the dash (@LIT{-}) symbol. This also means that
///   any non-ASCII collection names are not allowed.
///
/// - Regular collection names must start with a letter, and not a number, the
///   underscore or the dash symbol. Collection names starting with either a number,
///   an underscore or a dash are considered to be system collections that
///   are for ArangoDB's internal use only. System collection names should not be
///   used by end users for their own collections.
///
/// - The maximum allowed length of a collection name is 64 bytes.
///
/// - Collection names are case-sensitive.
///
/// @section AttributeNames Attribute names
///
/// Users can pick attribute names for document keys as desired, provided the
/// following attribute naming constraints are not violated:
///
/// - Attribute names starting with an underscore are considered to be system
///   attributes for ArangoDB's internal use. Such attribute names are already used
///   by ArangoDB for special purposes, e.g. @LIT{_id} is used to contain a
///   document's id, @LIT{_rev} is used to contain the revision number, and the
///   @LIT{_from} and @LIT{_to} attributes are used within edge collections.
///   More system attributes may be added in the future without further notice
///   so end users should not use attribute names starting with an underscore
///   for their own attributes.
///
/// - Attribute names should not start with the at-mark (@LIT{\@}). The at-mark
///   at the start of attribute names is reserved in ArangoDB for future use cases.
///
/// - Theoretically, attribute names can include punctuation and special characters
///   as desired, provided the name is a valid UTF-8 string.
///   For maximum portability, special characters should be avoided though.
///   For example, attribute names may
///   contain the dot symbol, but the dot has a special meaning in Javascript and
///   also in AQL, so when using such attribute names in one of these languages, the
///   attribute name would need to be quoted by the end user. This will work but
///   requires more work so it might be better to use attribute names which don't
///   require any quoting/escaping in all languages used. This includes languages
///   used by the client (e.g. Ruby, PHP) if the attributes are mapped to object
///   members there.
///
/// - ArangoDB does not enforce a length limit for attribute names. However, long
///   attribute names may use more memory in result sets etc. Therefore the use
///   of long attribute names is discouraged.
///
/// - As ArangoDB saves document attribute names separate from the actual document
///   attribute value data, the combined length of all attribute names for a document
///   must fit into an ArangoDB shape structure. The maximum combined names length
///   is variable and depends on the number and data types of attributes used.
///
/// - Attribute names are case-sensitive.
////////////////////////////////////////////////////////////////////////////////

// Local Variables:
// mode: c++
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @\\}\\)"
// End:
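The collection name rules above fit in a few lines of code. The following C sketch implements only the rules as stated in this page for illustration; the repository's real check is TRI_IsAllowedCollectionName (called elsewhere in this commit) and may differ in detail.

    #include <ctype.h>
    #include <stdbool.h>
    #include <string.h>

    /* Illustrative check of the collection name rules listed above. */
    static bool isAllowedCollectionName (const char* name, bool allowSystem) {
      size_t len = strlen(name);

      if (len == 0 || len > 64) {          /* maximum name length is 64 bytes */
        return false;
      }

      /* regular names must start with a letter; names starting with a digit,
         underscore or dash are reserved for system collections */
      if (! isalpha((unsigned char) name[0]) && ! allowSystem) {
        return false;
      }

      for (size_t i = 0; i < len; ++i) {
        char c = name[i];
        if (! isalnum((unsigned char) c) && c != '_' && c != '-') {
          return false;                    /* only ASCII letters, digits, _ and - */
        }
      }

      return true;
    }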
@@ -35,7 +35,37 @@
// -----------------------------------------------------------------------------

// -----------------------------------------------------------------------------
// --SECTION-- forward declared private functions
// --SECTION-- private defines
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @brief size of a cache line, in bytes
/// the memory acquired for the hash table is aligned to a multiple of this
/// value
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup HashArray
/// @{
////////////////////////////////////////////////////////////////////////////////

#define CACHE_LINE_SIZE 64

////////////////////////////////////////////////////////////////////////////////
/// @brief initial preallocation size of the hash table when the table is
/// first created
/// setting this to a high value will waste memory but reduce the number of
/// reallocations/repositionings necessary when the table grows
////////////////////////////////////////////////////////////////////////////////

#define INITIAL_SIZE 256

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- private functions
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
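CACHE_LINE_SIZE above is what the new AllocateTable function (next hunk) uses to place the hash table on a cache line boundary: it over-allocates by one cache line and rounds the start pointer up. A standalone C illustration of that trick, using plain malloc/free instead of TRI_Allocate/TRI_Free and assuming 64-byte cache lines as in the define above, might look like this:

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    #define CACHE_LINE_SIZE 64

    /* round a raw pointer up to the next cache line boundary */
    static char* alignToCacheLine (char* data) {
      uintptr_t offset = ((uintptr_t) data) % CACHE_LINE_SIZE;

      return (offset == 0) ? data : data + (CACHE_LINE_SIZE - offset);
    }

    int main (void) {
      char* data  = malloc(CACHE_LINE_SIZE + 1000);  /* raw block, keep for free() */
      char* table = alignToCacheLine(data);          /* aligned view, use for element access */

      assert(((uintptr_t) table) % CACHE_LINE_SIZE == 0);
      free(data);                                    /* always release the raw pointer */
      return 0;
    }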
@@ -43,11 +73,116 @@
/// @{
////////////////////////////////////////////////////////////////////////////////

static bool ResizeHashArray (TRI_hasharray_t*);
static bool ResizeHashArrayMulti (TRI_hasharray_t*);
////////////////////////////////////////////////////////////////////////////////
/// @brief adds a new element
////////////////////////////////////////////////////////////////////////////////

static void AddNewElement (TRI_hasharray_t* array, void* element) {
  uint64_t hash;
  uint64_t i;

  // ...........................................................................
  // compute the hash
  // ...........................................................................

  hash = IndexStaticHashElement(array, element);

  // ...........................................................................
  // search the table
  // ...........................................................................

  i = hash % array->_nrAlloc;

  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesR++;
#endif
  }

  // ...........................................................................
  // add a new element to the associative array
  // memcpy ok here since we are simply moving array items internally
  // ...........................................................................

  memcpy(array->_table + i * array->_elementSize, element, array->_elementSize);
  array->_nrUsed++;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief allocate memory for the hash table
////////////////////////////////////////////////////////////////////////////////

static bool AllocateTable (TRI_hasharray_t* array, size_t numElements) {
  char* data;
  char* table;
  size_t offset;

  data = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, CACHE_LINE_SIZE + (array->_elementSize * numElements), true);
  if (data == NULL) {
    return false;
  }

  // position array directly on a cache line boundary
  offset = ((uint64_t) data) % CACHE_LINE_SIZE;

  if (offset == 0) {
    // we're already on a cache line boundary
    table = data;
  }
  else {
    // move to start of a cache line
    table = data + (CACHE_LINE_SIZE - offset);
  }
  assert(((uint64_t) table) % CACHE_LINE_SIZE == 0);

  array->_data = data;
  array->_table = table;
  array->_nrAlloc = numElements;

  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief resizes the array
////////////////////////////////////////////////////////////////////////////////

static bool ResizeHashArray (TRI_hasharray_t* array) {
  char* oldData;
  char* oldTable;
  uint64_t oldAlloc;
  uint64_t j;

  oldData = array->_data;
  oldTable = array->_table;
  oldAlloc = array->_nrAlloc;

  if (! AllocateTable(array, 2 * array->_nrAlloc + 1)) {
    return false;
  }

  array->_nrUsed = 0;
#ifdef TRI_INTERNAL_STATS
  array->_nrResizes++;
#endif

  for (j = 0; j < oldAlloc; j++) {
    if (! IndexStaticIsEmptyElement(array, oldTable + j * array->_elementSize)) {
      AddNewElement(array, oldTable + j * array->_elementSize);
    }
  }

  TRI_Free(TRI_UNKNOWN_MEM_ZONE, oldData);
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief resizes the array
////////////////////////////////////////////////////////////////////////////////

static bool ResizeHashArrayMulti (TRI_hasharray_t* array) {
  return ResizeHashArray(array);
}

////////////////////////////////////////////////////////////////////////////////
/// @}
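A detail worth spelling out: ResizeHashArray cannot simply copy the old table, because an element's slot is computed as hash % _nrAlloc, and _nrAlloc changes (e.g. from 256 to 2 * 256 + 1 = 513). The tiny C example below shows the same hash landing in different slots before and after such a resize, which is why every non-empty old slot is re-inserted through AddNewElement; the concrete numbers are only illustrative.

    #include <stdint.h>
    #include <stdio.h>

    int main (void) {
      uint64_t hash = 123456789ULL;

      printf("slot at size 256: %llu\n", (unsigned long long) (hash % 256));  /* 21  */
      printf("slot at size 513: %llu\n", (unsigned long long) (hash % 513));  /* 261 */
      return 0;
    }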
@@ -67,6 +202,7 @@ static bool ResizeHashArrayMulti (TRI_hasharray_t*);
////////////////////////////////////////////////////////////////////////////////

bool TRI_InitHashArray (TRI_hasharray_t* array,
                        size_t initialDocumentCount,
                        size_t numFields,
                        size_t elementSize,
                        uint64_t (*hashKey) (TRI_hasharray_t*, void*),

@@ -76,6 +212,8 @@ bool TRI_InitHashArray (TRI_hasharray_t* array,
                        bool (*isEqualKeyElement) (TRI_hasharray_t*, void*, void*),
                        bool (*isEqualElementElement) (TRI_hasharray_t*, void*, void*)) {

  size_t initialSize;

  // ...........................................................................
  // Assign the callback functions
  // ...........................................................................

@@ -91,27 +229,26 @@ bool TRI_InitHashArray (TRI_hasharray_t* array,
  array->_elementSize = elementSize;
  array->_table = NULL;

  // set initial allocation size to 256 elements
  array->_nrAlloc = 256;
  if (initialDocumentCount > 0) {
    // use initial document count provided as initial size
    initialSize = (size_t) (2.5 * initialDocumentCount);
  }
  else {
    initialSize = INITIAL_SIZE;
  }

  if (! AllocateTable(array, initialSize)) {
    return false;
  }

  // ...........................................................................
  // allocate storage for the hash array
  // ...........................................................................

  array->_table = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, array->_elementSize * array->_nrAlloc, true);
  if (array->_table == NULL) {
    array->_nrAlloc = 0;

    return false;
  }

  // ...........................................................................
  // Go through and 'zero' (clear) each item in the hash array
  // ...........................................................................

  array->_nrUsed = 0;

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds = 0;
  array->_nrAdds = 0;
  array->_nrRems = 0;
@@ -120,12 +257,11 @@ bool TRI_InitHashArray (TRI_hasharray_t* array,
  array->_nrProbesA = 0;
  array->_nrProbesD = 0;
  array->_nrProbesR = 0;
#endif

  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief destroys an array, but does not free the pointer
////////////////////////////////////////////////////////////////////////////////

@@ -135,7 +271,6 @@ void TRI_DestroyHashArray (TRI_hasharray_t* array) {
    return;
  }

  // ...........................................................................
  // Go through each item in the array and remove any internal allocated memory
  // ...........................................................................

@@ -151,12 +286,10 @@ void TRI_DestroyHashArray (TRI_hasharray_t* array) {
      IndexStaticDestroyElement(array, p);
    }

    TRI_Free(TRI_UNKNOWN_MEM_ZONE, array->_table);
    TRI_Free(TRI_UNKNOWN_MEM_ZONE, array->_data);
  }
}

////////////////////////////////////////////////////////////////////////////////
/// @brief destroys an array and frees the pointer
////////////////////////////////////////////////////////////////////////////////

@@ -181,8 +314,6 @@ void TRI_FreeHashArray (TRI_hasharray_t* array) {
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief lookups an element given a key
////////////////////////////////////////////////////////////////////////////////
@@ -198,13 +329,13 @@ void* TRI_LookupByKeyHashArray (TRI_hasharray_t* array, void* key) {
  hash = IndexStaticHashKey(array, key);
  i = hash % array->_nrAlloc;

  // ...........................................................................
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds++;
#endif

  // ...........................................................................
  // search the table

@@ -213,7 +344,9 @@ void* TRI_LookupByKeyHashArray (TRI_hasharray_t* array, void* key) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesF++;
#endif
  }

@@ -224,8 +357,6 @@ void* TRI_LookupByKeyHashArray (TRI_hasharray_t* array, void* key) {
  return array->_table + (i * array->_elementSize);
}

////////////////////////////////////////////////////////////////////////////////
/// @brief finds an element given a key, return NULL if not found
////////////////////////////////////////////////////////////////////////////////

@@ -242,8 +373,6 @@ void* TRI_FindByKeyHashArray (TRI_hasharray_t* array, void* key) {
  return NULL;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief lookups an element given an element
////////////////////////////////////////////////////////////////////////////////
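All of the lookup, insert and remove hunks in this file share one pattern: hash the key, start at hash % _nrAlloc, then probe forward one slot at a time (wrapping around) until an empty slot or a matching element is found. The sketch below is a generic, simplified version of that probe loop with the ArangoDB callbacks replaced by plain stand-ins; it is illustrative only and not code from the repository.

    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
      char*    table;        /* nrAlloc slots of elementSize bytes each */
      uint64_t nrAlloc;
      uint64_t elementSize;
    } probe_table_t;

    /* stand-in for IndexStaticIsEmptyElement: an all-zero slot counts as empty */
    static bool slotIsEmpty (probe_table_t* t, char* slot) {
      for (uint64_t k = 0; k < t->elementSize; ++k) {
        if (slot[k] != 0) { return false; }
      }
      return true;
    }

    /* linear-probe lookup; caller checks whether the returned slot is empty */
    static void* lookup (probe_table_t* t, uint64_t hash,
                         bool (*matches) (char* slot, void* key), void* key) {
      uint64_t i = hash % t->nrAlloc;

      while (! slotIsEmpty(t, t->table + i * t->elementSize) &&
             ! matches(t->table + i * t->elementSize, key)) {
        i = (i + 1) % t->nrAlloc;        /* probe the next slot, wrapping around */
      }

      return t->table + i * t->elementSize;
    }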
@@ -264,7 +393,9 @@ void* TRI_LookupByElementHashArray (TRI_hasharray_t* array, void* element) {
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds++;
#endif

  // ...........................................................................

@@ -274,7 +405,9 @@ void* TRI_LookupByElementHashArray (TRI_hasharray_t* array, void* element) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesF++;
#endif
  }

  // ...........................................................................

@@ -284,8 +417,6 @@ void* TRI_LookupByElementHashArray (TRI_hasharray_t* array, void* element) {
  return (array->_table) + (i * array->_elementSize);
}

////////////////////////////////////////////////////////////////////////////////
/// @brief finds an element given an element, returns NULL if not found
////////////////////////////////////////////////////////////////////////////////

@@ -302,8 +433,6 @@ void* TRI_FindByElementHashArray (TRI_hasharray_t* array, void* element) {
  return NULL;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief adds an element to the array
////////////////////////////////////////////////////////////////////////////////

@@ -326,7 +455,9 @@ bool TRI_InsertElementHashArray (TRI_hasharray_t* array, void* element, bool ove
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrAdds++;
#endif

  // ...........................................................................

@@ -336,7 +467,9 @@ bool TRI_InsertElementHashArray (TRI_hasharray_t* array, void* element, bool ove
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -356,8 +489,6 @@ bool TRI_InsertElementHashArray (TRI_hasharray_t* array, void* element, bool ove
    return false;
  }

  // ...........................................................................
  // add a new element to the hash array (existing item is empty so no need to
  // destroy it)

@@ -383,8 +514,6 @@ bool TRI_InsertElementHashArray (TRI_hasharray_t* array, void* element, bool ove
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief adds an key/element to the array
////////////////////////////////////////////////////////////////////////////////

@@ -407,7 +536,9 @@ bool TRI_InsertKeyHashArray (TRI_hasharray_t* array, void* key, void* element, b
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrAdds++;
#endif

  // ...........................................................................

@@ -417,7 +548,9 @@ bool TRI_InsertKeyHashArray (TRI_hasharray_t* array, void* key, void* element, b
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -460,8 +593,6 @@ bool TRI_InsertKeyHashArray (TRI_hasharray_t* array, void* key, void* element, b
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief removes an element from the array
////////////////////////////////////////////////////////////////////////////////

@@ -485,7 +616,9 @@ bool TRI_RemoveElementHashArray (TRI_hasharray_t* array, void* element) {
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrRems++;
#endif

  // ...........................................................................

@@ -495,7 +628,9 @@ bool TRI_RemoveElementHashArray (TRI_hasharray_t* array, void* element) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -542,7 +677,6 @@ bool TRI_RemoveElementHashArray (TRI_hasharray_t* array, void* element) {
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief removes an key/element to the array
////////////////////////////////////////////////////////////////////////////////
@@ -562,13 +696,13 @@ bool TRI_RemoveKeyHashArray (TRI_hasharray_t* array, void* key) {
  i = hash % array->_nrAlloc;

  // ...........................................................................
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrRems++;
#endif

  // ...........................................................................

@@ -578,7 +712,9 @@ bool TRI_RemoveKeyHashArray (TRI_hasharray_t* array, void* key) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -634,36 +770,12 @@ bool TRI_RemoveKeyHashArray (TRI_hasharray_t* array, void* key) {
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- HASH ARRAY MULTI
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------

// -----------------------------------------------------------------------------
// --SECTION-- constructors and destructors
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup Collections
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- public functions
// -----------------------------------------------------------------------------

@@ -702,7 +814,9 @@ TRI_vector_pointer_t TRI_LookupByKeyHashArrayMulti (TRI_hasharray_t* array, void
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds++;
#endif

  // ...........................................................................

@@ -714,9 +828,11 @@ TRI_vector_pointer_t TRI_LookupByKeyHashArrayMulti (TRI_hasharray_t* array, void
    if (IndexStaticIsEqualKeyElementMulti(array, key, array->_table + i * array->_elementSize)) {
      TRI_PushBackVectorPointer(&result, array->_table + i * array->_elementSize);
    }
#ifdef TRI_INTERNAL_STATS
    else {
      array->_nrProbesF++;
    }
#endif

    i = (i + 1) % array->_nrAlloc;
  }

@@ -729,7 +845,6 @@ TRI_vector_pointer_t TRI_LookupByKeyHashArrayMulti (TRI_hasharray_t* array, void
  return result;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief lookups an element given an element
////////////////////////////////////////////////////////////////////////////////

@@ -759,7 +874,9 @@ TRI_vector_pointer_t TRI_LookupByElementHashArrayMulti (TRI_hasharray_t* array,
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds++;
#endif

  // ...........................................................................

@@ -771,9 +888,11 @@ TRI_vector_pointer_t TRI_LookupByElementHashArrayMulti (TRI_hasharray_t* array,
    if (IndexStaticIsEqualElementElementMulti(array, element, array->_table + i * array->_elementSize)) {
      TRI_PushBackVectorPointer(&result, array->_table + i * array->_elementSize);
    }
#ifdef TRI_INTERNAL_STATS
    else {
      array->_nrProbesF++;
    }
#endif

    i = (i + 1) % array->_nrAlloc;
  }

@@ -786,7 +905,6 @@ TRI_vector_pointer_t TRI_LookupByElementHashArrayMulti (TRI_hasharray_t* array,
  return result;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief adds an element to the array
////////////////////////////////////////////////////////////////////////////////

@@ -810,7 +928,9 @@ bool TRI_InsertElementHashArrayMulti (TRI_hasharray_t* array, void* element, boo
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrAdds++;
#endif

@@ -821,7 +941,9 @@ bool TRI_InsertElementHashArrayMulti (TRI_hasharray_t* array, void* element, boo
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualElementElementMulti(array, element, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -889,7 +1011,9 @@ bool TRI_InsertKeyHashArrayMulti (TRI_hasharray_t* array, void* key, void* eleme
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrAdds++;
#endif

  // ...........................................................................

@@ -898,7 +1022,9 @@ bool TRI_InsertKeyHashArrayMulti (TRI_hasharray_t* array, void* key, void* eleme

  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -951,7 +1077,10 @@ bool TRI_RemoveElementHashArrayMulti (TRI_hasharray_t* array, void* element) {
  // ...........................................................................
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrRems++;
#endif

  // ...........................................................................

@@ -960,7 +1089,9 @@ bool TRI_RemoveElementHashArrayMulti (TRI_hasharray_t* array, void* element) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualElementElementMulti(array, element, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);

@@ -1028,7 +1159,10 @@ bool TRI_RemoveKeyHashArrayMulti (TRI_hasharray_t* array, void* key) {
  // ...........................................................................
  // update statistics
  // ...........................................................................

#ifdef TRI_INTERNAL_STATS
  array->_nrRems++;
#endif

  // ...........................................................................

@@ -1038,7 +1172,9 @@ bool TRI_RemoveKeyHashArrayMulti (TRI_hasharray_t* array, void* key) {
  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize) &&
         ! IndexStaticIsEqualKeyElementMulti(array, key, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  arrayElement = array->_table + (i * array->_elementSize);
@@ -1087,126 +1223,6 @@ bool TRI_RemoveKeyHashArrayMulti (TRI_hasharray_t* array, void* key) {
/// @}
////////////////////////////////////////////////////////////////////////////////

// -----------------------------------------------------------------------------
// --SECTION-- forward declared private functions
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @addtogroup HashArray
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief adds a new element
////////////////////////////////////////////////////////////////////////////////

static void AddNewElement (TRI_hasharray_t* array, void* element) {
  uint64_t hash;
  uint64_t i;

  // ...........................................................................
  // compute the hash
  // ...........................................................................

  hash = IndexStaticHashElement(array, element);

  // ...........................................................................
  // search the table
  // ...........................................................................

  i = hash % array->_nrAlloc;

  while (! IndexStaticIsEmptyElement(array, array->_table + i * array->_elementSize)) {
    i = (i + 1) % array->_nrAlloc;
    array->_nrProbesR++;
  }

  // ...........................................................................
  // add a new element to the associative array
  // memcpy ok here since are simply moving array items internally
  // ...........................................................................

  memcpy(array->_table + i * array->_elementSize, element, array->_elementSize);
  array->_nrUsed++;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief resizes the array
////////////////////////////////////////////////////////////////////////////////

static bool ResizeHashArray (TRI_hasharray_t* array) {
  char * oldTable;
  uint64_t oldAlloc;
  uint64_t j;

  oldTable = array->_table;
  oldAlloc = array->_nrAlloc;

  array->_nrAlloc = 2 * array->_nrAlloc + 1;

  array->_table = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, array->_nrAlloc * array->_elementSize, true);
  if (array->_table == NULL) {
    // allocation has failed. must restore original values
    array->_table = oldTable;
    array->_nrAlloc = oldAlloc;

    return false;
  }

  array->_nrUsed = 0;
  array->_nrResizes++;

  for (j = 0; j < oldAlloc; j++) {
    if (! IndexStaticIsEmptyElement(array, oldTable + j * array->_elementSize)) {
      AddNewElement(array, oldTable + j * array->_elementSize);
    }
  }

  TRI_Free(TRI_UNKNOWN_MEM_ZONE, oldTable);
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief resizes the array
////////////////////////////////////////////////////////////////////////////////

static bool ResizeHashArrayMulti (TRI_hasharray_t* array) {
  char* oldTable;
  uint64_t oldAlloc;
  uint64_t j;

  oldTable = array->_table;
  oldAlloc = array->_nrAlloc;

  array->_nrAlloc = 2 * array->_nrAlloc + 1;

  array->_table = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, array->_nrAlloc * array->_elementSize, true);
  if (array->_table == NULL) {
    // allocation has failed, must restore original values
    array->_table = oldTable;
    array->_nrAlloc = oldAlloc;

    return false;
  }

  array->_nrUsed = 0;
  array->_nrResizes++;

  for (j = 0; j < oldAlloc; j++) {
    if (! IndexStaticIsEmptyElement(array, oldTable + j * array->_elementSize)) {
      AddNewElement(array, oldTable + j * array->_elementSize);
    }
  }

  TRI_Free(TRI_UNKNOWN_MEM_ZONE, oldTable);
  return true;
}

////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

// Local Variables:
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @\\}\\)"
@ -63,9 +63,15 @@ typedef struct TRI_hasharray_s {
|
|||
uint64_t _elementSize;
|
||||
uint64_t _nrAlloc; // the size of the table
|
||||
uint64_t _nrUsed; // the number of used entries
|
||||
|
||||
// _table might or might not be the same pointer as _data
|
||||
// if you want to handle the hash table memory, always use the _data pointer!
|
||||
// if you want to work with the hash table elements, always use the _table pointer!
|
||||
|
||||
char* _table; // the table itself
|
||||
char* _data; // pointer to memory acquired for the hash table
|
||||
char* _table; // the table itself, aligned to a cache line boundary
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
uint64_t _nrFinds; // statistics: number of lookup calls
|
||||
uint64_t _nrAdds; // statistics: number of insert calls
|
||||
uint64_t _nrRems; // statistics: number of remove calls
|
||||
|
@ -75,6 +81,7 @@ typedef struct TRI_hasharray_s {
|
|||
uint64_t _nrProbesA; // statistics: number of misses while inserting
|
||||
uint64_t _nrProbesD; // statistics: number of misses while removing
|
||||
uint64_t _nrProbesR; // statistics: number of misses while adding
|
||||
#endif
|
||||
}
|
||||
TRI_hasharray_t;
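The split between _data (the raw allocation) and _table (the aligned view of it) described in the comment above can be illustrated with a small sketch. The 64-byte cache-line size, the over-allocation by one cache line, and the use of <stdint.h> arithmetic are assumptions for illustration only:

/* sketch: acquire raw memory in _data, then align _table inside it */
size_t cacheLine = 64;  /* assumed cache-line size */
array->_data = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE,
                            array->_nrAlloc * array->_elementSize + cacheLine, true);
array->_table = (char*) (((uintptr_t) array->_data + cacheLine - 1)
                         & ~ (uintptr_t) (cacheLine - 1));
/* memory must always be released via _data, never via _table */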
|
||||
|
||||
|
@ -101,6 +108,7 @@ TRI_hasharray_t;
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
bool TRI_InitHashArray (TRI_hasharray_t*,
|
||||
size_t initialDocumentCount,
|
||||
size_t numFields,
|
||||
size_t elementSize,
|
||||
uint64_t (*hashKey) (TRI_hasharray_t*, void*),
|
||||
|
@ -187,43 +195,10 @@ bool TRI_RemoveKeyHashArray (TRI_hasharray_t*, void* key);
|
|||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- public types
|
||||
// --SECTION-- MULTI HASH ARRAY
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @addtogroup HashArray
|
||||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- MULTI HASH ARRAY
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- constructors and destructors
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @addtogroup HashArray
|
||||
/// @{
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- public functions
|
||||
// -----------------------------------------------------------------------------
|
||||
|
@ -273,15 +248,6 @@ bool TRI_RemoveKeyHashArrayMulti (TRI_hasharray_t*, void* key);
|
|||
/// @}
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -49,8 +49,42 @@ static int HashIndex_queryMethodCall (void*, TRI_index_operat
|
|||
static TRI_index_iterator_t* HashIndex_resultMethodCall (void*, TRI_index_operator_t*, void*, bool (*filter) (TRI_index_iterator_t*));
|
||||
static int HashIndex_freeMethodCall (void*, void*);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief create and initialise a hash index
|
||||
/// this function is used by unique and non-unique indexes to set up the
|
||||
/// hash index base structure
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static HashIndex* CreateHashIndex (size_t numFields, size_t initialDocumentCount) {
|
||||
HashIndex* hashIndex = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(HashIndex), false);
|
||||
if (hashIndex == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
hashIndex->hashArray = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_hasharray_t), false);
|
||||
if (hashIndex->hashArray == NULL) {
|
||||
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashIndex);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (! TRI_InitHashArray(hashIndex->hashArray,
|
||||
initialDocumentCount,
|
||||
numFields,
|
||||
sizeof(HashIndexElement),
|
||||
NULL,
|
||||
NULL,
|
||||
NULL,
|
||||
NULL,
|
||||
NULL,
|
||||
NULL)) {
|
||||
HashIndex_free(hashIndex);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return hashIndex;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief free a hash index results list
|
||||
|
@ -74,7 +108,7 @@ static void FreeResults (TRI_hash_index_elements_t* list) {
|
|||
/// @brief destroys the hash index by calling the hash array's own Free function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void HashIndex_destroy(HashIndex* hashIndex) {
|
||||
void HashIndex_destroy (HashIndex* hashIndex) {
|
||||
if (hashIndex != NULL) {
|
||||
TRI_FreeHashArray(hashIndex->hashArray);
|
||||
}
|
||||
|
@ -85,7 +119,7 @@ void HashIndex_destroy(HashIndex* hashIndex) {
|
|||
/// @brief destroys the hash index and frees the memory associated with the index structure
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void HashIndex_free(HashIndex* hashIndex) {
|
||||
void HashIndex_free (HashIndex* hashIndex) {
|
||||
if (hashIndex != NULL) {
|
||||
HashIndex_destroy(hashIndex);
|
||||
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashIndex);
|
||||
|
@ -96,7 +130,7 @@ void HashIndex_free(HashIndex* hashIndex) {
|
|||
/// @brief free a result set allocated by HashIndex_find
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void HashIndex_freeResult(TRI_hash_index_elements_t* const list) {
|
||||
void HashIndex_freeResult (TRI_hash_index_elements_t* const list) {
|
||||
FreeResults(list);
|
||||
}
|
||||
|
||||
|
@ -108,28 +142,14 @@ void HashIndex_freeResult(TRI_hash_index_elements_t* const list) {
|
|||
/// @brief Creates a new hash array used for storage of elements in the hash index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
HashIndex* HashIndex_new(size_t numFields) {
|
||||
HashIndex* HashIndex_new (size_t numFields, size_t initialDocumentCount) {
|
||||
HashIndex* hashIndex;
|
||||
|
||||
hashIndex = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(HashIndex), false);
|
||||
if (hashIndex == NULL) {
|
||||
return NULL;
|
||||
hashIndex = CreateHashIndex(numFields, initialDocumentCount);
|
||||
if (hashIndex != NULL) {
|
||||
hashIndex->unique = true;
|
||||
}
|
||||
|
||||
hashIndex->unique = true;
|
||||
hashIndex->hashArray = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_hasharray_t), false);
|
||||
if (hashIndex->hashArray == NULL) {
|
||||
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashIndex);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (! TRI_InitHashArray(hashIndex->hashArray, numFields, sizeof(HashIndexElement), NULL, NULL, NULL, NULL, NULL, NULL) ) {
|
||||
HashIndex_free(hashIndex);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return hashIndex;
|
||||
}
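With the extra initialDocumentCount parameter, callers can size the underlying hash array up front. A minimal usage sketch follows; the field count and document count are illustrative values, not taken from this change:

/* sketch: create a unique hash index over 2 fields, preallocated for ~10000 documents */
HashIndex* idx = HashIndex_new(2, 10000);
if (idx == NULL) {
  /* out of memory */
}
/* ... HashIndex_add() / HashIndex_find() ... */
HashIndex_free(idx);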
|
||||
|
||||
|
@ -137,7 +157,7 @@ HashIndex* HashIndex_new(size_t numFields) {
|
|||
/// @brief Assigns a static function call to a function pointer used by Query Engine
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int HashIndex_assignMethod(void* methodHandle, TRI_index_method_assignment_type_e methodType) {
|
||||
int HashIndex_assignMethod (void* methodHandle, TRI_index_method_assignment_type_e methodType) {
|
||||
switch (methodType) {
|
||||
|
||||
case TRI_INDEX_METHOD_ASSIGNMENT_FREE : {
|
||||
|
@ -175,7 +195,7 @@ int HashIndex_assignMethod(void* methodHandle, TRI_index_method_assignment_type_
|
|||
/// @brief Adds (inserts) a data element into the hash array part of the hash index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int HashIndex_add(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int HashIndex_add (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
bool result;
|
||||
|
||||
// .............................................................................
|
||||
|
@ -192,7 +212,7 @@ int HashIndex_add(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
/// @brief Locates an entry within the hash array part of the hash index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_hash_index_elements_t* HashIndex_find(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
TRI_hash_index_elements_t* HashIndex_find (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
HashIndexElement* result;
|
||||
TRI_hash_index_elements_t* results;
|
||||
|
||||
|
@ -237,7 +257,7 @@ TRI_hash_index_elements_t* HashIndex_find(HashIndex* hashIndex, HashIndexElement
|
|||
/// @brief An alias for HashIndex_add
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int HashIndex_insert(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int HashIndex_insert (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
return HashIndex_add(hashIndex,element);
|
||||
}
|
||||
|
||||
|
@ -246,7 +266,7 @@ int HashIndex_insert(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
/// @brief Removes an entry from the hash array part of the hash index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int HashIndex_remove(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int HashIndex_remove (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
bool result;
|
||||
|
||||
result = TRI_RemoveElementHashArray(hashIndex->hashArray, element);
|
||||
|
@ -259,8 +279,8 @@ int HashIndex_remove(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
/// @brief then adds the afterElement
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int HashIndex_update(HashIndex* hashIndex, const HashIndexElement* beforeElement,
|
||||
const HashIndexElement* afterElement) {
|
||||
int HashIndex_update (HashIndex* hashIndex, const HashIndexElement* beforeElement,
|
||||
const HashIndexElement* afterElement) {
|
||||
// ...........................................................................
|
||||
// This function is not currently implemented and must not be called.
|
||||
// Its purpose would be to remove the existing beforeElement and replace it
|
||||
|
@ -289,7 +309,7 @@ int HashIndex_update(HashIndex* hashIndex, const HashIndexElement* beforeElement
|
|||
/// @brief destroys the hash index by calling the hash array's own Free function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void MultiHashIndex_destroy(HashIndex* hashIndex) {
|
||||
void MultiHashIndex_destroy (HashIndex* hashIndex) {
|
||||
HashIndex_destroy(hashIndex);
|
||||
}
|
||||
|
||||
|
@ -298,7 +318,7 @@ void MultiHashIndex_destroy(HashIndex* hashIndex) {
|
|||
/// @brief destroys the hash index and frees the memory associated with the index structure
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void MultiHashIndex_free(HashIndex* hashIndex) {
|
||||
void MultiHashIndex_free (HashIndex* hashIndex) {
|
||||
HashIndex_free(hashIndex);
|
||||
}
|
||||
|
||||
|
@ -306,7 +326,7 @@ void MultiHashIndex_free(HashIndex* hashIndex) {
|
|||
/// @brief free a result set allocated by MultiHashIndex_find
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void MultiHashIndex_freeResult(TRI_hash_index_elements_t* const list) {
|
||||
void MultiHashIndex_freeResult (TRI_hash_index_elements_t* const list) {
|
||||
FreeResults(list);
|
||||
}
|
||||
|
||||
|
@ -319,32 +339,17 @@ void MultiHashIndex_freeResult(TRI_hash_index_elements_t* const list) {
|
|||
/// @brief Creates a new multi (non-unique) hash index
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
HashIndex* MultiHashIndex_new(size_t numFields) {
|
||||
HashIndex* MultiHashIndex_new (size_t numFields, size_t initialDocumentCount) {
|
||||
HashIndex* hashIndex;
|
||||
|
||||
hashIndex = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(HashIndex), false);
|
||||
if (hashIndex == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
hashIndex->unique = false;
|
||||
hashIndex->hashArray = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE, sizeof(TRI_hasharray_t), false);
|
||||
if (hashIndex->hashArray == NULL) {
|
||||
TRI_Free(TRI_UNKNOWN_MEM_ZONE, hashIndex);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (! TRI_InitHashArray(hashIndex->hashArray, numFields, sizeof(HashIndexElement), NULL, NULL, NULL, NULL, NULL, NULL) ) {
|
||||
HashIndex_free(hashIndex);
|
||||
|
||||
return NULL;
|
||||
hashIndex = CreateHashIndex(numFields, initialDocumentCount);
|
||||
if (hashIndex != NULL) {
|
||||
hashIndex->unique = false;
|
||||
}
|
||||
|
||||
return hashIndex;
|
||||
}
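Both constructors now delegate to the shared CreateHashIndex() helper and differ only in the uniqueness flag they set afterwards; inserts are then routed to the corresponding hash-array function (only the multi path, TRI_InsertElementHashArrayMulti, is visible in this hunk). A brief sketch with illustrative sizes:

/* sketch: the two index flavours share their construction path */
HashIndex* uniqueIdx = HashIndex_new(1, 1000);       /* sets ->unique = true  */
HashIndex* multiIdx  = MultiHashIndex_new(1, 1000);  /* sets ->unique = false */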
|
||||
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// public functions : INSERT, REMOVE & LOOKUP
|
||||
// -----------------------------------------------------------------------------
|
||||
|
@ -354,7 +359,7 @@ HashIndex* MultiHashIndex_new(size_t numFields) {
|
|||
/// @brief Adds (inserts) a data element into the hash array (hash index)
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
int MultiHashIndex_add(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int MultiHashIndex_add (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
bool result;
|
||||
result = TRI_InsertElementHashArrayMulti(hashIndex->hashArray, element, false);
|
||||
if (result) {
|
||||
|
@ -367,7 +372,7 @@ int MultiHashIndex_add(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
// Locates an entry within the associative array
|
||||
// ...............................................................................
|
||||
|
||||
TRI_hash_index_elements_t* MultiHashIndex_find(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
TRI_hash_index_elements_t* MultiHashIndex_find (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
TRI_vector_pointer_t result;
|
||||
TRI_hash_index_elements_t* results;
|
||||
size_t j;
|
||||
|
@ -411,7 +416,7 @@ TRI_hash_index_elements_t* MultiHashIndex_find(HashIndex* hashIndex, HashIndexEl
|
|||
// An alias for MultiHashIndex_add
|
||||
// ...............................................................................
|
||||
|
||||
int MultiHashIndex_insert(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int MultiHashIndex_insert (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
return MultiHashIndex_add(hashIndex,element);
|
||||
}
|
||||
|
||||
|
@ -420,7 +425,7 @@ int MultiHashIndex_insert(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
// Removes an entry from the associative array
|
||||
// ...............................................................................
|
||||
|
||||
int MultiHashIndex_remove(HashIndex* hashIndex, HashIndexElement* element) {
|
||||
int MultiHashIndex_remove (HashIndex* hashIndex, HashIndexElement* element) {
|
||||
bool result;
|
||||
result = TRI_RemoveElementHashArrayMulti(hashIndex->hashArray, element);
|
||||
return result ? TRI_ERROR_NO_ERROR : TRI_ERROR_INTERNAL;
|
||||
|
@ -432,7 +437,7 @@ int MultiHashIndex_remove(HashIndex* hashIndex, HashIndexElement* element) {
|
|||
// then adds the afterElement
|
||||
// ...............................................................................
|
||||
|
||||
int MultiHashIndex_update(HashIndex* hashIndex, HashIndexElement* beforeElement,
|
||||
int MultiHashIndex_update (HashIndex* hashIndex, HashIndexElement* beforeElement,
|
||||
HashIndexElement* afterElement) {
|
||||
assert(false);
|
||||
return TRI_ERROR_INTERNAL;
|
||||
|
@ -444,8 +449,8 @@ int MultiHashIndex_update(HashIndex* hashIndex, HashIndexElement* beforeElement,
|
|||
// Implementation of forward declared query engine callback functions
|
||||
////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
static int HashIndex_queryMethodCall(void* theIndex, TRI_index_operator_t* indexOperator,
|
||||
TRI_index_challenge_t* challenge, void* data) {
|
||||
static int HashIndex_queryMethodCall (void* theIndex, TRI_index_operator_t* indexOperator,
|
||||
TRI_index_challenge_t* challenge, void* data) {
|
||||
HashIndex* hIndex = (HashIndex*)(theIndex);
|
||||
if (hIndex == NULL || indexOperator == NULL) {
|
||||
return TRI_ERROR_INTERNAL;
|
||||
|
@ -454,8 +459,8 @@ static int HashIndex_queryMethodCall(void* theIndex, TRI_index_operator_t* index
|
|||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
static TRI_index_iterator_t* HashIndex_resultMethodCall(void* theIndex, TRI_index_operator_t* indexOperator,
|
||||
void* data, bool (*filter) (TRI_index_iterator_t*)) {
|
||||
static TRI_index_iterator_t* HashIndex_resultMethodCall (void* theIndex, TRI_index_operator_t* indexOperator,
|
||||
void* data, bool (*filter) (TRI_index_iterator_t*)) {
|
||||
HashIndex* hIndex = (HashIndex*)(theIndex);
|
||||
if (hIndex == NULL || indexOperator == NULL) {
|
||||
return NULL;
|
||||
|
|
|
@ -89,7 +89,7 @@ int HashIndex_assignMethod (void*, TRI_index_method_assignment_type_e);
|
|||
|
||||
void HashIndex_destroy (HashIndex*);
|
||||
|
||||
HashIndex* HashIndex_new (size_t);
|
||||
HashIndex* HashIndex_new (size_t, size_t);
|
||||
|
||||
void HashIndex_free (HashIndex*);
|
||||
|
||||
|
@ -126,7 +126,7 @@ void MultiHashIndex_free (HashIndex*);
|
|||
|
||||
void MultiHashIndex_freeResult(TRI_hash_index_elements_t* const);
|
||||
|
||||
HashIndex* MultiHashIndex_new (size_t);
|
||||
HashIndex* MultiHashIndex_new (size_t, size_t);
|
||||
|
||||
int MultiHashIndex_add (HashIndex*, HashIndexElement*);
|
||||
|
||||
|
|
|
@ -3944,7 +3944,11 @@ static v8::Handle<v8::Value> JS_RemoveVocbaseCol (v8::Arguments const& argv) {
|
|||
/// @FUN{@FA{collection}.rename(@FA{new-name})}
|
||||
///
|
||||
/// Renames a collection using the @FA{new-name}. The @FA{new-name} must not
|
||||
/// already be used for a different collection. If it is an error is thrown.
|
||||
/// already be used for a different collection. @FA{new-name} must also be a
|
||||
/// valid collection name. For more information on valid collection names please refer
|
||||
/// to @ref NamingConventions.
|
||||
///
|
||||
/// If renaming fails for any reason, an error is thrown.
|
||||
///
|
||||
/// @EXAMPLES
|
||||
///
|
||||
|
@ -4610,9 +4614,10 @@ static v8::Handle<v8::Value> JS_CompletionsVocBase (v8::Arguments const& argv) {
|
|||
///
|
||||
/// @FUN{db._create(@FA{collection-name})}
|
||||
///
|
||||
/// Creates a new collection named @FA{collection-name}. If the
|
||||
/// collection name already exists, then an error is thrown. The default value
|
||||
/// for @LIT{waitForSync} is @LIT{false}.
|
||||
/// Creates a new collection named @FA{collection-name}.
|
||||
/// If the collection name already exists or if the name format is invalid, an
|
||||
/// error is thrown. For more information on valid collection names please refer
|
||||
/// to @ref NamingConventions.
|
||||
///
|
||||
/// The type of the collection is automatically determined by the object that
|
||||
/// @FA{_create} is invoked with:
|
||||
|
|
|
@ -522,7 +522,7 @@ static TRI_doc_mptr_t RollbackUpdate (TRI_document_collection_t* sim,
|
|||
else if (originalMarker->_type == TRI_DOC_MARKER_EDGE) {
|
||||
TRI_doc_edge_marker_t edgeUpdate;
|
||||
|
||||
memcpy(&edgeUpdate, originalMarker, sizeof(TRI_doc_document_marker_t));
|
||||
memcpy(&edgeUpdate, originalMarker, sizeof(TRI_doc_edge_marker_t));
|
||||
marker = &edgeUpdate.base;
|
||||
markerLength = sizeof(TRI_doc_edge_marker_t);
|
||||
data = ((char*) originalMarker) + sizeof(TRI_doc_edge_marker_t);
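The fix above copies the full edge marker instead of only the embedded document marker, so the edge-specific fields that follow the shared base are no longer truncated during a rollback. A one-line sanity check illustrating the size relationship (it is assumed here, as the use of edgeUpdate.base above suggests, that TRI_doc_edge_marker_t embeds the document marker as its first member):

/* sketch: the edge marker is a strict superset of the document marker */
assert(sizeof(TRI_doc_edge_marker_t) > sizeof(TRI_doc_document_marker_t));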
|
||||
|
@ -3695,8 +3695,13 @@ static TRI_index_t* CreateHashIndexDocumentCollection (TRI_document_collection_t
|
|||
return idx;
|
||||
}
|
||||
|
||||
// create the hash index
|
||||
idx = TRI_CreateHashIndex(&collection->base, &fields, &paths, unique);
|
||||
// create the hash index. we'll provide it with the current number of documents
|
||||
// in the collection so the index can do a sensible memory preallocation
|
||||
idx = TRI_CreateHashIndex(&collection->base,
|
||||
&fields,
|
||||
&paths,
|
||||
unique,
|
||||
collection->base._primaryIndex._nrUsed);
|
||||
|
||||
// release memory allocated to vector
|
||||
TRI_DestroyVector(&paths);
|
||||
|
|
|
@ -1759,7 +1759,8 @@ static int UpdateHashIndex (TRI_index_t* idx,
|
|||
TRI_index_t* TRI_CreateHashIndex (struct TRI_primary_collection_s* collection,
|
||||
TRI_vector_pointer_t* fields,
|
||||
TRI_vector_t* paths,
|
||||
bool unique) {
|
||||
bool unique,
|
||||
size_t initialDocumentCount) {
|
||||
TRI_hash_index_t* hashIndex;
|
||||
int result;
|
||||
size_t j;
|
||||
|
@ -1814,10 +1815,12 @@ TRI_index_t* TRI_CreateHashIndex (struct TRI_primary_collection_s* collection,
|
|||
}
|
||||
|
||||
if (unique) {
|
||||
hashIndex->_hashIndex = HashIndex_new(hashIndex->_paths._length);
|
||||
// create a unique index preallocated for the current number of documents
|
||||
hashIndex->_hashIndex = HashIndex_new(hashIndex->_paths._length, initialDocumentCount);
|
||||
}
|
||||
else {
|
||||
hashIndex->_hashIndex = MultiHashIndex_new(hashIndex->_paths._length);
|
||||
// create a non-unique index preallocated for the current number of documents
|
||||
hashIndex->_hashIndex = MultiHashIndex_new(hashIndex->_paths._length, initialDocumentCount);
|
||||
}
|
||||
|
||||
if (hashIndex->_hashIndex == NULL) { // oops out of memory?
|
||||
|
|
|
@ -476,7 +476,8 @@ GeoCoordinates* TRI_NearestGeoIndex (TRI_index_t*,
|
|||
TRI_index_t* TRI_CreateHashIndex (struct TRI_primary_collection_s*,
|
||||
TRI_vector_pointer_t* fields,
|
||||
TRI_vector_t* paths,
|
||||
bool unique);
|
||||
bool unique,
|
||||
size_t initialDocumentCount);
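A caller passes the current document count of the collection so the index can preallocate, mirroring the call site in CreateHashIndexDocumentCollection() earlier in this change:

/* sketch: creating a hash index sized for the documents already present */
idx = TRI_CreateHashIndex(&collection->base,
                          &fields,
                          &paths,
                          unique,
                          collection->base._primaryIndex._nrUsed);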
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief frees the memory allocated, but does not free the pointer
|
||||
|
|
|
@ -674,21 +674,12 @@ static TRI_vocbase_col_t* BearCollectionVocBase (TRI_vocbase_t* vocbase,
|
|||
union { void const* v; TRI_vocbase_col_t* c; } found;
|
||||
TRI_vocbase_col_t* collection;
|
||||
TRI_col_parameter_t parameter;
|
||||
char wrong;
|
||||
|
||||
if (*name == '\0') {
|
||||
TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// check that the name does not contain any strange characters
|
||||
parameter._isSystem = false;
|
||||
wrong = TRI_IsAllowedCollectionName(¶meter, name);
|
||||
|
||||
if (wrong != 0) {
|
||||
LOG_DEBUG("found illegal character in name: %c", wrong);
|
||||
|
||||
if (! TRI_IsAllowedCollectionName(¶meter, name)) {
|
||||
TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
@ -1003,12 +994,13 @@ size_t PageSize;
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief checks if a collection name is allowed
|
||||
///
|
||||
/// Returns 0 for success or the offending character.
|
||||
/// Returns true if the name is allowed and false otherwise
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
char TRI_IsAllowedCollectionName (TRI_col_parameter_t* paramater, char const* name) {
|
||||
bool TRI_IsAllowedCollectionName (TRI_col_parameter_t* paramater, char const* name) {
|
||||
bool ok;
|
||||
char const* ptr;
|
||||
size_t length = 0;
|
||||
|
||||
for (ptr = name; *ptr; ++ptr) {
|
||||
if (name < ptr || paramater->_isSystem) {
|
||||
|
@ -1019,11 +1011,18 @@ char TRI_IsAllowedCollectionName (TRI_col_parameter_t* paramater, char const* na
|
|||
}
|
||||
|
||||
if (! ok) {
|
||||
return *ptr;
|
||||
return false;
|
||||
}
|
||||
|
||||
++length;
|
||||
}
|
||||
|
||||
return 0;
|
||||
if (length == 0 || length > 64) {
|
||||
// invalid name length
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
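Callers now get a simple yes/no answer instead of the offending character; both call sites updated in this change follow the same pattern. A condensed sketch:

/* sketch: validating a user-supplied name before creating a collection */
TRI_col_parameter_t parameter;
parameter._isSystem = false;

if (! TRI_IsAllowedCollectionName(&parameter, name)) {
  TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
  return NULL;
}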
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -1411,24 +1410,15 @@ TRI_vocbase_col_t* TRI_CreateCollectionVocBase (TRI_vocbase_t* vocbase,
|
|||
TRI_document_collection_t* sim;
|
||||
TRI_col_type_e type;
|
||||
char const* name;
|
||||
char wrong;
|
||||
void const* found;
|
||||
|
||||
assert(parameter);
|
||||
name = parameter->_name;
|
||||
|
||||
if (*name == '\0') {
|
||||
TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// check that the name does not contain any strange characters
|
||||
wrong = TRI_IsAllowedCollectionName(parameter, name);
|
||||
|
||||
if (wrong != 0) {
|
||||
LOG_DEBUG("found illegal character in name: %c", wrong);
|
||||
|
||||
if (! TRI_IsAllowedCollectionName(parameter, name)) {
|
||||
TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
@ -1695,7 +1685,6 @@ int TRI_RenameCollectionVocBase (TRI_vocbase_t* vocbase, TRI_vocbase_col_t* coll
|
|||
TRI_col_info_t info;
|
||||
TRI_col_parameter_t parameter;
|
||||
void const* found;
|
||||
char wrong;
|
||||
char const* oldName;
|
||||
int res;
|
||||
|
||||
|
@ -1706,16 +1695,8 @@ int TRI_RenameCollectionVocBase (TRI_vocbase_t* vocbase, TRI_vocbase_col_t* coll
|
|||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
// check name conventions
|
||||
if (*newName == '\0') {
|
||||
return TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
}
|
||||
|
||||
parameter._isSystem = (*oldName == '_');
|
||||
wrong = TRI_IsAllowedCollectionName(¶meter, newName);
|
||||
|
||||
if (wrong != 0) {
|
||||
LOG_DEBUG("found illegal character in name: %c", wrong);
|
||||
if (! TRI_IsAllowedCollectionName(¶meter, newName)) {
|
||||
return TRI_set_errno(TRI_ERROR_ARANGO_ILLEGAL_NAME);
|
||||
}
|
||||
|
||||
|
|
|
@ -445,7 +445,7 @@ TRI_vocbase_col_t;
|
|||
/// @brief checks if a collection is allowed
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
char TRI_IsAllowedCollectionName (struct TRI_col_parameter_s*, char const*);
|
||||
bool TRI_IsAllowedCollectionName (struct TRI_col_parameter_s*, char const*);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief create a new tick
|
||||
|
|
|
@ -2619,13 +2619,20 @@ function ArangoDatabase (connection) {
|
|||
'Print function: ' + "\n" +
|
||||
' > print(x) std. print function ' + "\n" +
|
||||
' > print_plain(x) print without pretty printing' + "\n" +
|
||||
' and without colors ';
|
||||
' and without colors ' + "\n" +
|
||||
' > clear() clear screen ' ;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief create the global db object and load the collections
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
try {
|
||||
clear = function () {
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
print('\n');
|
||||
}
|
||||
};
|
||||
|
||||
if (typeof arango !== 'undefined') {
|
||||
// default database object
|
||||
db = internal.db = new ArangoDatabase(arango);
|
||||
|
|
|
@ -58,7 +58,7 @@
|
|||
"ERROR_ARANGO_COLLECTION_NOT_FOUND" : { "code" : 1203, "message" : "collection not found" },
|
||||
"ERROR_ARANGO_COLLECTION_PARAMETER_MISSING" : { "code" : 1204, "message" : "parameter 'collection' not found" },
|
||||
"ERROR_ARANGO_DOCUMENT_HANDLE_BAD" : { "code" : 1205, "message" : "illegal document handle" },
|
||||
"ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL" : { "code" : 1206, "message" : "maixaml size of journal too small" },
|
||||
"ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL" : { "code" : 1206, "message" : "maixmal size of journal too small" },
|
||||
"ERROR_ARANGO_DUPLICATE_NAME" : { "code" : 1207, "message" : "duplicate name" },
|
||||
"ERROR_ARANGO_ILLEGAL_NAME" : { "code" : 1208, "message" : "illegal name" },
|
||||
"ERROR_ARANGO_NO_INDEX" : { "code" : 1209, "message" : "no suitable index known" },
|
||||
|
|
|
@ -41,6 +41,34 @@ function CollectionSuiteErrorHandling () {
|
|||
|
||||
return {
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief bad name (too long)
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testErrorHandlingBadNameTooLong : function () {
|
||||
try {
|
||||
// one char too long
|
||||
internal.db._create("a1234567890123456789012345678901234567890123456789012345678901234");
|
||||
}
|
||||
catch (err) {
|
||||
assertEqual(ERRORS.ERROR_ARANGO_ILLEGAL_NAME.code, err.errorNum);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief bad name (system name)
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testErrorHandlingBadNameSystem : function () {
|
||||
try {
|
||||
// one char too long
|
||||
internal.db._create("1234");
|
||||
}
|
||||
catch (err) {
|
||||
assertEqual(ERRORS.ERROR_ARANGO_ILLEGAL_NAME.code, err.errorNum);
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief bad name (underscore)
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -130,6 +158,20 @@ function CollectionSuite () {
|
|||
|
||||
return {
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief long name
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testCreateLongName : function () {
|
||||
var cn = "a123456789012345678901234567890123456789012345678901234567890123";
|
||||
|
||||
internal.db._drop(cn);
|
||||
var c1 = internal.db._create(cn);
|
||||
assertEqual(cn, c1.name());
|
||||
|
||||
internal.db._drop(cn);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief read by name
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -680,6 +722,29 @@ function CollectionSuite () {
|
|||
assertEqual(null, c2);
|
||||
|
||||
internal.db._drop(nn);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief rename a collection to an already existing collection
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRenameExisting : function () {
|
||||
var cn1 = "example";
|
||||
var cn2 = "example2";
|
||||
|
||||
internal.db._drop(cn1);
|
||||
internal.db._drop(cn2);
|
||||
var c1 = internal.db._create(cn1);
|
||||
var c2 = internal.db._create(cn2);
|
||||
|
||||
try {
|
||||
c1.rename(cn2);
|
||||
}
|
||||
catch (err) {
|
||||
assertEqual(ERRORS.ERROR_ARANGO_DUPLICATE_NAME.code, err.errorNum);
|
||||
}
|
||||
internal.db._drop(cn1);
|
||||
internal.db._drop(cn2);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -131,11 +131,7 @@ function AHUACATL_NORMALIZE (value) {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function AHUACATL_CLONE (obj) {
|
||||
if (obj == null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (typeof(obj) != "object") {
|
||||
if (obj == null || typeof(obj) != "object") {
|
||||
return obj;
|
||||
}
|
||||
|
||||
|
@ -2222,7 +2218,7 @@ function AHUACATL_MERGE () {
|
|||
}
|
||||
|
||||
for (var k in element) {
|
||||
if (!element.hasOwnProperty(k)) {
|
||||
if (! element.hasOwnProperty(k)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -2233,6 +2229,43 @@ function AHUACATL_MERGE () {
|
|||
return result;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief merge all arguments recursively
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function AHUACATL_MERGE_RECURSIVE () {
|
||||
var result = { };
|
||||
|
||||
for (var i in arguments) {
|
||||
var element = arguments[i];
|
||||
|
||||
if (AHUACATL_TYPEWEIGHT(element) !== AHUACATL_TYPEWEIGHT_DOCUMENT) {
|
||||
AHUACATL_THROW(internal.errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH, "MERGE_RECURSIVE");
|
||||
}
|
||||
|
||||
var recurse = function (old, element) {
|
||||
var r = AHUACATL_CLONE(old);
|
||||
|
||||
for (var k in element) {
|
||||
if (! element.hasOwnProperty(k)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (r.hasOwnProperty(k) && AHUACATL_TYPEWEIGHT(element[k]) === AHUACATL_TYPEWEIGHT_DOCUMENT) {
|
||||
r[k] = recurse(r[k], element[k]);
|
||||
}
|
||||
else {
|
||||
r[k] = element[k];
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
result = recurse(result, element);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief passthru the argument
|
||||
|
|
|
@ -873,6 +873,148 @@ function ahuacatlFunctionsTestSuite () {
|
|||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE({ }, { }, [ ])"); } ));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive1 : function () {
|
||||
var doc1 = "{ \"black\" : { \"enabled\" : true, \"visible\": false }, \"white\" : { \"enabled\" : true}, \"list\" : [ 1, 2, 3, 4, 5 ] }";
|
||||
var doc2 = "{ \"black\" : { \"enabled\" : false }, \"list\": [ 6, 7, 8, 9, 0 ] }";
|
||||
|
||||
var expected = [ { "black" : { "enabled" : false, "visible" : false }, "list" : [ 6, 7, 8, 9, 0 ], "white" : { "enabled" : true } } ];
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc2 + ")", false);
|
||||
assertEqual(expected, actual);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive2 : function () {
|
||||
var doc1 = "{ \"black\" : { \"enabled\" : true, \"visible\": false }, \"white\" : { \"enabled\" : true}, \"list\" : [ 1, 2, 3, 4, 5 ] }";
|
||||
var doc2 = "{ \"black\" : { \"enabled\" : false }, \"list\": [ 6, 7, 8, 9, 0 ] }";
|
||||
|
||||
var expected = [ { "black" : { "enabled" : true, "visible" : false }, "list" : [ 1, 2, 3, 4, 5 ], "white" : { "enabled" : true } } ];
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc2 + ", " + doc1 + ")", false);
|
||||
assertEqual(expected, actual);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive3 : function () {
|
||||
var doc1 = "{ \"a\" : 1, \"b\" : 2, \"c\" : 3 }";
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc1 + ")", false);
|
||||
assertEqual([ { "a" : 1, "b" : 2, "c" : 3 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc1 + ", " + doc1 + ")", false);
|
||||
assertEqual([ { "a" : 1, "b" : 2, "c" : 3 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc1 + ", " + doc1 + ", " + doc1 + ")", false);
|
||||
assertEqual([ { "a" : 1, "b" : 2, "c" : 3 } ], actual);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive4 : function () {
|
||||
var doc1 = "{ \"a\" : 1, \"b\" : 2, \"c\" : 3 }";
|
||||
var doc2 = "{ \"a\" : 2, \"b\" : 3, \"c\" : 4 }";
|
||||
var doc3 = "{ \"a\" : 3, \"b\" : 4, \"c\" : 5 }";
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc2 + ", " + doc3 + ")", false);
|
||||
assertEqual([ { "a" : 3, "b" : 4, "c" : 5 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc3 + ", " + doc2 + ")", false);
|
||||
assertEqual([ { "a" : 2, "b" : 3, "c" : 4 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc2 + ", " + doc3 + ", " + doc1 + ")", false);
|
||||
assertEqual([ { "a" : 1, "b" : 2, "c" : 3 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc3 + ", " + doc1 + ", " + doc2 + ")", false);
|
||||
assertEqual([ { "a" : 2, "b" : 3, "c" : 4 } ], actual);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive5 : function () {
|
||||
var doc1 = "{ \"a\" : 1, \"b\" : 2, \"c\" : 3 }";
|
||||
var doc2 = "{ \"1\" : 7, \"b\" : 8, \"y\" : 9 }";
|
||||
var doc3 = "{ \"x\" : 4, \"y\" : 5, \"z\" : 6 }";
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc2 + ", " + doc3 + ")", false);
|
||||
assertEqual([ { "1" : 7, "a" : 1, "b" : 8, "c" : 3, "x" : 4, "y": 5, "z" : 6 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc3 + ", " + doc2 + ")", false);
|
||||
assertEqual([ { "1" : 7, "a" : 1, "b" : 8, "c" : 3, "x" : 4, "y": 9, "z" : 6 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc2 + ", " + doc3 + ", " + doc1 + ")", false);
|
||||
assertEqual([ { "1" : 7, "a" : 1, "b" : 2, "c" : 3, "x" : 4, "y": 5, "z" : 6 } ], actual);
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc3 + ", " + doc1 + ", " + doc2 + ")", false);
|
||||
assertEqual([ { "1" : 7, "a" : 1, "b" : 8, "c" : 3, "x" : 4, "y": 9, "z" : 6 } ], actual);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursive6 : function () {
|
||||
var doc1 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"DE\" : { \"city\" : \"Cologne\" } } } } }";
|
||||
var doc2 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"DE\" : { \"city\" : \"Frankfurt\" } } } } }";
|
||||
var doc3 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"DE\" : { \"city\" : \"Munich\" } } } } }";
|
||||
var doc4 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"UK\" : { \"city\" : \"Manchester\" } } } } }";
|
||||
var doc5 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"UK\" : { \"city\" : \"London\" } } } } }";
|
||||
var doc6 = "{ \"continent\" : { \"Europe\" : { \"country\" : { \"FR\" : { \"city\" : \"Paris\" } } } } }";
|
||||
var doc7 = "{ \"continent\" : { \"Asia\" : { \"country\" : { \"CN\" : { \"city\" : \"Beijing\" } } } } }";
|
||||
var doc8 = "{ \"continent\" : { \"Asia\" : { \"country\" : { \"CN\" : { \"city\" : \"Shanghai\" } } } } }";
|
||||
var doc9 = "{ \"continent\" : { \"Asia\" : { \"country\" : { \"JP\" : { \"city\" : \"Tokyo\" } } } } }";
|
||||
var doc10 = "{ \"continent\" : { \"Australia\" : { \"country\" : { \"AU\" : { \"city\" : \"Sydney\" } } } } }";
|
||||
var doc11 ="{ \"continent\" : { \"Australia\" : { \"country\" : { \"AU\" : { \"city\" : \"Melbourne\" } } } } }";
|
||||
var doc12 ="{ \"continent\" : { \"Africa\" : { \"country\" : { \"EG\" : { \"city\" : \"Cairo\" } } } } }";
|
||||
|
||||
var actual = getQueryResults("RETURN MERGE_RECURSIVE(" + doc1 + ", " + doc2 + ", " + doc3 + ", " + doc4 + ", " + doc5 + ", " + doc6 + ", " + doc7 + ", " + doc8 + ", " + doc9 + ", " + doc10 + ", " + doc11 + ", " + doc12 + ")", false);
|
||||
|
||||
assertEqual([ { "continent" : {
|
||||
"Europe" : { "country" : { "DE" : { "city" : "Munich" }, "UK" : { "city" : "London" }, "FR" : { "city" : "Paris" } } },
|
||||
"Asia" : { "country" : { "CN" : { "city" : "Shanghai" }, "JP" : { "city" : "Tokyo" } } },
|
||||
"Australia" : { "country" : { "AU" : { "city" : "Melbourne" } } },
|
||||
"Africa" : { "country" : { "EG" : { "city" : "Cairo" } } }
|
||||
} } ], actual);
|
||||
},
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test merge_recursive function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testMergeRecursiveInvalid : function () {
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE()"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, null)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, true)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, 3)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, \"yes\")"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, [ ])"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE(null, { })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE(true, { })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE(3, { })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE(\"yes\", { })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE([ ], { })"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, { }, null)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, { }, true)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, { }, 3)"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, { }, \"yes\")"); } ));
|
||||
assertEqual(errors.ERROR_QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH.code, getErrorCode(function() { AHUACATL_RUN("RETURN MERGE_RECURSIVE({ }, { }, [ ])"); } ));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test union function
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -59,7 +59,9 @@ static void AddNewElement (TRI_associative_array_t* array, void* element) {
|
|||
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesR++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// add a new element to the associative array
|
||||
|
@ -80,7 +82,9 @@ static void ResizeAssociativeArray (TRI_associative_array_t* array) {
|
|||
oldAlloc = array->_nrAlloc;
|
||||
|
||||
array->_nrAlloc = 2 * array->_nrAlloc + 1;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrResizes++;
|
||||
#endif
|
||||
|
||||
array->_table = TRI_Allocate(array->_memoryZone, array->_nrAlloc * array->_elementSize, true);
|
||||
|
||||
|
@ -162,6 +166,8 @@ void TRI_InitAssociativeArray (TRI_associative_array_t* array,
|
|||
}
|
||||
|
||||
array->_nrUsed = 0;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrFinds = 0;
|
||||
array->_nrAdds = 0;
|
||||
array->_nrRems = 0;
|
||||
|
@ -170,6 +176,7 @@ void TRI_InitAssociativeArray (TRI_associative_array_t* array,
|
|||
array->_nrProbesA = 0;
|
||||
array->_nrProbesD = 0;
|
||||
array->_nrProbesR = 0;
|
||||
#endif
|
||||
}
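All statistics counters are now compiled in only when TRI_INTERNAL_STATS is defined, so production builds pay no bookkeeping cost. A sketch of how the counters could be inspected in an instrumented build (the printf formatting is illustrative; the field names are the ones initialised above):

#ifdef TRI_INTERNAL_STATS
/* sketch: dump probe statistics for an associative array */
printf("finds=%llu adds=%llu rems=%llu probesF=%llu probesA=%llu probesD=%llu probesR=%llu\n",
       (unsigned long long) array->_nrFinds,
       (unsigned long long) array->_nrAdds,
       (unsigned long long) array->_nrRems,
       (unsigned long long) array->_nrProbesF,
       (unsigned long long) array->_nrProbesA,
       (unsigned long long) array->_nrProbesD,
       (unsigned long long) array->_nrProbesR);
#endif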
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -214,14 +221,18 @@ void* TRI_LookupByKeyAssociativeArray (TRI_associative_array_t* array, void* key
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrFinds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesF++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// return whatever we found
|
||||
|
@ -256,14 +267,18 @@ void* TRI_LookupByElementAssociativeArray (TRI_associative_array_t* array, void*
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrFinds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesF++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// return whatever we found
|
||||
|
@ -304,14 +319,18 @@ bool TRI_InsertElementAssociativeArray (TRI_associative_array_t* array, void* el
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrAdds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesA++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we found an element, return
|
||||
|
@ -353,14 +372,18 @@ bool TRI_InsertKeyAssociativeArray (TRI_associative_array_t* array, void* key, v
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrAdds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesA++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we found an element, return
|
||||
|
@ -396,14 +419,18 @@ bool TRI_RemoveElementAssociativeArray (TRI_associative_array_t* array, void* el
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrRems++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualElementElement(array, element, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesD++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we did not find such an item return false
|
||||
|
@ -454,14 +481,18 @@ bool TRI_RemoveKeyAssociativeArray (TRI_associative_array_t* array, void* key, v
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrRems++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (! array->isEmptyElement(array, array->_table + i * array->_elementSize)
|
||||
&& ! array->isEqualKeyElement(array, key, array->_table + i * array->_elementSize)) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesD++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we did not find such an item return false
|
||||
|
@ -541,7 +572,9 @@ static void AddNewElementPointer (TRI_associative_pointer_t* array, void* elemen
|
|||
|
||||
while (array->_table[i] != NULL) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesR++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// add a new element to the associative array
|
||||
|
@ -562,7 +595,9 @@ static void ResizeAssociativePointer (TRI_associative_pointer_t* array) {
|
|||
oldAlloc = array->_nrAlloc;
|
||||
|
||||
array->_nrAlloc = 2 * array->_nrAlloc + 1;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrResizes++;
|
||||
#endif
|
||||
|
||||
array->_table = TRI_Allocate(array->_memoryZone, array->_nrAlloc * sizeof(void*), true);
|
||||
|
||||
|
@ -623,6 +658,8 @@ void TRI_InitAssociativePointer (TRI_associative_pointer_t* array,
|
|||
}
|
||||
|
||||
array->_nrUsed = 0;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrFinds = 0;
|
||||
array->_nrAdds = 0;
|
||||
array->_nrRems = 0;
|
||||
|
@ -631,6 +668,7 @@ void TRI_InitAssociativePointer (TRI_associative_pointer_t* array,
|
|||
array->_nrProbesA = 0;
|
||||
array->_nrProbesD = 0;
|
||||
array->_nrProbesR = 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -695,13 +733,17 @@ void* TRI_LookupByKeyAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrFinds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesF++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// return whatever we found
|
||||
|
@ -721,13 +763,17 @@ void* TRI_LookupByElementAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrFinds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesF++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// return whatever we found
|
||||
|
@ -755,13 +801,17 @@ void* TRI_InsertElementAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrAdds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesA++;
|
||||
#endif
|
||||
}
|
||||
|
||||
old = array->_table[i];
|
||||
|
@ -809,13 +859,17 @@ void* TRI_InsertKeyAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrAdds++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesA++;
|
||||
#endif
|
||||
}
|
||||
|
||||
old = array->_table[i];
|
||||
|
@ -855,13 +909,17 @@ void* TRI_RemoveElementAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashElement(array, element);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrRems++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesD++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we did not find such an item return 0
|
||||
|
@ -907,13 +965,17 @@ void* TRI_RemoveKeyAssociativePointer (TRI_associative_pointer_t* array,
|
|||
hash = array->hashKey(array, key);
|
||||
i = hash % array->_nrAlloc;
|
||||
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
// update statistics
|
||||
array->_nrRems++;
|
||||
#endif
|
||||
|
||||
// search the table
|
||||
while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
|
||||
i = (i + 1) % array->_nrAlloc;
|
||||
#ifdef TRI_INTERNAL_STATS
|
||||
array->_nrProbesD++;
|
||||
#endif
|
||||
}
|
||||
|
||||
// if we did not find such an item return false
|
||||
|
@@ -988,7 +1050,9 @@ static void AddNewElementSynced (TRI_associative_synced_t* array, void* element)

  while (array->_table[i] != NULL) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesR++;
#endif
  }

  // add a new element to the associative array

@@ -1011,7 +1075,9 @@ static void ResizeAssociativeSynced (TRI_associative_synced_t* array) {
  oldAlloc = array->_nrAlloc;

  array->_nrAlloc = 2 * array->_nrAlloc + 1;
#ifdef TRI_INTERNAL_STATS
  array->_nrResizes++;
#endif

  array->_table = TRI_Allocate(array->_memoryZone, array->_nrAlloc * sizeof(void*), true);

@@ -1072,6 +1138,8 @@ void TRI_InitAssociativeSynced (TRI_associative_synced_t* array,
  }

  array->_nrUsed = 0;

#ifdef TRI_INTERNAL_STATS
  array->_nrFinds = 0;
  array->_nrAdds = 0;
  array->_nrRems = 0;

@@ -1080,6 +1148,7 @@ void TRI_InitAssociativeSynced (TRI_associative_synced_t* array,
  array->_nrProbesA = 0;
  array->_nrProbesD = 0;
  array->_nrProbesR = 0;
#endif

  TRI_InitReadWriteLock(&array->_lock);
}
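When the table fills up, ResizeAssociativeSynced grows the slot array to 2 * n + 1 entries and the existing elements are re-inserted into the new table via the probing helper, which is what the _nrProbesR counter measures. The rehash loop itself is not shown in the hunk above; the following is a self-contained sketch of that grow-and-rehash step with invented names, under the assumption that resizing works the usual way for open addressing.

    // Illustrative sketch, not part of the commit: grow to 2 * n + 1 slots and re-insert.
    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    struct IntTable {
      uint64_t* slots;     // 0 means "empty" in this toy example
      size_t    nrAlloc;
    };

    static void Resize (IntTable* table) {
      uint64_t* oldTable = table->slots;
      size_t oldAlloc = table->nrAlloc;

      table->nrAlloc = 2 * table->nrAlloc + 1;
      table->slots = (uint64_t*) std::calloc(table->nrAlloc, sizeof(uint64_t));

      // re-insert every element; its home slot changes because nrAlloc changed
      for (size_t j = 0; j < oldAlloc; ++j) {
        if (oldTable[j] != 0) {
          size_t i = oldTable[j] % table->nrAlloc;
          while (table->slots[i] != 0) {
            i = (i + 1) % table->nrAlloc;
          }
          table->slots[i] = oldTable[j];
        }
      }

      std::free(oldTable);
    }

    int main () {
      IntTable t;
      t.nrAlloc = 3;
      t.slots = (uint64_t*) std::calloc(t.nrAlloc, sizeof(uint64_t));
      t.slots[7 % 3] = 7;
      Resize(&t);          // table now has 7 slots; 7 ends up at index 7 % 7 == 0
      std::free(t.slots);
      return 0;
    }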
@@ -1129,15 +1198,19 @@ void const* TRI_LookupByKeyAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashKey(array, key);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrFinds++;
#endif

  // search the table
  TRI_ReadLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesF++;
#endif
  }

  result = array->_table[i];

@@ -1162,15 +1235,19 @@ void const* TRI_LookupByElementAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashElement(array, element);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrFinds++;
#endif

  // search the table
  TRI_ReadLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesF++;
#endif
  }

  result = array->_table[i];

@@ -1201,15 +1278,19 @@ void* TRI_InsertElementAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashElement(array, element);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrAdds++;
#endif

  // search the table, TODO optimise the locks
  TRI_WriteLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  old = array->_table[i];

@@ -1254,15 +1335,19 @@ void* TRI_InsertKeyAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashKey(array, key);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrAdds++;
#endif

  // search the table
  TRI_WriteLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesA++;
#endif
  }

  old = array->_table[i];

@@ -1300,15 +1385,19 @@ void* TRI_RemoveElementAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashElement(array, element);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrRems++;
#endif

  // search the table
  TRI_WriteLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualElementElement(array, element, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  // if we did not find such an item return 0

@@ -1356,15 +1445,19 @@ void* TRI_RemoveKeyAssociativeSynced (TRI_associative_synced_t* array,
  hash = array->hashKey(array, key);
  i = hash % array->_nrAlloc;

#ifdef TRI_INTERNAL_STATS
  // update statistics
  array->_nrRems++;
#endif

  // search the table
  TRI_WriteLockReadWriteLock(&array->_lock);

  while (array->_table[i] != NULL && ! array->isEqualKeyElement(array, key, array->_table[i])) {
    i = (i + 1) % array->_nrAlloc;
#ifdef TRI_INTERNAL_STATS
    array->_nrProbesD++;
#endif
  }

  // if we did not find such an item return false
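The ..Synced variants differ from the plain pointer array only in that every operation brackets the probing loop with the array's read-write lock: TRI_ReadLockReadWriteLock for lookups, TRI_WriteLockReadWriteLock for inserts and removes. The sketch below expresses the same reader/writer discipline with std::shared_mutex (C++17) and a std::map standing in for the probing loop; it is an illustration of the locking pattern, not the TRI_* API.

    // Illustrative sketch, not part of the commit: shared lock for reads, exclusive for writes.
    #include <map>
    #include <mutex>
    #include <shared_mutex>
    #include <string>
    #include <cstdio>

    struct SyncedTable {
      std::map<std::string, int> table;
      mutable std::shared_mutex lock;

      int lookup (const std::string& key) const {
        std::shared_lock<std::shared_mutex> guard(lock);   // many readers may probe at once
        std::map<std::string, int>::const_iterator it = table.find(key);
        return it == table.end() ? 0 : it->second;
      }

      void insert (const std::string& key, int value) {
        std::unique_lock<std::shared_mutex> guard(lock);   // writers are exclusive
        table[key] = value;
      }
    };

    int main () {
      SyncedTable t;
      t.insert("answer", 42);
      std::printf("%d\n", t.lookup("answer"));   // prints 42
      return 0;
    }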
@@ -72,6 +72,7 @@ typedef struct TRI_associative_array_s {

  char* _table; // the table itself

#ifdef TRI_INTERNAL_STATS
  uint64_t _nrFinds; // statistics: number of lookup calls
  uint64_t _nrAdds; // statistics: number of insert calls
  uint64_t _nrRems; // statistics: number of remove calls

@@ -81,6 +82,7 @@ typedef struct TRI_associative_array_s {
  uint64_t _nrProbesA; // statistics: number of misses while inserting
  uint64_t _nrProbesD; // statistics: number of misses while removing
  uint64_t _nrProbesR; // statistics: number of misses while adding
#endif

  TRI_memory_zone_t* _memoryZone;
}

@@ -225,6 +227,7 @@ typedef struct TRI_associative_pointer_s {

  void** _table; // the table itself

#ifdef TRI_INTERNAL_STATS
  uint64_t _nrFinds; // statistics: number of lookup calls
  uint64_t _nrAdds; // statistics: number of insert calls
  uint64_t _nrRems; // statistics: number of remove calls

@@ -234,6 +237,7 @@ typedef struct TRI_associative_pointer_s {
  uint64_t _nrProbesA; // statistics: number of misses while inserting
  uint64_t _nrProbesD; // statistics: number of misses while removing
  uint64_t _nrProbesR; // statistics: number of misses while adding
#endif

  TRI_memory_zone_t* _memoryZone;
}

@@ -379,6 +383,7 @@ typedef struct TRI_associative_synced_s {

  TRI_read_write_lock_t _lock;

#ifdef TRI_INTERNAL_STATS
  uint64_t _nrFinds; // statistics: number of lookup calls
  uint64_t _nrAdds; // statistics: number of insert calls
  uint64_t _nrRems; // statistics: number of remove calls

@@ -388,6 +393,7 @@ typedef struct TRI_associative_synced_s {
  uint64_t _nrProbesA; // statistics: number of misses while inserting
  uint64_t _nrProbesD; // statistics: number of misses while removing
  uint64_t _nrProbesR; // statistics: number of misses while adding
#endif

  TRI_memory_zone_t* _memoryZone;
}
@@ -76,7 +76,7 @@ ERROR_ARANGO_DOCUMENT_NOT_FOUND,1202,"document not found","Will be raised when a
ERROR_ARANGO_COLLECTION_NOT_FOUND,1203,"collection not found","Will be raised when a collection with a given identifier or name is unknown."
ERROR_ARANGO_COLLECTION_PARAMETER_MISSING,1204,"parameter 'collection' not found","Will be raised when the collection parameter is missing."
ERROR_ARANGO_DOCUMENT_HANDLE_BAD,1205,"illegal document handle","Will be raised when a document handle is corrupt."
ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL,1206,"maixaml size of journal too small","Will be raised when the maximal size of the journal is too small."
ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL,1206,"maixmal size of journal too small","Will be raised when the maximal size of the journal is too small."
ERROR_ARANGO_DUPLICATE_NAME,1207,"duplicate name","Will be raised when a name duplicate is detected."
ERROR_ARANGO_ILLEGAL_NAME,1208,"illegal name","Will be raised when an illegal name is detected."
ERROR_ARANGO_NO_INDEX,1209,"no suitable index known","Will be raised when no suitable index for the query is known."
@@ -75,8 +75,15 @@ void TRI_InitVector (TRI_vector_t* vector, TRI_memory_zone_t* zone, size_t elementSize
  vector->_growthFactor = GROW_FACTOR;
}

int TRI_InitVector2 (TRI_vector_t* vector, TRI_memory_zone_t* zone, size_t elementSize,
                     size_t initialCapacity, double growthFactor) {
////////////////////////////////////////////////////////////////////////////////
/// @brief initialises a vector, with user-definable settings
////////////////////////////////////////////////////////////////////////////////

int TRI_InitVector2 (TRI_vector_t* vector,
                     TRI_memory_zone_t* zone,
                     size_t elementSize,
                     size_t initialCapacity,
                     double growthFactor) {
  vector->_memoryZone = zone;
  vector->_elementSize = elementSize;
  vector->_buffer = NULL;
@@ -81,8 +81,11 @@ TRI_vector_t;

void TRI_InitVector (TRI_vector_t*, TRI_memory_zone_t*, size_t elementSize);

int TRI_InitVector2 (TRI_vector_t*, TRI_memory_zone_t*, size_t elementSize,
                     size_t initialCapacity, double growthFactor);
int TRI_InitVector2 (TRI_vector_t*,
                     TRI_memory_zone_t*,
                     size_t elementSize,
                     size_t initialCapacity,
                     double growthFactor);

////////////////////////////////////////////////////////////////////////////////
/// @brief destroys a vector, but does not free the pointer
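TRI_InitVector2 extends TRI_InitVector by letting the caller choose the initial capacity and the growth factor instead of the library defaults, and it reports allocation failure through its int return value. The sketch below mirrors what such an initialiser does with a self-contained stand-in type; it is not the ArangoDB API, and the real function additionally takes a TRI_memory_zone_t*.

    // Illustrative sketch, not part of the commit: init with caller-chosen capacity and growth.
    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>

    struct Vector {
      char*  buffer;
      size_t elementSize;
      size_t capacity;     // number of elements the buffer can hold
      size_t length;
      double growthFactor; // capacity multiplier applied when the vector is full
    };

    // returns 0 on success, non-zero if the initial allocation failed
    static int InitVector2 (Vector* v, size_t elementSize, size_t initialCapacity, double growthFactor) {
      v->elementSize  = elementSize;
      v->capacity     = initialCapacity;
      v->length       = 0;
      v->growthFactor = growthFactor;
      v->buffer       = (char*) std::calloc(initialCapacity, elementSize);
      return v->buffer == NULL ? 1 : 0;
    }

    int main () {
      Vector v;
      // pre-size for 1024 ints and grow by 50 % when full, instead of the defaults
      if (InitVector2(&v, sizeof(int), 1024, 1.5) != 0) {
        return 1;
      }
      std::printf("capacity: %zu elements\n", v.capacity);
      std::free(v.buffer);
      return 0;
    }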
@@ -54,7 +54,7 @@ void TRI_InitialiseErrorMessages (void) {
  REG_ERROR(ERROR_ARANGO_COLLECTION_NOT_FOUND, "collection not found");
  REG_ERROR(ERROR_ARANGO_COLLECTION_PARAMETER_MISSING, "parameter 'collection' not found");
  REG_ERROR(ERROR_ARANGO_DOCUMENT_HANDLE_BAD, "illegal document handle");
  REG_ERROR(ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL, "maixaml size of journal too small");
  REG_ERROR(ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL, "maixmal size of journal too small");
  REG_ERROR(ERROR_ARANGO_DUPLICATE_NAME, "duplicate name");
  REG_ERROR(ERROR_ARANGO_ILLEGAL_NAME, "illegal name");
  REG_ERROR(ERROR_ARANGO_NO_INDEX, "no suitable index known");
@@ -107,7 +107,7 @@ extern "C" {
/// Will be raised when the collection parameter is missing.
/// - 1205: @CODE{illegal document handle}
/// Will be raised when a document handle is corrupt.
/// - 1206: @CODE{maixaml size of journal too small}
/// - 1206: @CODE{maixmal size of journal too small}
/// Will be raised when the maximal size of the journal is too small.
/// - 1207: @CODE{duplicate name}
/// Will be raised when a name duplicate is detected.

@@ -761,7 +761,7 @@ void TRI_InitialiseErrorMessages (void);
////////////////////////////////////////////////////////////////////////////////
/// @brief 1206: ERROR_ARANGO_MAXIMAL_SIZE_TOO_SMALL
///
/// maixaml size of journal too small
/// maixmal size of journal too small
///
/// Will be raised when the maximal size of the journal is too small.
////////////////////////////////////////////////////////////////////////////////
@@ -195,19 +195,19 @@ Endpoint* Endpoint::factory (const Endpoint::EndpointType type,

  if (copy[0] == '[') {
    // ipv6
    found = copy.find("]:", 1);
    if (found != string::npos && found + 2 < copy.size()) {
    found = copy.find("]:", 1);
    if (found != string::npos && found > 2 && found + 2 < copy.size()) {
      // hostname and port (e.g. [address]:port)
      uint16_t port = (uint16_t) StringUtils::uint32(copy.substr(found + 2));

      return new EndpointIpV6(type, protocol, encryption, specification, listenBacklog, copy.substr(0, found + 1), port);
      return new EndpointIpV6(type, protocol, encryption, specification, listenBacklog, copy.substr(1, found - 1), port);
    }

    found = copy.find("]", 1);
    if (found != string::npos && found + 1 == copy.size()) {
    if (found != string::npos && found > 2 && found + 1 == copy.size()) {
      // hostname only (e.g. [address])

      return new EndpointIpV6(type, protocol, encryption, specification, listenBacklog, copy.substr(0, found + 1), EndpointIp::_defaultPort);
      return new EndpointIpV6(type, protocol, encryption, specification, listenBacklog, copy.substr(1, found - 1), EndpointIp::_defaultPort);
    }

    // invalid address specification
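The corrected factory code requires a non-empty bracketed address (found > 2) and, more importantly, passes copy.substr(1, found - 1) to the EndpointIpV6 constructor, i.e. the IPv6 address without its surrounding brackets, where the old code passed the bracketed substring. A standalone sketch of that parsing step, mirroring the corrected logic but not the Endpoint class itself:

    // Illustrative sketch, not part of the commit. For "[::1]:8529", find("]:") returns 4,
    // substr(1, found - 1) yields "::1" and substr(found + 2) yields "8529"; the old
    // substr(0, found + 1) would have kept the brackets in the host.
    #include <string>
    #include <cstdio>

    static bool parseIpV6Endpoint (const std::string& copy, std::string& host, std::string& port) {
      if (copy.empty() || copy[0] != '[') {
        return false;
      }

      std::string::size_type found = copy.find("]:", 1);
      if (found != std::string::npos && found > 2 && found + 2 < copy.size()) {
        host = copy.substr(1, found - 1);   // address without the surrounding brackets
        port = copy.substr(found + 2);      // everything after "]:"
        return true;
      }

      found = copy.find("]", 1);
      if (found != std::string::npos && found > 2 && found + 1 == copy.size()) {
        host = copy.substr(1, found - 1);   // "[address]" with no port
        port = "";
        return true;
      }

      return false;                         // invalid address specification
    }

    int main () {
      std::string host, port;
      if (parseIpV6Endpoint("[::1]:8529", host, port)) {
        std::printf("host=%s port=%s\n", host.c_str(), port.c_str());   // host=::1 port=8529
      }
      return 0;
    }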
@@ -501,7 +501,7 @@ EndpointIp::EndpointIp (const Endpoint::EndpointType type,
                        const std::string& host,
                        const uint16_t port) :
    Endpoint(type, domainType, protocol, encryption, specification, listenBacklog), _host(host), _port(port) {

  assert(domainType == DOMAIN_IPV4 || domainType == Endpoint::DOMAIN_IPV6);
}
@@ -629,14 +629,6 @@ namespace triagens {
          return _host;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host strin for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        std::string getHostString () const {
          return _host + ':' + triagens::basics::StringUtils::itoa(_port);
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
        ////////////////////////////////////////////////////////////////////////////////

@@ -728,6 +720,15 @@ namespace triagens {
          return AF_INET;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host string for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        string getHostString () const {
          return getHost() + ':' + triagens::basics::StringUtils::itoa(getPort());
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
        ////////////////////////////////////////////////////////////////////////////////

@@ -791,6 +792,14 @@ namespace triagens {
        int getDomain () const {
          return AF_INET6;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host string for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        string getHostString () const {
          return '[' + getHost() + "]:" + triagens::basics::StringUtils::itoa(getPort());
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
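These hunks evidently move getHostString() out of the shared IP endpoint class and give each address family its own implementation: the IPv4 variant returns "host:port", while the IPv6 variant brackets the address, since an HTTP Host header must bracket a literal IPv6 address so the colons inside it are not mistaken for the port separator. A small sketch of the two formats, using free functions in place of the member functions above:

    // Illustrative sketch, not part of the commit: host strings for IPv4 and IPv6 endpoints.
    #include <string>
    #include <cstdio>

    static std::string hostStringV4 (const std::string& host, int port) {
      return host + ':' + std::to_string(port);
    }

    static std::string hostStringV6 (const std::string& host, int port) {
      return '[' + host + "]:" + std::to_string(port);
    }

    int main () {
      std::printf("%s\n", hostStringV4("127.0.0.1", 8529).c_str());   // 127.0.0.1:8529
      std::printf("%s\n", hostStringV6("::1", 8529).c_str());         // [::1]:8529
      return 0;
    }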
@@ -175,14 +175,6 @@ namespace triagens {
          return _host;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host strin for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        string getHostString () const {
          return _host + ':' + triagens::basics::StringUtils::itoa(_port);
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
        ////////////////////////////////////////////////////////////////////////////////

@@ -101,6 +101,14 @@ namespace triagens {
          return AF_INET;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host string for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        string getHostString () const {
          return _host + ':' + triagens::basics::StringUtils::itoa(_port);
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
        ////////////////////////////////////////////////////////////////////////////////

@@ -101,6 +101,15 @@ namespace triagens {
          return AF_INET6;
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @brief get host string for HTTP requests
        ////////////////////////////////////////////////////////////////////////////////

        string getHostString () const {
          return '[' + _host + "]:" + triagens::basics::StringUtils::itoa(_port);
        }

        ////////////////////////////////////////////////////////////////////////////////
        /// @}
        ////////////////////////////////////////////////////////////////////////////////