mirror of https://gitee.com/bigwinds/arangodb

Merge branch 'devel' of ssh://github.com/triAGENS/ArangoDB into aql-feature-optimize-or

This commit is contained in: commit 901c65559c

CHANGELOG (10 changed lines)
@@ -1,6 +1,16 @@
 v2.3.0 (XXXX-XX-XX)
 -------------------

+* added command-line option `--javascript.v8-contexts` to control the number of
+  V8 contexts created in arangod.
+
+  Previously, the number of V8 contexts was equal to the number of server threads
+  (as specified by option `--server.threads`). However, it may be sensible to
+  create different amounts of threads and V8 contexts. If the option is not
+  specified, the number of V8 contexts created will be equal to the number of
+  server threads. Thus no change in configuration is required to keep the old
+  behavior.
+
 * removed bitarray indexes

 * removed internal "_admin/modules/flush" in order to fix requireApp
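As a quick illustration of the new option, server threads and V8 contexts can now be sized independently, e.g. (the values below are illustrative only, not defaults):

     unix> arangod --server.threads 16 --javascript.v8-contexts 8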
@@ -8,7 +8,7 @@ You can use this to define your own extra variables and functions that you need
 For example, you could put the following into the *.arangosh.rc* file in your home
 directory:

-```
+```js
 // var keyword omitted intentionally,
 // otherwise "timed" would not survive the scope of this script
 timed = function (cb) {
@@ -23,7 +23,7 @@ This will make a function named *timed* available in _arangosh_ in the global sc

 You can now start _arangosh_ and invoke the function like this:

-```
+```js
 timed(function () {
   for (var i = 0; i < 1000; ++i) {
     db.test.save({ value: i });
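Putting the two snippets above together, a complete version of the helper could look like the following sketch. The full function body is not part of this diff; `internal.time()` and `internal.print()` are assumed here as the timing and output facilities available in arangosh.

```js
// Illustrative sketch only -- the exact body is not shown in the diff above.
// var keyword omitted intentionally,
// otherwise "timed" would not survive the scope of this script
timed = function (cb) {
  var internal = require("internal");
  var start = internal.time();   // wall-clock time in seconds
  cb();
  internal.print("execution took:", internal.time() - start, "s");
};

// invoked from arangosh:
timed(function () {
  for (var i = 0; i < 1000; ++i) {
    db.test.save({ value: i });
  }
});
```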
@@ -36,6 +36,10 @@
 @startDocuBlock serverAllowMethod


+!SUBSECTION Server threads
+@startDocuBlock serverThreads
+
+
 !SUBSECTION Keyfile
 @startDocuBlock serverKeyfile

@@ -64,12 +68,12 @@
 @startDocuBlock serverBacklog


-!SUBSECTION Disable statics
+!SUBSECTION Disable statistics

 `--disable-statistics value`

 If this option is *value* is *true*, then ArangoDB's statistics gathering
-is turned off. Statistics gathering causes constant CPU activity so using this
+is turned off. Statistics gathering causes regular CPU activity so using this
 option to turn it off might relieve heavy-loaded instances.
 Note: this option is only available when ArangoDB has not been compiled with
 the option *--disable-figures*.
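For instance, statistics gathering could be switched off at startup like this (illustrative invocation):

     unix> arangod --disable-statistics true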
@@ -90,6 +94,10 @@ the option *--disable-figures*.
 @startDocuBlock databaseForceSyncProperties


+!SUBSECTION V8 Contexts
+@startDocuBlock v8Contexts
+
+
 !SUBSECTION Frequency
 @startDocuBlock jsGcFrequency

@@ -1,17 +1,17 @@
 !CHAPTER Command-Line Options for Communication

 !SUBSECTION Scheduler threads
 <!-- lib/Scheduler/ApplicationScheduler.h -->
 @startDocuBlock schedulerThreads


 !SUBSECTION Scheduler maximal queue size
 <!-- arangod/RestServer/ArangoServer.h -->
-@startDocuBlock serverAuthenticationDisable
+@startDocuBlock schedulerMaximalQueueSize


 !SUBSECTION Scheduler backend
 <!-- lib/Scheduler/ApplicationScheduler.h -->
 @startDocuBlock schedulerBackend


 !SUBSECTION Io backends
 `--show-io-backends`

@@ -2,7 +2,7 @@

 !SUBSECTION In Case Of Disaster

-The following command starts a emergency console.
+The following command starts an emergency console.

 **Note**: Never start the emergency console for a database which also has a
 server attached to it. In general the ArangoDB shell is what you want.
@@ -14,18 +14,16 @@ ArangoDB shell [V8 version 3.9.4, DB version 1.x.y]
 arango> 1 + 2;
 3

-arango> db.geo.count();
+arango> var db = require("org/arangodb").db; db.geo.count();
 703

 ```

-The emergency console disables the HTTP interface of the server and
-opens a JavaScript console on standard output instead. This allows you
-to debug and examine collections and documents without interference
-from the outside. In most respects the emergency console behaves like
-the normal ArangoDB shell - but with exclusive access and no
-client/server communication.
+The emergency console provides a JavaScript console directly running in the
+arangod server process. This allows to debug and examine collections and
+documents as with the normal ArangoDB shell, but without client/server
+communication.

-However, it is very likely that you never need the emergency console
+However, it is very likely that you will never need the emergency console
 unless you are an ArangoDB developer.

@@ -1,4 +0,0 @@
-!SUBSECTION Command-Line Options for Random Numbers
-
-<!-- lib/ApplicationServer/ApplicationServer.h -->
-@startDocuBlock randomGenerator
@@ -138,14 +138,13 @@
 * [Authentication](Sharding/Authentication.md)
 * [Firewall setup](Sharding/FirewallSetup.md)
 <!-- 20 -->
-* [Configure ArangoDB](ConfigureArango/README.md)
+* [Server Configuration](ConfigureArango/README.md)
 * [Arangod options](ConfigureArango/Arangod.md)
 * [Write-ahead log options](ConfigureArango/Wal.md)
 * [Endpoints options](ConfigureArango/Endpoint.md)
 * [Cluster options](ConfigureArango/Cluster.md)
 * [Logging options](ConfigureArango/Logging.md)
 * [Communication options](ConfigureArango/Communication.md)
-* [Random numbers](ConfigureArango/RandomNumbers.md)
 * [Authentication](ConfigureArango/Authentication.md)
 * [Emergency Console](ConfigureArango/EmergencyConsole.md)
 <!-- 21 -->
README.md (24 changed lines)
@@ -40,22 +40,28 @@ For Mac OSX users: execute
 For Windows and Linux users: use the installer script or distribution package
 from our [download page](http://www.arangodb.com/download).

-If the package manager has not already started the ArangoDB server, use
+If the package manager has not already started the ArangoDB server, use the
 following command to start it.

     unix> /path/to/sbin/arangod
     2012-03-30T12:54:19Z [11794] INFO ArangoDB (version 2.x.y) is ready for business
     2012-03-30T12:54:19Z [11794] INFO Have Fun!

-`/path/to/sbin` is OS dependent. It will normally by either `/usr/sbin` or `/user/local/sbin`. Point your browser to
+`/path/to/sbin` is OS dependent. It will normally be either `/usr/sbin` or `/user/local/sbin`.
+
+To access ArangoDB in your browser, open the following URL

     http://localhost:8529/

-and select `Tools / JS Shell`. You can now use the Arango shell from within your browser. Alternative, it is available as command-line tool _arangosh_.
+and select `Tools / JS Shell`. You can now use the Arango shell from within your browser.
+
+Alternatively, a scriptable shell is available as a command-line tool _arangosh_.

     arangosh> db._create("hello");
     arangosh> db.hello.save({ world: "earth" });

-Congratulations! You have created your first collection called `hello` and your first document. To verify your achievements, type:
+Congratulations! You have created your first collection named `hello` and your first document.
+To verify your achievements, type:

     arangosh> db.hello.toArray();

@@ -64,15 +70,15 @@ More Information
 ----------------

 Please check the
-[Installation Manual](http://www.arangodb.com/manuals/current/InstallManual.html)
+[Installation Manual](https://www.arangodb.com/Installing/README.html)
 for installation and compilation instructions.

 The
-[User Manual](http://www.arangodb.com/manuals/current/UserManual.html)
+[User Manual](https://www.arangodb.com/FirstSteps/README.html)
 has an introductory chapter showing the basic operations of ArangoDB.

 Or you can use the
-[online tutorial](http://www.arangodb.com/try)
+[online tutorial](https://www.arangodb.com/tryitout)
 to play with ArangoDB without installing it locally.

@@ -86,7 +92,7 @@ you report them:

 You can use the Google group for improvements, feature requests, comments

-[http://www.arangodb.com/community](http://www.arangodb.com/community)
+[http://www.arangodb.com/community](https://www.arangodb.com/community)


 Citing ArangoDB
@@ -96,7 +102,7 @@ Please kindly cite ArangoDB in your publications if it helps your research:
 ```bibtex
 @misc{ArangoDB2014,
   Author = {ArangoDB},
-  Title = { {ArangoDB 2.2}: An Open Source multi-purpose database supporting flexible data models for documents, graphs, and key-values.},
+  Title = { {ArangoDB 2.3}: An Open source, multi-purpose database supporting flexible data models for documents, graphs, and key-values.},
   Year = {2014},
   Howpublished = {\url{http://arangodb.com/}
 }
@@ -4706,13 +4706,30 @@ int RemoteBlock::shutdown (int errorCode) {
     return TRI_ERROR_NO_ERROR;
   }

   // If we get here, then res->result is the response which will be
   // a serialized AqlItemBlock:
   StringBuffer const& responseBodyBuf(res->result->getBody());
   Json responseBodyJson(TRI_UNKNOWN_MEM_ZONE,
                         TRI_JsonString(TRI_UNKNOWN_MEM_ZONE,
                                        responseBodyBuf.begin()));

+  // read "warnings" attribute if present and add it our query
+  if (responseBodyJson.isArray()) {
+    auto warnings = responseBodyJson.get("warnings");
+    if (warnings.isList()) {
+      auto query = _engine->getQuery();
+      for (size_t i = 0; i < warnings.size(); ++i) {
+        auto warning = warnings.at(i);
+        if (warning.isArray()) {
+          auto code = warning.get("code");
+          auto message = warning.get("message");
+          if (code.isNumber() && message.isString()) {
+            query->registerWarning(static_cast<int>(code.json()->_value._number),
+                                   message.json()->_value._string.data);
+          }
+        }
+      }
+    }
+  }
+
   return JsonHelper::getNumericValue<int>
           (responseBodyJson.json(), "code", TRI_ERROR_INTERNAL);
   LEAVE_BLOCK
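For orientation, the warnings handling added above assumes a shutdown response body shaped roughly like the following sketch (the attribute values are made up; only the structure matters):

```js
// Hypothetical response body of a remote AQL block shutdown call.
var responseBody = {
  "code": 0,                  // read via JsonHelper::getNumericValue<int>
  "warnings": [               // optional; each entry carries "code" and "message"
    { "code": 1562, "message": "division by zero" }
  ]
};
```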
@ -917,6 +917,34 @@ bool Query::getBooleanOption (char const* option, bool defaultValue) const {
|
|||
return valueJson->_value._boolean;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief convert the list of warnings to JSON
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_json_t* Query::warningsToJson () const {
|
||||
if (_warnings.empty()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
size_t const n = _warnings.size();
|
||||
TRI_json_t* json = TRI_CreateList2Json(TRI_UNKNOWN_MEM_ZONE, n);
|
||||
|
||||
if (json != nullptr) {
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
TRI_json_t* error = TRI_CreateArray2Json(TRI_UNKNOWN_MEM_ZONE, 2);
|
||||
|
||||
if (error != nullptr) {
|
||||
TRI_Insert3ArrayJson(TRI_UNKNOWN_MEM_ZONE, error, "code", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, static_cast<double>(_warnings[i].first)));
|
||||
TRI_Insert3ArrayJson(TRI_UNKNOWN_MEM_ZONE, error, "message", TRI_CreateString2CopyJson(TRI_UNKNOWN_MEM_ZONE, _warnings[i].second.c_str(), _warnings[i].second.size()));
|
||||
|
||||
TRI_PushBack3ListJson(TRI_UNKNOWN_MEM_ZONE, json, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- private methods
|
||||
// -----------------------------------------------------------------------------
|
||||
|
@ -1092,34 +1120,6 @@ void Query::setPlan (ExecutionPlan *plan) {
|
|||
_plan = plan;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief convert the list of warnings to JSON
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_json_t* Query::warningsToJson () const {
|
||||
if (_warnings.empty()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
size_t const n = _warnings.size();
|
||||
TRI_json_t* json = TRI_CreateList2Json(TRI_UNKNOWN_MEM_ZONE, n);
|
||||
|
||||
if (json != nullptr) {
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
TRI_json_t* error = TRI_CreateArray2Json(TRI_UNKNOWN_MEM_ZONE, 2);
|
||||
|
||||
if (error != nullptr) {
|
||||
TRI_Insert3ArrayJson(TRI_UNKNOWN_MEM_ZONE, error, "code", TRI_CreateNumberJson(TRI_UNKNOWN_MEM_ZONE, static_cast<double>(_warnings[i].first)));
|
||||
TRI_Insert3ArrayJson(TRI_UNKNOWN_MEM_ZONE, error, "message", TRI_CreateString2CopyJson(TRI_UNKNOWN_MEM_ZONE, _warnings[i].second.c_str(), _warnings[i].second.size()));
|
||||
|
||||
TRI_PushBack3ListJson(TRI_UNKNOWN_MEM_ZONE, json, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief create a TransactionContext
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -405,6 +405,12 @@ namespace triagens {
|
|||
bool getBooleanOption (char const*,
|
||||
bool) const;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief convert the list of warnings to JSON
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_json_t* warningsToJson () const;
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// --SECTION-- private methods
|
||||
// -----------------------------------------------------------------------------
|
||||
|
@ -452,12 +458,6 @@ namespace triagens {
|
|||
|
||||
void cleanupPlanAndEngine (int);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief convert the list of warnings to JSON
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
TRI_json_t* warningsToJson () const;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief create a TransactionContext
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -841,7 +841,17 @@ void RestAqlHandler::handleUseQuery (std::string const& operation,
|
|||
|
||||
try {
|
||||
res = query->engine()->shutdown(errorCode); // pass errorCode to shutdown
|
||||
|
||||
// return statistics
|
||||
answerBody("stats", query->getStats());
|
||||
|
||||
// return warnings if present
|
||||
auto warnings = query->warningsToJson();
|
||||
if (warnings != nullptr) {
|
||||
answerBody("warnings", Json(TRI_UNKNOWN_MEM_ZONE, warnings));
|
||||
}
|
||||
|
||||
// delete the query from the registry
|
||||
_queryRegistry->destroy(_vocbase, _qId, errorCode);
|
||||
}
|
||||
catch (...) {
|
||||
|
|
|
@ -242,6 +242,10 @@ static int HashIndexHelperAllocate (TRI_hash_index_t const* hashIndex,
|
|||
static int HashIndex_insert (TRI_hash_index_t* hashIndex,
|
||||
TRI_hash_index_element_t* element,
|
||||
bool isRollback) {
|
||||
TRI_IF_FAILURE("InsertHashIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
TRI_index_search_value_t key;
|
||||
int res = FillIndexSearchValueByHashIndexElement<TRI_hash_index_element_t>(hashIndex, &key, element);
|
||||
|
||||
|
@ -269,6 +273,10 @@ static int HashIndex_insert (TRI_hash_index_t* hashIndex,
|
|||
|
||||
static int HashIndex_remove (TRI_hash_index_t* hashIndex,
|
||||
TRI_hash_index_element_t* element) {
|
||||
TRI_IF_FAILURE("RemoveHashIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
int res = TRI_RemoveElementHashArray(&hashIndex->_hashArray, element);
|
||||
|
||||
// this might happen when rolling back
|
||||
|
@ -331,6 +339,10 @@ static TRI_index_result_t HashIndex_find (TRI_hash_index_t* hashIndex,
|
|||
static int MultiHashIndex_insert (TRI_hash_index_t* hashIndex,
|
||||
TRI_hash_index_element_multi_t* element,
|
||||
bool isRollback) {
|
||||
TRI_IF_FAILURE("InsertHashIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
TRI_index_search_value_t key;
|
||||
int res = FillIndexSearchValueByHashIndexElement<TRI_hash_index_element_multi_t>(hashIndex, &key, element);
|
||||
|
||||
|
@ -358,6 +370,10 @@ static int MultiHashIndex_insert (TRI_hash_index_t* hashIndex,
|
|||
|
||||
int MultiHashIndex_remove (TRI_hash_index_t* hashIndex,
|
||||
TRI_hash_index_element_multi_t* element) {
|
||||
TRI_IF_FAILURE("RemoveHashIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
TRI_index_search_value_t key;
|
||||
int res = FillIndexSearchValueByHashIndexElement<TRI_hash_index_element_multi_t>(hashIndex, &key, element);
|
||||
|
||||
|
|
|
@@ -281,6 +281,7 @@ ArangoServer::ArangoServer (int argc, char** argv)
     _disableAuthenticationUnixSockets(false),
     _dispatcherThreads(8),
     _dispatcherQueueSize(8192),
+    _v8Contexts(8),
     _databasePath(),
     _defaultMaximalSize(TRI_JOURNAL_DEFAULT_MAXIMAL_SIZE),
     _defaultWaitForSync(false),
@@ -560,6 +561,7 @@ void ArangoServer::buildApplicationServer () {

   additional["THREAD Options:help-admin"]
     ("server.threads", &_dispatcherThreads, "number of threads for basic operations")
+    ("javascript.v8-contexts", &_v8Contexts, "number of V8 contexts that are created for executing JavaScript actions")
   ;

   additional["Server Options:help-extended"]
@@ -795,25 +797,35 @@ int ArangoServer::startupServer () {

   TRI_ASSERT(vocbase != nullptr);


   // initialise V8
-  size_t concurrency = _dispatcherThreads;
+  if (! _applicationServer->programOptions().has("javascript.v8-contexts")) {
+    // the option was added recently so it's not always set
+    // the behavior in older ArangoDB was to create one V8 context per dispatcher thread
+    _v8Contexts = _dispatcherThreads;
+  }
+
+  if (_v8Contexts < 1) {
+    _v8Contexts = 1;
+  }

   if (mode == OperationMode::MODE_CONSOLE) {
     // one V8 instance is taken by the console
     if (startServer) {
-      ++concurrency;
+      ++_v8Contexts;
     }
   }
   else if (mode == OperationMode::MODE_UNITTESTS || mode == OperationMode::MODE_SCRIPT) {
-    if (concurrency == 1) {
-      // at least two to allow the test-runner and the scheduler to use a V8
-      concurrency = 2;
+    if (_v8Contexts == 1) {
+      // at least two to allow both the test-runner and the scheduler to use a V8 instance
+      _v8Contexts = 2;
     }
   }

   _applicationV8->setVocbase(vocbase);
-  _applicationV8->setConcurrency(concurrency);
+  _applicationV8->setConcurrency(_v8Contexts);
   _applicationV8->defineDouble("DISPATCHER_THREADS", _dispatcherThreads);
   _applicationV8->defineDouble("V8_CONTEXTS", _v8Contexts);

   // .............................................................................
   // prepare everything
@@ -317,12 +317,12 @@ namespace triagens {
 bool _disableAuthenticationUnixSockets;

 ////////////////////////////////////////////////////////////////////////////////
-/// @brief number of dispatcher threads for non-database worker
+/// @brief number of dispatcher threads
 /// @startDocuBlock serverThreads
 /// `--server.threads number`
 ///
-/// Specifies the *number* of threads that are spawned to handle action
-/// requests using Rest, JavaScript, or Ruby.
+/// Specifies the *number* of threads that are spawned to handle HTTP REST
+/// requests.
 /// @endDocuBlock
 ////////////////////////////////////////////////////////////////////////////////

@@ -330,7 +330,7 @@ namespace triagens {

 ////////////////////////////////////////////////////////////////////////////////
 /// @brief maximum size of the dispatcher queue for asynchronous requests
-/// @startDocuBlock serverAuthenticationDisable
+/// @startDocuBlock schedulerMaximalQueueSize
 /// `--scheduler.maximal-queue-size size`
 ///
 /// Specifies the maximum *size* of the dispatcher queue for asynchronous
@@ -343,6 +343,21 @@ namespace triagens {

 int _dispatcherQueueSize;

+////////////////////////////////////////////////////////////////////////////////
+/// @brief number of V8 contexts for executing JavaScript actions
+/// @startDocuBlock v8Contexts
+/// `--server.v8-contexts number`
+///
+/// Specifies the *number* of V8 contexts that are created for executing
+/// JavaScript code. More contexts allow execute more JavaScript actions in
+/// parallel, provided that there are also enough threads available. Please
+/// note that each V8 context will use a substantial amount of memory and
+/// requires periodic CPU processing time for garbage collection.
+/// @endDocuBlock
+////////////////////////////////////////////////////////////////////////////////
+
+int _v8Contexts;
+
 ////////////////////////////////////////////////////////////////////////////////
 /// @brief path to the database
 /// @startDocuBlock DatabaseDirectory
@ -259,6 +259,10 @@ static inline void SetRevision (TRI_document_collection_t* document,
|
|||
static int InsertPrimaryIndex (TRI_document_collection_t* document,
|
||||
TRI_doc_mptr_t const* header,
|
||||
bool isRollback) {
|
||||
TRI_IF_FAILURE("InsertPrimaryIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
TRI_doc_mptr_t* found;
|
||||
|
||||
TRI_ASSERT(document != nullptr);
|
||||
|
@ -294,6 +298,10 @@ static int InsertPrimaryIndex (TRI_document_collection_t* document,
|
|||
static int InsertSecondaryIndexes (TRI_document_collection_t* document,
|
||||
TRI_doc_mptr_t const* header,
|
||||
bool isRollback) {
|
||||
TRI_IF_FAILURE("InsertSecondaryIndexes") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
if (! document->useSecondaryIndexes()) {
|
||||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
@ -329,9 +337,9 @@ static int InsertSecondaryIndexes (TRI_document_collection_t* document,
|
|||
static int DeletePrimaryIndex (TRI_document_collection_t* document,
|
||||
TRI_doc_mptr_t const* header,
|
||||
bool isRollback) {
|
||||
// .............................................................................
|
||||
// remove from main index
|
||||
// .............................................................................
|
||||
TRI_IF_FAILURE("DeletePrimaryIndex") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
TRI_doc_mptr_t* found = static_cast<TRI_doc_mptr_t*>(TRI_RemoveKeyPrimaryIndex(&document->_primaryIndex, TRI_EXTRACT_MARKER_KEY(header))); // ONLY IN INDEX, PROTECTED by RUNTIME
|
||||
|
||||
|
@ -353,6 +361,10 @@ static int DeleteSecondaryIndexes (TRI_document_collection_t* document,
|
|||
return TRI_ERROR_NO_ERROR;
|
||||
}
|
||||
|
||||
TRI_IF_FAILURE("DeleteSecondaryIndexes") {
|
||||
return TRI_ERROR_DEBUG;
|
||||
}
|
||||
|
||||
int result = TRI_ERROR_NO_ERROR;
|
||||
size_t const n = document->_allIndexes._length;
|
||||
|
||||
|
|
|
@ -118,8 +118,10 @@ namespace triagens {
|
|||
header->copy(oldHeader);
|
||||
}
|
||||
else if (type == TRI_VOC_DOCUMENT_OPERATION_REMOVE) {
|
||||
if (status != StatusType::CREATED) {
|
||||
document->_headersPtr->relink(header, &oldHeader); // PROTECTED by trx in trxCollection
|
||||
}
|
||||
}
|
||||
|
||||
status = StatusType::SWAPPED;
|
||||
}
|
||||
|
|
|
@ -41,7 +41,7 @@ def genJsFile(errors):
|
|||
i = i + 1
|
||||
|
||||
if i < len(errors):
|
||||
out = out + ", \n"
|
||||
out = out + ",\n"
|
||||
else:
|
||||
out = out + "\n"
|
||||
|
||||
|
|
|
@ -66,7 +66,7 @@ function DatabaseSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testVersion : function () {
|
||||
assertMatch(/(^2\.2)|(-devel$)/, internal.db._version());
|
||||
assertMatch(/(^2\.3)|(-devel$)/, internal.db._version());
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
|
@ -36,7 +36,9 @@
|
|||
/// to perform and the second is an options object. For `which` the following
|
||||
/// values are allowed:
|
||||
/// Empty will give you a complete list.
|
||||
var functionDoku = {
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var functionsDocumentation = {
|
||||
'all' : " do all tests (marked with [x])",
|
||||
"shell_server_perf" : "bulk tests intended to get an overview of executiontime needed.",
|
||||
"single_client" : "run one test suite isolated via the arangosh; options required\n" +
|
||||
|
@ -45,7 +47,7 @@ var functionDoku = {
|
|||
" Run without to get more detail"
|
||||
};
|
||||
|
||||
var optiondoku = [
|
||||
var optionsDocumentation = [
|
||||
'',
|
||||
' The following properties of `options` are defined:',
|
||||
'',
|
||||
|
@ -65,7 +67,7 @@ var optiondoku = [
|
|||
' - `cleanup`: if set to true (the default), the cluster data files',
|
||||
' and logs are removed after termination of the test.',
|
||||
' - `jasmineReportFormat`: this option is passed on to the `format`',
|
||||
' option of the Jasmin options object, only for Jasmin tests.',
|
||||
' option of the Jasmine options object, only for Jasmine tests.',
|
||||
'',
|
||||
' - `valgrind`: if set to true the arangods are run with the valgrind',
|
||||
' memory checker',
|
||||
|
@ -76,7 +78,6 @@ var optiondoku = [
|
|||
' - `portOffset`: move our base port by n ports up',
|
||||
''
|
||||
];
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var _ = require("underscore");
|
||||
var cleanupDirectories = [];
|
||||
|
@ -146,14 +147,14 @@ function printUsage () {
|
|||
print(' where "which" is one of:\n');
|
||||
var i;
|
||||
var checkAll;
|
||||
var oneFunctionDoku;
|
||||
var oneFunctionDocumentation;
|
||||
for (i in testFuncs) {
|
||||
if (testFuncs.hasOwnProperty(i)) {
|
||||
if (functionDoku.hasOwnProperty(i)) {
|
||||
oneFunctionDoku = ' - ' + functionDoku[i];
|
||||
if (functionsDocumentation.hasOwnProperty(i)) {
|
||||
oneFunctionDocumentation = ' - ' + functionsDocumentation[i];
|
||||
}
|
||||
else {
|
||||
oneFunctionDoku = '';
|
||||
oneFunctionDocumentation = '';
|
||||
}
|
||||
if (allTests.indexOf(i) !== -1) {
|
||||
checkAll = '[x]';
|
||||
|
@ -161,18 +162,17 @@ function printUsage () {
|
|||
else {
|
||||
checkAll = ' ';
|
||||
}
|
||||
print(' ' + checkAll + ' '+i+' ' + oneFunctionDoku);
|
||||
print(' ' + checkAll + ' '+i+' ' + oneFunctionDocumentation);
|
||||
}
|
||||
}
|
||||
for (i in optiondoku) {
|
||||
if (optiondoku.hasOwnProperty(i)) {
|
||||
print(optiondoku[i]);
|
||||
for (i in optionsDocumentation) {
|
||||
if (optionsDocumentation.hasOwnProperty(i)) {
|
||||
print(optionsDocumentation[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function filterTestcaseByOptions (testname, options, whichFilter)
|
||||
{
|
||||
function filterTestcaseByOptions (testname, options, whichFilter) {
|
||||
if ((testname.indexOf("-cluster") !== -1) && (options.cluster === false)) {
|
||||
whichFilter.filter = 'noncluster';
|
||||
return false;
|
||||
|
@ -560,13 +560,13 @@ function runThere (options, instanceInfo, file) {
|
|||
var t;
|
||||
if (file.indexOf("-spec") === -1) {
|
||||
t = 'var runTest = require("jsunity").runTest; '+
|
||||
'return runTest('+JSON.stringify(file)+');';
|
||||
'return runTest(' + JSON.stringify(file) + ');';
|
||||
}
|
||||
else {
|
||||
var jasmineReportFormat = options.jasmineReportFormat || 'progress';
|
||||
t = 'var executeTestSuite = require("jasmine").executeTestSuite; '+
|
||||
'return executeTestSuite(['+JSON.stringify(file)+'],{"format": '+
|
||||
JSON.stringify(jasmineReportFormat)+'});';
|
||||
'return executeTestSuite([' + JSON.stringify(file) + '],{"format": '+
|
||||
JSON.stringify(jasmineReportFormat) + '});';
|
||||
}
|
||||
var o = makeAuthorisationHeaders(options);
|
||||
o.method = "POST";
|
||||
|
|
|
@ -36,6 +36,14 @@ var cluster = require("org/arangodb/cluster");
|
|||
var getModifyQueryResults = helper.getModifyQueryResults;
|
||||
var assertQueryError = helper.assertQueryError;
|
||||
|
||||
var sanitizeStats = function (stats) {
|
||||
// remove these members from the stats because they don't matter
|
||||
// for the comparisons
|
||||
delete stats.scannedFull;
|
||||
delete stats.scannedIndex;
|
||||
return stats;
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test suite
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -203,11 +211,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveNothing : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN " + cn1 + " FILTER d.value1 < 0 REMOVE d IN " + cn1);
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -215,11 +223,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveNothingBind : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn FILTER d.value1 < 0 REMOVE d IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -268,11 +276,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveIgnore1 : function () {
|
||||
var expected = { executed: 0, ignored: 100 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 100 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE 'foo' IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -280,11 +288,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveIgnore2 : function () {
|
||||
var expected = { executed: 100, ignored: 1 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 1 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..100 REMOVE CONCAT('test', TO_STRING(i)) IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -292,11 +300,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveAll1 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE d IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -304,11 +312,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveAll2 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE d._key IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -316,11 +324,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveAll3 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE { _key: d._key } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -328,11 +336,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveAll4 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..99 REMOVE { _key: CONCAT('test', TO_STRING(i)) } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -340,24 +348,23 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveAll5 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE d INTO @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test remove
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveHalf : function () {
|
||||
var expected = { executed: 50, ignored: 0 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..99 FILTER i % 2 == 0 REMOVE { _key: CONCAT('test', TO_STRING(i)) } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(50, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -373,11 +380,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testSingle : function () {
|
||||
var expected = { executed: 1, ignored: 0 };
|
||||
var expected = { writesExecuted: 1, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("REMOVE 'test0' IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(99, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -396,12 +403,12 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testTwoCollectionsJoin1 : function () {
|
||||
var expected = { executed: 50, ignored: 0 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn1 FILTER d.value1 < 50 REMOVE { _key: d._key } IN @@cn2", { "@cn1": cn1, "@cn2": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(0, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -409,12 +416,12 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testTwoCollectionsJoin2 : function () {
|
||||
var expected = { executed: 48, ignored: 0 };
|
||||
var expected = { writesExecuted: 48, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn1 FILTER d.value1 >= 2 && d.value1 < 50 REMOVE { _key: d._key } IN @@cn2", { "@cn1": cn1, "@cn2": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(2, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -422,12 +429,12 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testTwoCollectionsIgnoreErrors1 : function () {
|
||||
var expected = { executed: 50, ignored: 50 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 50 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn1 REMOVE { _key: d._key } IN @@cn2 OPTIONS { ignoreErrors: true }", { "@cn1": cn1, "@cn2": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(0, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -435,12 +442,12 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testTwoCollectionsIgnoreErrors2 : function () {
|
||||
var expected = { executed: 0, ignored: 100 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 100 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn1 REMOVE { _key: CONCAT('foo', d._key) } IN @@cn2 OPTIONS { ignoreErrors: true }", { "@cn1": cn1, "@cn2": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(50, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -448,11 +455,11 @@ function ahuacatlRemoveSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRemoveWaitForSync : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn REMOVE d IN @@cn OPTIONS { waitForSync: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(0, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -466,12 +473,12 @@ function ahuacatlRemoveSuite () {
|
|||
for (var i = 0; i < 100; ++i) {
|
||||
edge.save("UnitTestsAhuacatlRemove1/foo" + i, "UnitTestsAhuacatlRemove2/bar", { what: i, _key: "test" + i });
|
||||
}
|
||||
var expected = { executed: 10, ignored: 0 };
|
||||
var expected = { writesExecuted: 10, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..9 REMOVE CONCAT('test', TO_STRING(i)) IN @@cn", { "@cn": edge.name() });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(90, edge.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
db._drop("UnitTestsAhuacatlEdge");
|
||||
}
|
||||
|
||||
|
@ -526,11 +533,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertNothing : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN " + cn1 + " FILTER d.value1 < 0 INSERT { foxx: true } IN " + cn1);
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -538,11 +545,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertNothingBind : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn FILTER d.value1 < 0 INSERT { foxx: true } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -606,11 +613,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertIgnore1 : function () {
|
||||
var expected = { executed: 0, ignored: 100 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 100 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn INSERT d IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -618,11 +625,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertIgnore2 : function () {
|
||||
var expected = { executed: 1, ignored: 50 };
|
||||
var expected = { writesExecuted: 1, writesIgnored: 50 };
|
||||
var actual = getModifyQueryResults("FOR i IN 50..100 INSERT { _key: CONCAT('test', TO_STRING(i)) } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(101, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -630,11 +637,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertIgnore3 : function () {
|
||||
var expected = { executed: 51, ignored: 50 };
|
||||
var expected = { writesExecuted: 51, writesIgnored: 50 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..100 INSERT { _key: CONCAT('test', TO_STRING(i)) } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn2 });
|
||||
|
||||
assertEqual(101, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -642,11 +649,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertIgnore4 : function () {
|
||||
var expected = { executed: 0, ignored: 100 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 100 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..99 INSERT { _key: CONCAT('test', TO_STRING(i)) } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -654,11 +661,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertIgnore5 : function () {
|
||||
var expected = { executed: 50, ignored: 50 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 50 };
|
||||
var actual = getModifyQueryResults("FOR i IN 0..99 INSERT { _key: CONCAT('test', TO_STRING(i)) } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn2 });
|
||||
|
||||
assertEqual(100, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -666,11 +673,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertEmpty : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn INSERT { } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(200, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -678,12 +685,12 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertCopy : function () {
|
||||
var expected = { executed: 50, ignored: 0 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 50..99 INSERT { _key: CONCAT('test', TO_STRING(i)) } IN @@cn", { "@cn": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(100, c2.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -691,12 +698,12 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testSingle : function () {
|
||||
var expected = { executed: 1, ignored: 0 };
|
||||
var expected = { writesExecuted: 1, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("INSERT { value: 'foobar', _key: 'test' } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(101, c1.count());
|
||||
assertEqual("foobar", c1.document("test").value);
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -704,11 +711,11 @@ function ahuacatlInsertSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testInsertWaitForSync : function () {
|
||||
var expected = { executed: 50, ignored: 0 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 1..50 INSERT { value: i } INTO @@cn OPTIONS { waitForSync: true }", { "@cn": cn2 });
|
||||
|
||||
assertEqual(100, c1.count());
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -761,10 +768,10 @@ function ahuacatlInsertSuite () {
|
|||
db._drop("UnitTestsAhuacatlEdge");
|
||||
var edge = db._createEdgeCollection("UnitTestsAhuacatlEdge");
|
||||
|
||||
var expected = { executed: 50, ignored: 0 };
|
||||
var expected = { writesExecuted: 50, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR i IN 1..50 INSERT { _key: CONCAT('test', TO_STRING(i)), _from: CONCAT('UnitTestsAhuacatlInsert1/', TO_STRING(i)), _to: CONCAT('UnitTestsAhuacatlInsert2/', TO_STRING(i)), value: [ i ], sub: { foo: 'bar' } } INTO @@cn", { "@cn": edge.name() });
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
assertEqual(50, edge.count());
|
||||
|
||||
for (var i = 1; i <= 50; ++i) {
|
||||
|
@ -828,10 +835,10 @@ function ahuacatlUpdateSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateNothing : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN " + cn1 + " FILTER d.value1 < 0 UPDATE { foxx: true } IN " + cn1);
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -839,10 +846,10 @@ function ahuacatlUpdateSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateNothingBind : function () {
|
||||
var expected = { executed: 0, ignored: 0 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn FILTER d.value1 < 0 UPDATE { foxx: true } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -893,10 +900,10 @@ function ahuacatlUpdateSuite () {
|
|||
|
||||
testUpdateIgnore1 : function () {
|
||||
c1.ensureUniqueConstraint("value3");
|
||||
var expected = { executed: 1, ignored: 99 };
|
||||
var expected = { writesExecuted: 1, writesIgnored: 99 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d WITH { value3: 1 } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -905,10 +912,10 @@ function ahuacatlUpdateSuite () {
|
|||
|
||||
testUpdateIgnore2 : function () {
|
||||
c1.ensureUniqueConstraint("value1");
|
||||
var expected = { executed: 0, ignored: 51 };
|
||||
var expected = { writesExecuted: 0, writesIgnored: 51 };
|
||||
var actual = getModifyQueryResults("FOR i IN 50..100 UPDATE { _key: CONCAT('test', TO_STRING(i)), value1: 1 } IN @@cn OPTIONS { ignoreErrors: true }", { "@cn": cn1 });
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -916,10 +923,10 @@ function ahuacatlUpdateSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testUpdateEmpty1 : function () {
|
||||
var expected = { executed: 100, ignored: 0 };
|
||||
var expected = { writesExecuted: 100, writesIgnored: 0 };
|
||||
var actual = getModifyQueryResults("FOR d IN @@cn UPDATE { _key: d._key } IN @@cn", { "@cn": cn1 });
|
||||
|
||||
assertEqual(expected, actual.operations);
|
||||
assertEqual(expected, sanitizeStats(actual));
|
||||
for (var i = 0; i < 100; ++i) {
|
||||
var doc = c1.document("test" + i);
|
||||
assertEqual(i, doc.value1);
|
||||
|
@ -932,10 +939,10 @@ function ahuacatlUpdateSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////

    testUpdateEmpty2 : function () {
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d IN @@cn", { "@cn": cn1 });

      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));
      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);
        assertEqual(i, doc.value1);

@@ -956,11 +963,11 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testSingle : function () {
      var expected = { executed: 1, ignored: 0 };
      var expected = { writesExecuted: 1, writesIgnored: 0 };
      var actual = getModifyQueryResults("UPDATE { value: 'foobar', _key: 'test17' } IN @@cn", { "@cn": cn1 });

      assertEqual("foobar", c1.document("test17").value);
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));
    },

////////////////////////////////////////////////////////////////////////////////

@@ -968,9 +975,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateOldValue : function () {
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn UPDATE { _key: d._key, value1: d.value2, value2: d.value1, value3: d.value1 + 5 } IN @@cn", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -985,9 +992,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateWaitForSync : function () {
      var expected = { executed: 50, ignored: 0 };
      var expected = { writesExecuted: 50, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR i IN 1..50 UPDATE { _key: CONCAT('test', TO_STRING(i)) } INTO @@cn OPTIONS { waitForSync: true }", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -1001,9 +1008,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateKeepNullDefault : function () {
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d._key WITH { value1: null, value3: 'foobar', value9: null } INTO @@cn", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -1019,9 +1026,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateKeepNullTrue : function () {
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d._key WITH { value1: null, value3: 'foobar', value9: null } INTO @@cn OPTIONS { keepNull: true }", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -1037,9 +1044,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateKeepNullFalse : function () {
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d._key WITH { value1: null, value3: 'foobar', value9: null } INTO @@cn OPTIONS { keepNull: false }", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -1055,9 +1062,9 @@ function ahuacatlUpdateSuite () {
////////////////////////////////////////////////////////////////////////////////

    testUpdateFilter : function () {
      var expected = { executed: 50, ignored: 0 };
      var expected = { writesExecuted: 50, writesIgnored: 0 };
      var actual = getModifyQueryResults("FOR d IN @@cn FILTER d.value1 % 2 == 0 UPDATE d._key WITH { value2: 100 } INTO @@cn", { "@cn": cn1 });
      assertEqual(expected, actual.operations);
      assertEqual(expected, sanitizeStats(actual));

      for (var i = 0; i < 100; ++i) {
        var doc = c1.document("test" + i);

@@ -1076,10 +1083,10 @@ function ahuacatlUpdateSuite () {

    testUpdateUpdate : function () {
      var i;
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      for (i = 0; i < 5; ++i) {
        var actual = getModifyQueryResults("FOR d IN @@cn UPDATE d._key WITH { counter: HAS(d, 'counter') ? d.counter + 1 : 1 } INTO @@cn", { "@cn": cn1 });
        assertEqual(expected, actual.operations);
        assertEqual(expected, sanitizeStats(actual));
      }

      for (i = 0; i < 100; ++i) {

@@ -1094,10 +1101,10 @@ function ahuacatlUpdateSuite () {

    testReplace1 : function () {
      var i;
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      for (i = 0; i < 5; ++i) {
        var actual = getModifyQueryResults("FOR d IN @@cn REPLACE d._key WITH { value4: 12 } INTO @@cn", { "@cn": cn1 });
        assertEqual(expected, actual.operations);
        assertEqual(expected, sanitizeStats(actual));
      }

      for (i = 0; i < 100; ++i) {

@@ -1115,10 +1122,10 @@ function ahuacatlUpdateSuite () {

    testReplace2 : function () {
      var i;
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      for (i = 0; i < 5; ++i) {
        var actual = getModifyQueryResults("FOR d IN @@cn REPLACE { _key: d._key, value4: 13 } INTO @@cn", { "@cn": cn1 });
        assertEqual(expected, actual.operations);
        assertEqual(expected, sanitizeStats(actual));
      }

      for (i = 0; i < 100; ++i) {

@@ -1136,10 +1143,10 @@ function ahuacatlUpdateSuite () {

    testReplaceReplace : function () {
      var i;
      var expected = { executed: 100, ignored: 0 };
      var expected = { writesExecuted: 100, writesIgnored: 0 };
      for (i = 0; i < 5; ++i) {
        var actual = getModifyQueryResults("FOR d IN @@cn REPLACE d._key WITH { value1: d.value1 + 1 } INTO @@cn", { "@cn": cn1 });
        assertEqual(expected, actual.operations);
        assertEqual(expected, sanitizeStats(actual));
      }

      for (i = 0; i < 100; ++i) {
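Throughout the update and replace tests above, the expected statistics keys change from `executed` / `ignored` to `writesExecuted` / `writesIgnored`, and the assertions now compare against `sanitizeStats(actual)` instead of `actual.operations`. As a rough illustration only (the real helper is defined elsewhere in the test file and may normalize additional fields), such a helper could simply project the two write counters out of the full statistics object:

```js
// Hypothetical sketch of a stats-sanitizing helper; the actual helper used by
// the suite may differ.
function sanitizeStats (stats) {
  // keep only the counters the assertions compare against
  return {
    writesExecuted: stats.writesExecuted,
    writesIgnored: stats.writesIgnored
  };
}
```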
@@ -355,7 +355,6 @@ function ahuacatlRemoveSuite () {
      assertEqual(expected, sanitizeStats(actual));
    },


////////////////////////////////////////////////////////////////////////////////
/// @brief test remove
////////////////////////////////////////////////////////////////////////////////
@@ -4227,6 +4227,452 @@ function transactionServerFailuresSuite () {
      internal.wait(0);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertUniqueHashIndexServerFailures : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureUniqueConstraint("value");

        internal.debugSetFailAt(f);

        try {
          c.save({ value: 1 });
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(0, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertUniqueHashIndexServerFailuresTrx : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureUniqueConstraint("value");

        db._executeTransaction({
          collections: {
            write: cn
          },
          action: function () {
            c.save({ value: 1 });

            internal.debugSetFailAt(f);
            try {
              c.save({ value: 2 });
              fail();
            }
            catch (err) {
              assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
            }

            assertEqual(1, c.count());
            internal.debugClearFailAt();
          }
        });
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertUniqueHashIndexServerFailuresRollback : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureUniqueConstraint("value");

        try {
          db._executeTransaction({
            collections: {
              write: cn
            },
            action: function () {
              c.save({ value: 1 });

              internal.debugSetFailAt(f);
              c.save({ value: 2 });
            }
          });
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(0, c.count());
        internal.debugClearFailAt();
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertNonUniqueHashIndexServerFailures : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureHashIndex("value");

        internal.debugSetFailAt(f);

        try {
          c.save({ value: 1 });
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(0, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertNonUniqueHashIndexServerFailuresTrx : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureHashIndex("value");

        db._executeTransaction({
          collections: {
            write: cn
          },
          action: function () {
            c.save({ value: 1 });

            internal.debugSetFailAt(f);
            try {
              c.save({ value: 2 });
              fail();
            }
            catch (err) {
              assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
            }

            assertEqual(1, c.count());
            internal.debugClearFailAt();
          }
        });
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testInsertNonUniqueHashIndexServerFailuresRollback : function () {
      var failures = [ "InsertPrimaryIndex", "InsertSecondaryIndexes", "InsertHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureHashIndex("value");

        try {
          db._executeTransaction({
            collections: {
              write: cn
            },
            action: function () {
              c.save({ value: 1 });

              internal.debugSetFailAt(f);
              c.save({ value: 2 });
            }
          });
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(0, c.count());
        internal.debugClearFailAt();
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveUniqueHashIndexServerFailures : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes", "RemoveHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ value: i });
        }
        c.ensureUniqueConstraint("value");

        internal.debugSetFailAt(f);

        try {
          c.truncate();
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveNonUniqueHashIndexServerFailures : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes", "RemoveHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ value: i % 10 });
        }
        c.ensureHashIndex("value");

        internal.debugSetFailAt(f);

        try {
          c.truncate();
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveNonUniqueHashIndexServerFailuresTrx : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes", "RemoveHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        c.ensureHashIndex("value");
        for (var i = 0; i < 1000; ++i) {
          c.save({ _key: "test" + i, value: i % 10 });
        }

        db._executeTransaction({
          collections: {
            write: cn
          },
          action: function () {
            for (var j = 0; j < 10; ++j) {
              c.remove("test" + j);
            }

            internal.debugSetFailAt(f);

            try {
              c.remove("test10");
              fail();
            }
            catch (err) {
              assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
            }
          }
        });

        assertEqual(990, c.count());
        internal.debugClearFailAt();
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveNonUniqueHashIndexServerFailuresRollback : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes", "RemoveHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ _key: "test" + i, value: i % 10 });
        }

        c.ensureHashIndex("value");

        try {
          db._executeTransaction({
            collections: {
              write: cn
            },
            action: function () {
              for (var j = 0; j < 10; ++j) {
                c.remove("test" + j);
              }

              internal.debugSetFailAt(f);
              c.remove("test10");
            }
          });
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
        internal.debugClearFailAt();
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveUniqueSkiplistServerFailures : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ value: i });
        }
        c.ensureUniqueSkiplist("value");

        internal.debugSetFailAt(f);

        try {
          c.truncate();
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveNonUniqueSkiplistServerFailures : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ value: i % 10 });
        }
        c.ensureSkiplist("value");

        internal.debugSetFailAt(f);

        try {
          c.truncate();
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////

    testRemoveMultipleIndexesServerFailures : function () {
      var failures = [ "DeletePrimaryIndex", "DeleteSecondaryIndexes", "RemoveHashIndex" ];

      failures.forEach (function (f) {
        internal.debugClearFailAt();
        db._drop(cn);
        c = db._create(cn);

        for (var i = 0; i < 1000; ++i) {
          c.save({ value: i % 10 });
        }
        c.ensureSkiplist("value");
        c.ensureHashIndex("value");

        internal.debugSetFailAt(f);

        try {
          c.truncate();
          fail();
        }
        catch (err) {
          assertEqual(internal.errors.ERROR_DEBUG.code, err.errorNum);
        }

        assertEqual(1000, c.count());
      });
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test: rollback in case of a server-side fail
////////////////////////////////////////////////////////////////////////////////
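All of the new transactionServerFailuresSuite tests follow the same failure-injection pattern: arm a server-side failure point, perform a write that is expected to fail with `ERROR_DEBUG`, and then verify that the collection and its indexes rolled back to the previous state. A condensed sketch of that pattern is shown below; it is an illustration only and assumes an arangod build with failure points enabled (the `expectRollback` helper is hypothetical, not part of the test suite).

```js
// Condensed sketch of the rollback pattern used by the new tests
// (assumption: requires an arangod build with failure points enabled).
var internal = require("internal");

function expectRollback (collection, failurePoint, writeOperation) {
  var before = collection.count();

  internal.debugSetFailAt(failurePoint);   // arm the server-side failure point
  try {
    writeOperation();                      // this write is expected to fail
    throw new Error("write should have failed at " + failurePoint);
  }
  catch (err) {
    // the injected failure surfaces as ERROR_DEBUG; anything else is a real bug
    if (err.errorNum !== internal.errors.ERROR_DEBUG.code) {
      throw err;
    }
  }
  finally {
    internal.debugClearFailAt();           // disarm, so later tests are unaffected
  }

  // the collection must look exactly as it did before the failed write
  if (collection.count() !== before) {
    throw new Error("rollback did not restore the collection");
  }
}
```

For example, `expectRollback(c, "InsertHashIndex", function () { c.save({ value: 1 }); });` mirrors the unique-hash-index insert test above.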
@@ -5046,3 +5492,4 @@ return jsunity.done();
// mode: outline-minor
// outline-regexp: "\\(/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @\\}\\)"
// End: