diff --git a/Documentation/DocuBlocks/Rest/Administration/get_engine.md b/Documentation/DocuBlocks/Rest/Administration/get_engine.md
index db090d834c..01faf3f78c 100644
--- a/Documentation/DocuBlocks/Rest/Administration/get_engine.md
+++ b/Documentation/DocuBlocks/Rest/Administration/get_engine.md
@@ -18,14 +18,25 @@ will be *mmfiles* or *rocksdb*
@EXAMPLES
-Return the active storage engine
+Return the active storage engine when the MMFiles storage engine is in use:
-@EXAMPLE_ARANGOSH_RUN{RestEngine}
+@EXAMPLE_ARANGOSH_RUN{RestEngine_mmfiles}
var response = logCurlRequest('GET', '/_api/engine');
assert(response.code === 200);
logJsonResponse(response);
@END_EXAMPLE_ARANGOSH_RUN
+
+Return the active storage engine when the RocksDB storage engine is in use:
+
+@EXAMPLE_ARANGOSH_RUN{RestEngine_rocksdb}
+ var response = logCurlRequest('GET', '/_api/engine');
+
+ assert(response.code === 200);
+
+ logJsonResponse(response);
+@END_EXAMPLE_ARANGOSH_RUN
+
@endDocuBlock
diff --git a/Documentation/DocuBlocks/Rest/Bulk/batch_processing.md b/Documentation/DocuBlocks/Rest/Bulk/batch_processing.md
index a9e05f8a66..1d324bd3de 100644
--- a/Documentation/DocuBlocks/Rest/Bulk/batch_processing.md
+++ b/Documentation/DocuBlocks/Rest/Bulk/batch_processing.md
@@ -108,11 +108,11 @@ The boundary (`SomeBoundaryValue`) is passed to the server in the HTTP
parts.join("\r\n" + "--" + boundary + "\r\n") +
"--" + boundary + "--\r\n";
- var response = logCurlRequestRaw('POST', '/_api/batch', body, headers);
+ var response = logCurlRequestPlain('POST', '/_api/batch', body, headers);
assert(response.code === 200);
- logRawResponse(response);
+ logPlainResponse(response);
@END_EXAMPLE_ARANGOSH_RUN
Sending a batch request, setting the boundary implicitly (the server will
@@ -130,12 +130,12 @@ in this case try to find the boundary at the beginning of the request body).
parts.join("\r\n" + "--" + boundary + "\r\n") +
"--" + boundary + "--\r\n";
- var response = logCurlRequestRaw('POST', '/_api/batch', body);
+ var response = logCurlRequestPlain('POST', '/_api/batch', body);
assert(response.code === 200);
assert(response.headers['x-arango-errors'] == 2);
- logRawResponse(response);
+ logPlainResponse(response);
@END_EXAMPLE_ARANGOSH_RUN
@endDocuBlock
diff --git a/Documentation/DocuBlocks/Rest/Collections/get_api_collection_figures.md b/Documentation/DocuBlocks/Rest/Collections/get_api_collection_figures.md
index 6204767b1e..4094b6c37a 100644
--- a/Documentation/DocuBlocks/Rest/Collections/get_api_collection_figures.md
+++ b/Documentation/DocuBlocks/Rest/Collections/get_api_collection_figures.md
@@ -18,9 +18,9 @@ The name of the collection.
@RESTDESCRIPTION
In addition to the above, the result also contains the number of documents
and additional statistical information about the collection.
-**Note** : This will always load the collection into memory.
+**Note**: This will always load the collection into memory.
-**Note**: collection data that are stored in the write-ahead log only are
+**Note**: collection data that is stored only in the write-ahead log is
not reported in the results. When the write-ahead log is collected, documents
might be added to journals and datafiles of the collection, which may modify
the figures of the collection.
@@ -126,6 +126,7 @@ engine (in bytes). This figure does not include the document data but only mappi
from document revision ids to storage engine datafile positions.
@RESTSTRUCT{indexes,collection_figures,object,required,collection_figures_indexes}
+
@RESTSTRUCT{count,collection_figures_indexes,integer,required,int64}
The total number of indexes defined for the collection, including the pre-defined
indexes (e.g. primary index).
@@ -153,6 +154,7 @@ head of the collection's cleanup queue. This information can be used for debuggi
compaction and unload issues.
@RESTSTRUCT{compactionStatus,collection_figures,object,optional,compactionStatus_attributes}
+
@RESTSTRUCT{message,compactionStatus_attributes,string,optional,string}
The action that was performed when the compaction was last run for the collection.
This information can be used for debugging compaction issues.
@@ -174,9 +176,9 @@ is returned.
@EXAMPLES
-Using an identifier and requesting the figures of the collection:
+Using an identifier and requesting the figures of the collection (MMFiles storage engine):
-@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures}
+@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
@@ -191,5 +193,24 @@ Using an identifier and requesting the figures of the collection:
logJsonResponse(response);
db._drop(cn);
@END_EXAMPLE_ARANGOSH_RUN
+
+Using an identifier and requesting the figures of the collection (RocksDB storage engine):
+
+@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures_rocksdb}
+ var cn = "products";
+ db._drop(cn);
+ var coll = db._create(cn);
+ coll.save({"test":"hello"});
+ require("internal").wal.flush(true, true);
+ var url = "/_api/collection/"+ coll.name() + "/figures";
+
+ var response = logCurlRequest('GET', url);
+
+ assert(response.code === 200);
+
+ logJsonResponse(response);
+ db._drop(cn);
+@END_EXAMPLE_ARANGOSH_RUN
+
@endDocuBlock
diff --git a/Documentation/DocuBlocks/Rest/Collections/put_api_collection_rotate.md b/Documentation/DocuBlocks/Rest/Collections/put_api_collection_rotate.md
index b8ca6d54fd..db1255b980 100644
--- a/Documentation/DocuBlocks/Rest/Collections/put_api_collection_rotate.md
+++ b/Documentation/DocuBlocks/Rest/Collections/put_api_collection_rotate.md
@@ -43,7 +43,7 @@ If the *collection-name* is unknown, then a *HTTP 404* is returned.
Rotating the journal:
-@EXAMPLE_ARANGOSH_RUN{RestCollectionRotate}
+@EXAMPLE_ARANGOSH_RUN{RestCollectionRotate_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
@@ -62,7 +62,7 @@ Rotating the journal:
Rotating if no journal exists:
-@EXAMPLE_ARANGOSH_RUN{RestCollectionRotateNoJournal}
+@EXAMPLE_ARANGOSH_RUN{RestCollectionRotateNoJournal_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
diff --git a/Documentation/DocuBlocks/Rest/Replication/get_api_replication_dump.md b/Documentation/DocuBlocks/Rest/Replication/get_api_replication_dump.md
index 2e15646c52..5f72905bac 100644
--- a/Documentation/DocuBlocks/Rest/Replication/get_api_replication_dump.md
+++ b/Documentation/DocuBlocks/Rest/Replication/get_api_replication_dump.md
@@ -104,7 +104,7 @@ is returned if an error occurred while assembling the response.
Empty collection:
-@EXAMPLE_ARANGOSH_RUN{RestReplicationDumpEmpty}
+@EXAMPLE_ARANGOSH_RUN{RestReplicationDumpEmpty_mmfiles}
db._drop("testCollection");
var c = db._create("testCollection");
var url = "/_api/replication/dump?collection=" + c.name();
@@ -118,7 +118,7 @@ Empty collection:
Non-empty collection *(One JSON document per line)*:
-@EXAMPLE_ARANGOSH_RUN{RestReplicationDump}
+@EXAMPLE_ARANGOSH_RUN{RestReplicationDump_mmfiles}
db._drop("testCollection");
var c = db._create("testCollection");
c.save({ "test" : true, "a" : "abc", "_key" : "abcdef" });
diff --git a/Documentation/DocuBlocks/Rest/Replication/get_api_wal_access_range.md b/Documentation/DocuBlocks/Rest/Replication/get_api_wal_access_range.md
index 443139e5b5..ece10deec7 100644
--- a/Documentation/DocuBlocks/Rest/Replication/get_api_wal_access_range.md
+++ b/Documentation/DocuBlocks/Rest/Replication/get_api_wal_access_range.md
@@ -11,7 +11,7 @@ data (identified by tick value) are still available for replication.
The body of the response contains a JSON object.
* *tickMin*: minimum tick available
-* *tickMax: maximum tick available
+* *tickMax*: maximum tick available
* *time*: the server time as string in format "YYYY-MM-DDTHH:MM:SSZ"
* *server*: An object with fields *version* and *serverId*
diff --git a/Documentation/DocuBlocks/Rest/Replication/put_api_replication_inventory.md b/Documentation/DocuBlocks/Rest/Replication/put_api_replication_inventory.md
index 9b7ffc380a..ea8481f772 100644
--- a/Documentation/DocuBlocks/Rest/Replication/put_api_replication_inventory.md
+++ b/Documentation/DocuBlocks/Rest/Replication/put_api_replication_inventory.md
@@ -96,7 +96,7 @@ is returned if an error occurred while assembling the response.
@EXAMPLES
-@EXAMPLE_ARANGOSH_RUN{RestReplicationInventory}
+@EXAMPLE_ARANGOSH_RUN{RestReplicationInventory_mmfiles}
var url = "/_api/replication/inventory";
var response = logCurlRequest('GET', url);
@@ -107,7 +107,7 @@ is returned if an error occurred while assembling the response.
With some additional indexes:
-@EXAMPLE_ARANGOSH_RUN{RestReplicationInventoryIndexes}
+@EXAMPLE_ARANGOSH_RUN{RestReplicationInventoryIndexes_mmfiles}
db._drop("IndexedCollection1");
var c1 = db._create("IndexedCollection1");
c1.ensureHashIndex("name");
diff --git a/Documentation/DocuBlocks/Rest/wal/get_admin_wal_properties.md b/Documentation/DocuBlocks/Rest/wal/get_admin_wal_properties.md
index 9f9ad41a79..29e99b39c6 100644
--- a/Documentation/DocuBlocks/Rest/wal/get_admin_wal_properties.md
+++ b/Documentation/DocuBlocks/Rest/wal/get_admin_wal_properties.md
@@ -32,7 +32,7 @@ is returned when an invalid HTTP method is used.
@EXAMPLES
-@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesGet}
+@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesGet_mmfiles}
var url = "/_admin/wal/properties";
var response = logCurlRequest('GET', url);
diff --git a/Documentation/DocuBlocks/Rest/wal/get_admin_wal_transactions.md b/Documentation/DocuBlocks/Rest/wal/get_admin_wal_transactions.md
index fedf21ad8d..1fc75f7fd7 100644
--- a/Documentation/DocuBlocks/Rest/wal/get_admin_wal_transactions.md
+++ b/Documentation/DocuBlocks/Rest/wal/get_admin_wal_transactions.md
@@ -26,7 +26,7 @@ is returned when an invalid HTTP method is used.
@EXAMPLES
-@EXAMPLE_ARANGOSH_RUN{RestWalTransactionsGet}
+@EXAMPLE_ARANGOSH_RUN{RestWalTransactionsGet_mmfiles}
var url = "/_admin/wal/transactions";
var response = logCurlRequest('GET', url);
diff --git a/Documentation/DocuBlocks/Rest/wal/put_admin_wal_properties.md b/Documentation/DocuBlocks/Rest/wal/put_admin_wal_properties.md
index f50b687449..33a290e93f 100644
--- a/Documentation/DocuBlocks/Rest/wal/put_admin_wal_properties.md
+++ b/Documentation/DocuBlocks/Rest/wal/put_admin_wal_properties.md
@@ -33,7 +33,7 @@ is returned when an invalid HTTP method is used.
@EXAMPLES
-@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesPut}
+@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesPut_mmfiles}
var url = "/_admin/wal/properties";
var body = {
logfileSize: 32 * 1024 * 1024,
diff --git a/Documentation/DocuBlocks/collectionFigures.md b/Documentation/DocuBlocks/collectionFigures.md
index 7c53f91455..3f3d1ea479 100644
--- a/Documentation/DocuBlocks/collectionFigures.md
+++ b/Documentation/DocuBlocks/collectionFigures.md
@@ -83,7 +83,7 @@ used as a lower bound approximation of the disk usage.
@EXAMPLES
-@EXAMPLE_ARANGOSH_OUTPUT{collectionFigures}
+@EXAMPLE_ARANGOSH_OUTPUT{collectionFigures_mmfiles}
~ require("internal").wal.flush(true, true);
db.demo.figures()
@END_EXAMPLE_ARANGOSH_OUTPUT
diff --git a/Documentation/DocuBlocks/ensureUniqueSkiplist.md b/Documentation/DocuBlocks/ensureUniqueSkiplist.md
index a48821155e..3ae3c9270e 100644
--- a/Documentation/DocuBlocks/ensureUniqueSkiplist.md
+++ b/Documentation/DocuBlocks/ensureUniqueSkiplist.md
@@ -36,6 +36,7 @@ db.ids.save({ "myId": 789 });
db.ids.save({ "myId": 123 }); // xpError(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED)
~db._drop("ids");
@END_EXAMPLE_ARANGOSH_OUTPUT
+
@EXAMPLE_ARANGOSH_OUTPUT{ensureUniqueSkiplistMultiColmun}
~db._create("ids");
db.ids.ensureIndex({ type: "skiplist", fields: [ "name.first", "name.last" ], unique: true });
@@ -46,5 +47,3 @@ db.ids.save({ "name" : { "first" : "hans", "last": "jensen" }});
~ // xpError(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED)
~db._drop("ids");
@END_EXAMPLE_ARANGOSH_OUTPUT
-
-
diff --git a/Documentation/DocuBlocks/walPropertiesGet.md b/Documentation/DocuBlocks/walPropertiesGet.md
index f3992a7ef9..f248b3bec4 100644
--- a/Documentation/DocuBlocks/walPropertiesGet.md
+++ b/Documentation/DocuBlocks/walPropertiesGet.md
@@ -21,7 +21,7 @@ array with the following attributes:
@EXAMPLES
-@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesGet}
+@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesGet_mmfiles}
require("internal").wal.properties();
@END_EXAMPLE_ARANGOSH_OUTPUT
diff --git a/Documentation/DocuBlocks/walPropertiesSet.md b/Documentation/DocuBlocks/walPropertiesSet.md
index 5b04548ca5..315f2ecdfd 100644
--- a/Documentation/DocuBlocks/walPropertiesSet.md
+++ b/Documentation/DocuBlocks/walPropertiesSet.md
@@ -22,7 +22,7 @@ will be ignored and the configuration for them will not be modified.
@EXAMPLES
-@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesSet}
+@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesSet_mmfiles}
| require("internal").wal.properties({
| allowOverSizeEntries: true,
logfileSize: 32 * 1024 * 1024 });
diff --git a/Documentation/Examples/RestBatchImplicitBoundary.generated b/Documentation/Examples/RestBatchImplicitBoundary.generated
index 2dd72d580e..5e3e985fb7 100644
--- a/Documentation/Examples/RestBatchImplicitBoundary.generated
+++ b/Documentation/Examples/RestBatchImplicitBoundary.generated
@@ -17,4 +17,36 @@ content-type: application/json
x-arango-errors: 2
x-content-type-options: nosniff
-"--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 87\r\n\r\n{\"code\":404,\"error\":true,\"errorMessage\":\"collection or view not found\",\"errorNum\":1203}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 101\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":404,\"errorMessage\":\"unknown path '_api/collection/notexisting2'\"}\r\n--SomeBoundaryValue--"
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+
+HTTP/1.1 404 Not Found
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 87
+
+{
+ "code" : 404,
+ "error" : true,
+ "errorMessage" : "collection or view not found",
+ "errorNum" : 1203
+}↩
+
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+
+HTTP/1.1 404 Not Found
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 101
+
+{
+ "error" : true,
+ "code" : 404,
+ "errorNum" : 404,
+ "errorMessage" : "unknown path '_api/collection/notexisting2'"
+}↩
+
+--SomeBoundaryValue--
diff --git a/Documentation/Examples/RestBatchMultipartHeader.generated b/Documentation/Examples/RestBatchMultipartHeader.generated
index f13215fbe8..bff448498c 100644
--- a/Documentation/Examples/RestBatchMultipartHeader.generated
+++ b/Documentation/Examples/RestBatchMultipartHeader.generated
@@ -39,4 +39,162 @@ content-type: application/json
x-arango-errors: 1
x-content-type-options: nosniff
-"--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 64\r\n\r\n{\"server\":\"arango\",\"license\":\"community\",\"version\":\"3.5.0-rc.4\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 87\r\n\r\n{\"code\":404,\"error\":true,\"errorMessage\":\"collection or view not found\",\"errorNum\":1203}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 328\r\n\r\n{\"error\":false,\"code\":200,\"waitForSync\":false,\"type\":2,\"status\":3,\"journalSize\":33554432,\"keyOptions\":{\"allowUserKeys\":true,\"type\":\"traditional\",\"lastValue\":0},\"globallyUniqueId\":\"h82E80CF18F1A/103173\",\"statusString\":\"loaded\",\"id\":\"103173\",\"name\":\"products\",\"doCompact\":true,\"isSystem\":false,\"indexBuckets\":8,\"isVolatile\":false}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nLocation: /_api/collection/products/figures\r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 835\r\n\r\n{\"error\":false,\"code\":200,\"type\":2,\"status\":3,\"journalSize\":33554432,\"isVolatile\":false,\"name\":\"products\",\"doCompact\":true,\"isSystem\":false,\"count\":0,\"waitForSync\":false,\"figures\":{\"indexes\":{\"count\":1,\"size\":32128},\"documentReferences\":0,\"waitingFor\":\"-\",\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"compactionStatus\":{\"message\":\"compaction not yet started\",\"time\":\"2019-06-13T19:54:57Z\",\"count\":0,\"filesCombined\":0,\"bytesRead\":0,\"bytesWritten\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"revisions\":{\"count\":0,\"size\":48192},\"lastTick\":0,\"uncollectedLogfileEntries\":0},\"keyOptions\":{\"allowUserKeys\":true,\"type\":\"traditional\",\"lastValue\":0},\"globallyUniqueId\":\"h82E80CF18F1A/103173\",\"statusString\":\"loaded\",\"id\":\"103173\",\"indexBuckets\":8}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 40\r\n\r\n{\"error\":false,\"code\":200,\"id\":\"103173\"}\r\n--SomeBoundaryValue--"
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+Content-Id: myId1
+
+HTTP/1.1 200 OK
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 65
+
+{
+ "server" : "arango",
+ "license" : "community",
+ "version" : "3.6.0-devel"
+}↩
+
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+Content-Id: myId2
+
+HTTP/1.1 404 Not Found
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 87
+
+{
+ "code" : 404,
+ "error" : true,
+ "errorMessage" : "collection or view not found",
+ "errorNum" : 1203
+}↩
+
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+Content-Id: someId
+
+HTTP/1.1 200 OK
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 322
+
+{
+ "error" : false,
+ "code" : 200,
+ "waitForSync" : false,
+ "type" : 2,
+ "status" : 3,
+ "journalSize" : 33554432,
+ "keyOptions" : {
+ "allowUserKeys" : true,
+ "type" : "traditional",
+ "lastValue" : 0
+ },
+ "globallyUniqueId" : "h5E72DF2A848A/111",
+ "statusString" : "loaded",
+ "id" : "111",
+ "name" : "products",
+ "doCompact" : true,
+ "isSystem" : false,
+ "indexBuckets" : 8,
+ "isVolatile" : false
+}↩
+
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+Content-Id: nextId
+
+HTTP/1.1 200 OK
+Server:
+Location: /_api/collection/products/figures
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 829
+
+{
+ "error" : false,
+ "code" : 200,
+ "type" : 2,
+ "status" : 3,
+ "journalSize" : 33554432,
+ "isVolatile" : false,
+ "name" : "products",
+ "doCompact" : true,
+ "isSystem" : false,
+ "count" : 0,
+ "waitForSync" : false,
+ "figures" : {
+ "indexes" : {
+ "count" : 1,
+ "size" : 32128
+ },
+ "documentReferences" : 0,
+ "waitingFor" : "-",
+ "alive" : {
+ "count" : 0,
+ "size" : 0
+ },
+ "dead" : {
+ "count" : 0,
+ "size" : 0,
+ "deletion" : 0
+ },
+ "compactionStatus" : {
+ "message" : "compaction not yet started",
+ "time" : "2019-07-17T12:01:44Z",
+ "count" : 0,
+ "filesCombined" : 0,
+ "bytesRead" : 0,
+ "bytesWritten" : 0
+ },
+ "datafiles" : {
+ "count" : 0,
+ "fileSize" : 0
+ },
+ "journals" : {
+ "count" : 0,
+ "fileSize" : 0
+ },
+ "compactors" : {
+ "count" : 0,
+ "fileSize" : 0
+ },
+ "revisions" : {
+ "count" : 0,
+ "size" : 48192
+ },
+ "lastTick" : 0,
+ "uncollectedLogfileEntries" : 0
+ },
+ "keyOptions" : {
+ "allowUserKeys" : true,
+ "type" : "traditional",
+ "lastValue" : 0
+ },
+ "globallyUniqueId" : "h5E72DF2A848A/111",
+ "statusString" : "loaded",
+ "id" : "111",
+ "indexBuckets" : 8
+}↩
+
+--SomeBoundaryValue
+Content-Type: application/x-arango-batchpart
+Content-Id: otherId
+
+HTTP/1.1 200 OK
+Server:
+Connection:
+Content-Type: application/json; charset=utf-8
+Content-Length: 37
+
+{
+ "error" : false,
+ "code" : 200,
+ "id" : "111"
+}↩
+
+--SomeBoundaryValue--
diff --git a/Documentation/Examples/RestCollectionGetCollectionFigures.generated b/Documentation/Examples/RestCollectionGetCollectionFigures_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestCollectionGetCollectionFigures.generated
rename to Documentation/Examples/RestCollectionGetCollectionFigures_mmfiles.generated
diff --git a/Documentation/Examples/RestCollectionGetCollectionFigures_rocksdb.generated b/Documentation/Examples/RestCollectionGetCollectionFigures_rocksdb.generated
new file mode 100644
index 0000000000..16a0b49b61
--- /dev/null
+++ b/Documentation/Examples/RestCollectionGetCollectionFigures_rocksdb.generated
@@ -0,0 +1,37 @@
+shell> curl --header 'accept: application/json' --dump - http://localhost:8529/_api/collection/products/figures
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+location: /_api/collection/products/figures
+x-content-type-options: nosniff
+
+{
+ "error" : false,
+ "code" : 200,
+ "type" : 2,
+ "status" : 3,
+ "name" : "products",
+ "waitForSync" : false,
+ "objectId" : "101",
+ "cacheEnabled" : false,
+ "figures" : {
+ "indexes" : {
+ "count" : 1,
+ "size" : 37
+ },
+ "documentsSize" : 846,
+ "cacheInUse" : false,
+ "cacheSize" : 0,
+ "cacheUsage" : 0
+ },
+ "keyOptions" : {
+ "allowUserKeys" : true,
+ "type" : "traditional",
+ "lastValue" : 107
+ },
+ "globallyUniqueId" : "h5FFC6746B13/102",
+ "statusString" : "loaded",
+ "id" : "102",
+ "count" : 1,
+ "isSystem" : false
+}
diff --git a/Documentation/Examples/RestCollectionRotateNoJournal.generated b/Documentation/Examples/RestCollectionRotateNoJournal_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestCollectionRotateNoJournal.generated
rename to Documentation/Examples/RestCollectionRotateNoJournal_mmfiles.generated
diff --git a/Documentation/Examples/RestCollectionRotate.generated b/Documentation/Examples/RestCollectionRotate_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestCollectionRotate.generated
rename to Documentation/Examples/RestCollectionRotate_mmfiles.generated
diff --git a/Documentation/Examples/RestEngine.generated b/Documentation/Examples/RestEngine_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestEngine.generated
rename to Documentation/Examples/RestEngine_mmfiles.generated
diff --git a/Documentation/Examples/RestEngine_rocksdb.generated b/Documentation/Examples/RestEngine_rocksdb.generated
new file mode 100644
index 0000000000..8854662b4a
--- /dev/null
+++ b/Documentation/Examples/RestEngine_rocksdb.generated
@@ -0,0 +1,28 @@
+shell> curl --header 'accept: application/json' --dump - http://localhost:8529/_api/engine
+
+HTTP/1.1 200 OK
+content-type: application/json; charset=utf-8
+x-content-type-options: nosniff
+
+{
+ "name" : "rocksdb",
+ "supports" : {
+ "dfdb" : false,
+ "indexes" : [
+ "primary",
+ "edge",
+ "hash",
+ "skiplist",
+ "ttl",
+ "persistent",
+ "geo",
+ "fulltext"
+ ],
+ "aliases" : {
+ "indexes" : {
+ "skiplist" : "persistent",
+ "hash" : "persistent"
+ }
+ }
+ }
+}
diff --git a/Documentation/Examples/RestReplicationDumpEmpty.generated b/Documentation/Examples/RestReplicationDumpEmpty_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestReplicationDumpEmpty.generated
rename to Documentation/Examples/RestReplicationDumpEmpty_mmfiles.generated
diff --git a/Documentation/Examples/RestReplicationDump.generated b/Documentation/Examples/RestReplicationDump_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestReplicationDump.generated
rename to Documentation/Examples/RestReplicationDump_mmfiles.generated
diff --git a/Documentation/Examples/RestReplicationInventoryIndexes.generated b/Documentation/Examples/RestReplicationInventoryIndexes_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestReplicationInventoryIndexes.generated
rename to Documentation/Examples/RestReplicationInventoryIndexes_mmfiles.generated
diff --git a/Documentation/Examples/RestReplicationInventory.generated b/Documentation/Examples/RestReplicationInventory_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestReplicationInventory.generated
rename to Documentation/Examples/RestReplicationInventory_mmfiles.generated
diff --git a/Documentation/Examples/RestWalPropertiesGet.generated b/Documentation/Examples/RestWalPropertiesGet_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestWalPropertiesGet.generated
rename to Documentation/Examples/RestWalPropertiesGet_mmfiles.generated
diff --git a/Documentation/Examples/RestWalPropertiesPut.generated b/Documentation/Examples/RestWalPropertiesPut_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestWalPropertiesPut.generated
rename to Documentation/Examples/RestWalPropertiesPut_mmfiles.generated
diff --git a/Documentation/Examples/RestWalTransactionsGet.generated b/Documentation/Examples/RestWalTransactionsGet_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/RestWalTransactionsGet.generated
rename to Documentation/Examples/RestWalTransactionsGet_mmfiles.generated
diff --git a/Documentation/Examples/WalPropertiesGet.generated b/Documentation/Examples/WalPropertiesGet.generated
deleted file mode 100644
index 0ecdd349da..0000000000
--- a/Documentation/Examples/WalPropertiesGet.generated
+++ /dev/null
@@ -1,10 +0,0 @@
-arangosh> require("internal").wal.properties();
-{
- "allowOversizeEntries" : true,
- "logfileSize" : 33554432,
- "historicLogfiles" : 10,
- "reserveLogfiles" : 3,
- "syncInterval" : 100,
- "throttleWait" : 15000,
- "throttleWhenPending" : 0
-}
diff --git a/Documentation/Examples/WalPropertiesGet_mmfiles.generated b/Documentation/Examples/WalPropertiesGet_mmfiles.generated
new file mode 100644
index 0000000000..31a81b8c99
--- /dev/null
+++ b/Documentation/Examples/WalPropertiesGet_mmfiles.generated
@@ -0,0 +1,3 @@
+arangosh> require("internal").wal.properties();
+{
+}
diff --git a/Documentation/Examples/WalPropertiesSet.generated b/Documentation/Examples/WalPropertiesSet.generated
deleted file mode 100644
index a8fbe6a2d8..0000000000
--- a/Documentation/Examples/WalPropertiesSet.generated
+++ /dev/null
@@ -1,12 +0,0 @@
-arangosh> require("internal").wal.properties({
-........> allowOverSizeEntries: true,
-........> logfileSize: 32 * 1024 * 1024 });
-{
- "allowOversizeEntries" : true,
- "logfileSize" : 33554432,
- "historicLogfiles" : 10,
- "reserveLogfiles" : 3,
- "syncInterval" : 100,
- "throttleWait" : 15000,
- "throttleWhenPending" : 0
-}
diff --git a/Documentation/Examples/WalPropertiesSet_mmfiles.generated b/Documentation/Examples/WalPropertiesSet_mmfiles.generated
new file mode 100644
index 0000000000..e01937d11c
--- /dev/null
+++ b/Documentation/Examples/WalPropertiesSet_mmfiles.generated
@@ -0,0 +1,5 @@
+arangosh> require("internal").wal.properties({
+........> allowOverSizeEntries: true,
+........> logfileSize: 32 * 1024 * 1024 });
+{
+}
diff --git a/Documentation/Examples/collectionFigures.generated b/Documentation/Examples/collectionFigures_mmfiles.generated
similarity index 100%
rename from Documentation/Examples/collectionFigures.generated
rename to Documentation/Examples/collectionFigures_mmfiles.generated
diff --git a/Documentation/README_maintainers.md b/Documentation/README_maintainers.md
index 77794a93d2..c00e2d6cdd 100644
--- a/Documentation/README_maintainers.md
+++ b/Documentation/README_maintainers.md
@@ -237,8 +237,10 @@ Here is how its details work:
- an example start is marked with *@EXAMPLE_ARANGOSH_OUTPUT* or *@EXAMPLE_ARANGOSH_RUN*
- the example is named by the string provided in brackets after the above key
  - the output is written to `Documentation/Examples/<example name>.generated`
+  - if your example depends on a storage engine, suffix its name with `_rocksdb` or `_mmfiles` so it is only run against a server using that storage engine (see the sketch below)
- examples end with *@END_EXAMPLE_[OUTPUT|RUN|AQL]*
- all code in between is executed as javascript in the **arangosh** while talking to a valid **arangod**.
+ - you should strive to group your examples by naming them with a common prefix per topic.
You may inspect the generated js code in `/tmp/arangosh.examples.js`
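+
+For instance, a sketch based on the `RestEngine_rocksdb` example added in this
+change: because its name ends in `_rocksdb`, it is only executed against a
+server started with the RocksDB engine, and its output is written to
+`Documentation/Examples/RestEngine_rocksdb.generated`:
+
+    @EXAMPLE_ARANGOSH_RUN{RestEngine_rocksdb}
+      var response = logCurlRequest('GET', '/_api/engine');
+      assert(response.code === 200);
+      logJsonResponse(response);
+    @END_EXAMPLE_ARANGOSH_RUN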
## OUTPUT, RUN and AQL specifics
diff --git a/Documentation/Scripts/exampleHeader.js b/Documentation/Scripts/exampleHeader.js
index 155ef65918..ae49a7b63e 100644
--- a/Documentation/Scripts/exampleHeader.js
+++ b/Documentation/Scripts/exampleHeader.js
@@ -98,6 +98,22 @@ const htmlAppender = function(text) {
const rawAppender = function(text) {
output += text;
};
+
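+// Appender that detects lines looking like JSON objects or arrays and
+// pretty-prints them, so batch/multipart bodies in the generated examples
+// show formatted JSON instead of one long escaped string; other lines are
+// passed through unchanged.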
+const plainAppender = function(text) {
+ // do we have a line that could be json? try to parse & format it.
+  if (text.match(/^\{.*\}$/) || text.match(/^\[.*\]$/)) {
+ try {
+ let parsed = JSON.parse(text);
+      output += highlight("js", internal.inspect(parsed)) + "↩\n";
+ } catch (x) {
+ // fallback to plain text.
+ output += text;
+ }
+ } else {
+ output += text;
+ }
+};
+
const shellAppender = function(text) {
output += highlight("shell", text);
};
@@ -108,6 +124,7 @@ const log = function (a) {
};
var logCurlRequestRaw = internal.appendCurlRequest(shellAppender, jsonAppender, rawAppender);
+var logCurlRequestPlain = internal.appendCurlRequest(shellAppender, jsonAppender, plainAppender);
var logCurlRequest = function () {
if ((arguments.length > 1) &&
(arguments[1] !== undefined) &&
@@ -139,6 +156,7 @@ var logJsonResponse = internal.appendJsonResponse(rawAppender, jsonAppender);
var logJsonLResponse = internal.appendJsonLResponse(rawAppender, jsonLAppender);
var logHtmlResponse = internal.appendRawResponse(rawAppender, htmlAppender);
var logRawResponse = internal.appendRawResponse(rawAppender, rawAppender);
+var logPlainResponse = internal.appendPlainResponse(plainAppender, plainAppender);
var logErrorResponse = function (response) {
allErrors += "Server reply was: " + JSON.stringify(response) + "\n";
};
diff --git a/js/client/bootstrap/modules/internal.js b/js/client/bootstrap/modules/internal.js
index 08896c39ae..feaadf4d84 100644
--- a/js/client/bootstrap/modules/internal.js
+++ b/js/client/bootstrap/modules/internal.js
@@ -1,20 +1,5 @@
/* jshint -W051:true */
/* eslint-disable */
-
-let appendHeaders = function(appender, headers) {
- var key;
- // generate header
- appender('HTTP/1.1 ' + headers['http/1.1'] + '\n');
-
- for (key in headers) {
- if (headers.hasOwnProperty(key)) {
- if (key !== 'http/1.1' && key !== 'server' && key !== 'connection'
- && key !== 'content-length') {
- appender(key + ': ' + headers[key] + '\n');
- }
- }
- }
-};
;(function () {
'use strict'
@@ -226,6 +211,21 @@ let appendHeaders = function(appender, headers) {
};
};
+ let appendHeaders = function(appender, headers) {
+ var key;
+ // generate header
+ appender('HTTP/1.1 ' + headers['http/1.1'] + '\n');
+
+ for (key in headers) {
+ if (headers.hasOwnProperty(key)) {
+ if (key !== 'http/1.1' && key !== 'server' && key !== 'connection'
+ && key !== 'content-length') {
+ appender(key + ': ' + headers[key] + '\n');
+ }
+ }
+ }
+ };
+
// //////////////////////////////////////////////////////////////////////////////
// / @brief logs a raw response
// //////////////////////////////////////////////////////////////////////////////
@@ -237,12 +237,37 @@ let appendHeaders = function(appender, headers) {
// append body
if (response.body !== undefined) {
- syntaxAppender(exports.inspect(response.body));
+ syntaxAppender(internal.inspect(response.body));
appender('\n');
}
};
};
+ // //////////////////////////////////////////////////////////////////////////////
+  // / @brief logs a plain response, printing the body line by line without string-escaping it
+ // //////////////////////////////////////////////////////////////////////////////
+
+ exports.appendPlainResponse = function (appender, syntaxAppender) {
+ return function (response) {
+ appendHeaders(appender, response.headers);
+ appender('\n');
+
+ // append body
+ if (response.body !== undefined) {
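+        // emit the body line by line so that multi-line (e.g. multipart batch)
+        // responses remain readable in the generated examples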
+ let splitted = response.body.split(/\r\n|\r|\n/);
+ if (splitted.length > 0) {
+ splitted.forEach(function (line) {
+ syntaxAppender(line);
+ appender('\n');
+ });
+ } else {
+ syntaxAppender(response.body);
+ appender('\n');
+ }
+ }
+ };
+ };
+
// //////////////////////////////////////////////////////////////////////////////
// / @brief logs a response in JSON
// //////////////////////////////////////////////////////////////////////////////
diff --git a/utils/generateExamples.js b/utils/generateExamples.js
index 09880c7e06..6ebe51ea57 100644
--- a/utils/generateExamples.js
+++ b/utils/generateExamples.js
@@ -2,6 +2,7 @@
/*global start_pretty_print */
'use strict';
+const _ = require("lodash");
const fs = require("fs");
const internal = require("internal");
const executeExternal = internal.executeExternal;
@@ -119,134 +120,144 @@ function main(argv) {
args = args.concat(['--arangoshSetup']);
args = args.concat(documentationSourceDirs);
- let res = executeExternalAndWait(thePython, args);
+ let storageEngines = [['mmfiles', false], ['rocksdb', true]];
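+  // second tuple entry: whether storage-engine-agnostic examples are also
+  // generated in this pass (they are generated once, against rocksdb)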
+ let res;
- if (res.exit !== 0) {
- print("parsing the examples failed - aborting!");
- print(res);
- return -1;
- }
+ storageEngines.forEach(function (engine) {
+ let pyArgs = _.clone(args);
+ pyArgs.push('--storageEngine');
+ pyArgs.push(engine[0]);
+ pyArgs.push('--storageEngineAgnostic');
+ pyArgs.push(engine[1]);
+    print(pyArgs);
+ res = executeExternalAndWait(thePython, pyArgs);
- if (startServer) {
- let port = findFreePort();
- instanceInfo.port = port;
- serverEndpoint = protocol + "://127.0.0.1:" + port;
-
- instanceInfo.url = endpointToURL(serverEndpoint);
-
- fs.makeDirectoryRecursive(fs.join(tmpDataDir, "data"));
-
- let serverArgs = {};
- fs.makeDirectoryRecursive(fs.join(tmpDataDir, "apps"));
-
- serverArgs["configuration"] = "none";
- serverArgs["database.directory"] = fs.join(tmpDataDir, "data");
- serverArgs["javascript.app-path"] = fs.join(tmpDataDir, "apps");
- serverArgs["javascript.startup-directory"] = "js";
- serverArgs["javascript.module-directory"] = "enterprise/js";
- serverArgs["log.file"] = fs.join(tmpDataDir, "log");
- serverArgs["server.authentication"] = "false";
- serverArgs["server.endpoint"] = serverEndpoint;
- serverArgs["server.storage-engine"] = "mmfiles"; // examples depend on it
-
- print("================================================================================");
- ARANGOD = locateProgram("arangod", "Cannot find arangod to execute tests against");
- print(ARANGOD);
- print(toArgv(serverArgs));
- instanceInfo.pid = executeExternal(ARANGOD, toArgv(serverArgs)).pid;
-
- // Wait until the server is up:
- count = 0;
- instanceInfo.endpoint = serverEndpoint;
-
- while (true) {
- wait(0.5, false);
- let r = download(instanceInfo.url + "/_api/version", "");
-
- if (!r.error && r.code === 200) {
- break;
- }
-
- count++;
-
- if (count % 60 === 0) {
- res = statusExternal(instanceInfo.pid, false);
-
- if (res.status !== "RUNNING") {
- print("start failed - process is gone: " + yaml.safeDump(res));
- return 1;
- }
- }
+ if (res.exit !== 0) {
+ print("parsing the examples failed - aborting!");
+ print(res);
+ return -1;
}
- }
- let arangoshArgs = {
- 'configuration': fs.join(fs.makeAbsolute(''), 'etc', 'relative', 'arangosh.conf'),
- 'server.password': "",
- 'server.endpoint': serverEndpoint,
- 'javascript.execute': scriptArguments.outputFile
- };
+ if (startServer) {
+ let port = findFreePort();
+ instanceInfo.port = port;
+ serverEndpoint = protocol + "://127.0.0.1:" + port;
- print("--------------------------------------------------------------------------------");
- ARANGOSH = locateProgram("arangosh", "Cannot find arangosh to run tests with");
- print(ARANGOSH);
- print(internal.toArgv(arangoshArgs));
- res = executeExternalAndWait(ARANGOSH, internal.toArgv(arangoshArgs));
+ instanceInfo.url = endpointToURL(serverEndpoint);
- if (startServer) {
- if (typeof(instanceInfo.exitStatus) === 'undefined') {
- download(instanceInfo.url + "/_admin/shutdown", "", {method: "DELETE"});
+ fs.makeDirectoryRecursive(fs.join(tmpDataDir, engine[0], "data"));
- print("Waiting for server shut down");
+ let serverArgs = {};
+ fs.makeDirectoryRecursive(fs.join(tmpDataDir, engine[0], "apps"));
+
+ serverArgs["configuration"] = "none";
+ serverArgs["database.directory"] = fs.join(tmpDataDir, engine[0], "data");
+ serverArgs["javascript.app-path"] = fs.join(tmpDataDir, engine[0], "apps");
+ serverArgs["javascript.startup-directory"] = "js";
+ serverArgs["javascript.module-directory"] = "enterprise/js";
+ serverArgs["log.file"] = fs.join(tmpDataDir, engine[0], "log");
+ serverArgs["server.authentication"] = "false";
+ serverArgs["server.endpoint"] = serverEndpoint;
+ serverArgs["server.storage-engine"] = engine[0]
+
+ print("================================================================================");
+ ARANGOD = locateProgram("arangod", "Cannot find arangod to execute tests against");
+ print(ARANGOD);
+ print(toArgv(serverArgs));
+ instanceInfo.pid = executeExternal(ARANGOD, toArgv(serverArgs)).pid;
+
+ // Wait until the server is up:
count = 0;
- let bar = "[";
+ instanceInfo.endpoint = serverEndpoint;
- while (1) {
- instanceInfo.exitStatus = statusExternal(instanceInfo.pid, false);
+ while (true) {
+ wait(0.5, false);
+ let r = download(instanceInfo.url + "/_api/version", "");
- if (instanceInfo.exitStatus.status === "RUNNING") {
- count++;
- if (typeof(options.valgrind) === 'string') {
- wait(1);
- continue;
+ if (!r.error && r.code === 200) {
+ break;
+ }
+
+ count++;
+
+ if (count % 60 === 0) {
+ res = statusExternal(instanceInfo.pid, false);
+
+ if (res.status !== "RUNNING") {
+ print("start failed - process is gone: " + yaml.safeDump(res));
+ return 1;
}
- if (count % 10 === 0) {
- bar = bar + "#";
- }
- if (count > 600) {
- print("forcefully terminating " + yaml.safeDump(instanceInfo.pid) +
- " after 600 s grace period; marking crashy.");
- serverCrashed = true;
- killExternal(instanceInfo.pid);
- break;
- } else {
- wait(1);
- }
- } else if (instanceInfo.exitStatus.status !== "TERMINATED") {
- if (instanceInfo.exitStatus.hasOwnProperty('signal')) {
- print("Server shut down with : " +
- yaml.safeDump(instanceInfo.exitStatus) +
- " marking build as crashy.");
- serverCrashed = true;
- break;
- }
- if (internal.platform.substr(0, 3) === 'win') {
- // Windows: wait for procdump to do its job...
- statusExternal(instanceInfo.monitor, true);
- }
- } else {
- print("Server shutdown: Success.");
- break; // Success.
}
}
-
- if (count > 10) {
- print("long Server shutdown: " + bar + ']');
- }
-
}
- }
+
+ let arangoshArgs = {
+ 'configuration': fs.join(fs.makeAbsolute(''), 'etc', 'relative', 'arangosh.conf'),
+ 'server.password': "",
+ 'server.endpoint': serverEndpoint,
+ 'javascript.execute': scriptArguments.outputFile
+ };
+ print("--------------------------------------------------------------------------------");
+ ARANGOSH = locateProgram("arangosh", "Cannot find arangosh to run tests with");
+ print(ARANGOSH);
+ print(internal.toArgv(arangoshArgs));
+ res = executeExternalAndWait(ARANGOSH, internal.toArgv(arangoshArgs));
+
+ if (startServer) {
+ if (typeof(instanceInfo.exitStatus) === 'undefined') {
+ download(instanceInfo.url + "/_admin/shutdown", "", {method: "DELETE"});
+
+ print("Waiting for server shut down");
+ count = 0;
+ let bar = "[";
+
+ while (1) {
+ instanceInfo.exitStatus = statusExternal(instanceInfo.pid, false);
+
+ if (instanceInfo.exitStatus.status === "RUNNING") {
+ count++;
+ if (typeof(options.valgrind) === 'string') {
+ wait(1);
+ continue;
+ }
+ if (count % 10 === 0) {
+ bar = bar + "#";
+ }
+ if (count > 600) {
+ print("forcefully terminating " + yaml.safeDump(instanceInfo.pid) +
+ " after 600 s grace period; marking crashy.");
+ serverCrashed = true;
+ killExternal(instanceInfo.pid);
+ break;
+ } else {
+ wait(1);
+ }
+ } else if (instanceInfo.exitStatus.status !== "TERMINATED") {
+ if (instanceInfo.exitStatus.hasOwnProperty('signal')) {
+ print("Server shut down with : " +
+ yaml.safeDump(instanceInfo.exitStatus) +
+ " marking build as crashy.");
+ serverCrashed = true;
+ break;
+ }
+ if (internal.platform.substr(0, 3) === 'win') {
+ // Windows: wait for procdump to do its job...
+ statusExternal(instanceInfo.monitor, true);
+ }
+ } else {
+ print("Server shutdown: Success.");
+ break; // Success.
+ }
+ }
+
+ if (count > 10) {
+ print("long Server shutdown: " + bar + ']');
+ }
+
+ }
+ }
+ });
if (res.exit != 0) {
throw("generating examples failed!");
}
diff --git a/utils/generateExamples.py b/utils/generateExamples.py
index 60aad2cdce..6e28d0a317 100644
--- a/utils/generateExamples.py
+++ b/utils/generateExamples.py
@@ -128,7 +128,13 @@ OPTION_ARANGOSH_SETUP = 1
OPTION_OUTPUT_DIR = 2
OPTION_FILTER = 3
OPTION_OUTPUT_FILE = 4
+OPTION_OUTPUT_ENGINE = 5
+OPTION_OUTPUT_FILTER_NONMATCHING = 6
+engines = ["mmfiles", "rocksdb"]
+engine = "mmfiles"
+otherEngine = "mmfiles"
+storageEngineAgnostic = True
escapeBS = re.compile("\\\\")
doubleBS = "\\\\\\\\"
@@ -198,17 +204,17 @@ def matchStartLine(line, filename):
if m:
strip = m.group(1)
name = m.group(2)
-
+
if name in AQLFiles:
print >> sys.stderr, "%s\nduplicate test name '%s' in file %s!\n%s\n" % ('#' * 80, name, filename, '#' * 80)
sys.exit(1)
-
+
# if we match for filters, only output these!
if ((FilterForTestcase != None) and not FilterForTestcase.match(name)):
print >> sys.stderr, "AQL: filtering out testcase '%s'" %name
filterTestList.append(name)
return("", STATE_BEGIN);
-
+
AQLFiles[name] = True
return (name, STATE_AQL)
@@ -257,10 +263,10 @@ AQLEXPLAIN="aql_explain"
### @brief loop over the lines of one input file
################################################################################
-def analyzeFile(f, filename):
+def analyzeFile(f, filename):
global RunTests, TESTLINES, TYPE, LINE_NO, STRING
strip = None
-
+
name = ""
partialCmd = ""
partialLine = ""
@@ -280,7 +286,7 @@ def analyzeFile(f, filename):
if state == STATE_BEGIN:
(name, state) = matchStartLine(line, filename)
- if state != STATE_BEGIN:
+ if state != STATE_BEGIN:
MapSourceFiles[name] = filename
RunTests[name] = {}
RunTests[name][TYPE] = state
@@ -315,7 +321,7 @@ def analyzeFile(f, filename):
# we are within a example
line = line[len(strip):]
showCmd = True
-
+
# end-example test
m = endExample.match(line)
@@ -601,18 +607,18 @@ if (allErrors.length > 0) {
################################################################################
def loopDirectories():
- global ArangoshSetup, OutputDir, FilterForTestcase
+ global ArangoshSetup, OutputDir, FilterForTestcase, storageEngineAgnostic, engine, otherEngine
argv = sys.argv
argv.pop(0)
filenames = []
fstate = OPTION_NORMAL
-
+
for filename in argv:
if filename == "--arangoshSetup":
fstate = OPTION_ARANGOSH_SETUP
continue
- if filename == "--onlyThisOne":
+ if filename == "--onlyThisOne":
fstate = OPTION_FILTER
continue
@@ -624,6 +630,14 @@ def loopDirectories():
fstate = OPTION_OUTPUT_FILE
continue
+ if filename == "--storageEngine":
+ fstate = OPTION_OUTPUT_ENGINE
+ continue
+
+ if filename == "--storageEngineAgnostic":
+ fstate = OPTION_OUTPUT_FILTER_NONMATCHING
+ continue
+
if fstate == OPTION_NORMAL:
if os.path.isdir(filename):
for root, dirs, files in os.walk(filename):
@@ -635,17 +649,17 @@ def loopDirectories():
elif fstate == OPTION_FILTER:
fstate = OPTION_NORMAL
- if (len(filename) > 0):
+ if (len(filename) > 0):
FilterForTestcase = re.compile(filename);
elif fstate == OPTION_ARANGOSH_SETUP:
fstate = OPTION_NORMAL
f = open(filename, "r")
-
+
for line in f:
line = line.rstrip('\n')
ArangoshSetup += line + "\n"
-
+
f.close()
elif fstate == OPTION_OUTPUT_DIR:
@@ -656,21 +670,41 @@ def loopDirectories():
fstate = OPTION_NORMAL
sys.stdout = open(filename, 'w')
+ elif fstate == OPTION_OUTPUT_ENGINE:
+ fstate = OPTION_NORMAL
+ engine = filename
+ if engine == engines[0]:
+ otherEngine = engines[1]
+ else:
+ otherEngine = engines[0]
+
+ elif fstate == OPTION_OUTPUT_FILTER_NONMATCHING:
+ fstate = OPTION_NORMAL
+ storageEngineAgnostic = filename == "true"
+
for filename in filenames:
if (filename.find("#") < 0):
f = open(filename, "r")
analyzeFile(f, filename)
-
+
f.close()
else:
print >> sys.stderr, "skipping %s\n" % (filename)
def generateTestCases():
- global TESTLINES, TYPE, LINE_NO, STRING, RunTests
+ global TESTLINES, TYPE, LINE_NO, STRING, RunTests, storageEngineAgnostic, engine, otherEngine
testNames = RunTests.keys()
testNames.sort()
+
for thisTest in testNames:
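+        # skip examples that belong to the other storage engine; when this pass
+        # is not storage-engine-agnostic, only keep examples whose name ends
+        # with the current engine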
+ if thisTest.endswith(otherEngine):
+ print >> sys.stderr, "skipping " + thisTest
+ continue
+ if not storageEngineAgnostic and not thisTest.endswith(engine):
+ print >> sys.stderr, "skipping " + thisTest
+ continue
+
if RunTests[thisTest][TYPE] == STATE_ARANGOSH_OUTPUT:
generateArangoshOutput(thisTest)
elif RunTests[thisTest][TYPE] == STATE_ARANGOSH_RUN: