mirror of https://gitee.com/bigwinds/arangodb
Documentation 3.5/example generation improvements (#9498)
* Documentation/switch example generation to rocksdb (#9478)
* improve the formatting of the batch request documentation
* split to linewise, check whether we have a json and format it.
* Add missing asterisk
* Minor fixes / improvements
This commit is contained in:
parent 165e22658c
commit ef3aeb00e5
@@ -18,14 +18,25 @@ will be *mmfiles* or *rocksdb*

@EXAMPLES

Return the active storage engine
Return the active storage engine with the MMFiles storage engine in use:

@EXAMPLE_ARANGOSH_RUN{RestEngine}
@EXAMPLE_ARANGOSH_RUN{RestEngine_mmfiles}
var response = logCurlRequest('GET', '/_api/engine');

assert(response.code === 200);

logJsonResponse(response);
@END_EXAMPLE_ARANGOSH_RUN

Return the active storage engine with the RocksDB storage engine in use:

@EXAMPLE_ARANGOSH_RUN{RestEngine_rocksdb}
var response = logCurlRequest('GET', '/_api/engine');

assert(response.code === 200);

logJsonResponse(response);
@END_EXAMPLE_ARANGOSH_RUN

@endDocuBlock

@@ -108,11 +108,11 @@ The boundary (`SomeBoundaryValue`) is passed to the server in the HTTP
parts.join("\r\n" + "--" + boundary + "\r\n") +
"--" + boundary + "--\r\n";

var response = logCurlRequestRaw('POST', '/_api/batch', body, headers);
var response = logCurlRequestPlain('POST', '/_api/batch', body, headers);

assert(response.code === 200);

logRawResponse(response);
logPlainResponse(response);
@END_EXAMPLE_ARANGOSH_RUN

Sending a batch request, setting the boundary implicitly (the server will
@@ -130,12 +130,12 @@ in this case try to find the boundary at the beginning of the request body).
parts.join("\r\n" + "--" + boundary + "\r\n") +
"--" + boundary + "--\r\n";

var response = logCurlRequestRaw('POST', '/_api/batch', body);
var response = logCurlRequestPlain('POST', '/_api/batch', body);

assert(response.code === 200);
assert(response.headers['x-arango-errors'] == 2);

logRawResponse(response);
logPlainResponse(response);
@END_EXAMPLE_ARANGOSH_RUN
@endDocuBlock

@@ -18,9 +18,9 @@ The name of the collection.
@RESTDESCRIPTION
In addition to the above, the result also contains the number of documents
and additional statistical information about the collection.
**Note** : This will always load the collection into memory.
**Note**: This will always load the collection into memory.

**Note**: collection data that are stored in the write-ahead log only are
**Note**: collection data that is stored in the write-ahead log only is
not reported in the results. When the write-ahead log is collected, documents
might be added to journals and datafiles of the collection, which may modify
the figures of the collection.

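As an illustration of the note above (a sketch, not part of the changed files; the collection name is made up), flushing the write-ahead log before requesting the figures makes recently stored documents show up in the result, which is exactly what the RocksDB example further down does:

var coll = db._create("figuresDemo");            // hypothetical collection
coll.save({ "test": "hello" });
require("internal").wal.flush(true, true);       // push WAL-only data into the collection figures
var response = logCurlRequest('GET', "/_api/collection/" + coll.name() + "/figures");
assert(response.code === 200);
logJsonResponse(response);
db._drop("figuresDemo");
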
@@ -126,6 +126,7 @@ engine (in bytes). This figure does not include the document data but only mappi
from document revision ids to storage engine datafile positions.

@RESTSTRUCT{indexes,collection_figures,object,required,collection_figures_indexes}

@RESTSTRUCT{count,collection_figures_indexes,integer,required,int64}
The total number of indexes defined for the collection, including the pre-defined
indexes (e.g. primary index).

@@ -153,6 +154,7 @@ head of the collection's cleanup queue. This information can be used for debuggi
compaction and unload issues.

@RESTSTRUCT{compactionStatus,collection_figures,object,optional,compactionStatus_attributes}

@RESTSTRUCT{message,compactionStatus_attributes,string,optional,string}
The action that was performed when the compaction was last run for the collection.
This information can be used for debugging compaction issues.

@@ -174,9 +176,9 @@ is returned.

@EXAMPLES

Using an identifier and requesting the figures of the collection:
Using an identifier and requesting the figures of the collection (MMFiles storage engine):

@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures}
@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
@@ -191,5 +193,24 @@ Using an identifier and requesting the figures of the collection:
logJsonResponse(response);
db._drop(cn);
@END_EXAMPLE_ARANGOSH_RUN

Using an identifier and requesting the figures of the collection (RocksDB storage engine):

@EXAMPLE_ARANGOSH_RUN{RestCollectionGetCollectionFigures_rocksdb}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
coll.save({"test":"hello"});
require("internal").wal.flush(true, true);
var url = "/_api/collection/"+ coll.name() + "/figures";

var response = logCurlRequest('GET', url);

assert(response.code === 200);

logJsonResponse(response);
db._drop(cn);
@END_EXAMPLE_ARANGOSH_RUN

@endDocuBlock

@@ -43,7 +43,7 @@ If the *collection-name* is unknown, then a *HTTP 404* is returned.

Rotating the journal:

@EXAMPLE_ARANGOSH_RUN{RestCollectionRotate}
@EXAMPLE_ARANGOSH_RUN{RestCollectionRotate_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);
@@ -62,7 +62,7 @@ Rotating the journal:

Rotating if no journal exists:

@EXAMPLE_ARANGOSH_RUN{RestCollectionRotateNoJournal}
@EXAMPLE_ARANGOSH_RUN{RestCollectionRotateNoJournal_mmfiles}
var cn = "products";
db._drop(cn);
var coll = db._create(cn);

@@ -104,7 +104,7 @@ is returned if an error occurred while assembling the response.

Empty collection:

@EXAMPLE_ARANGOSH_RUN{RestReplicationDumpEmpty}
@EXAMPLE_ARANGOSH_RUN{RestReplicationDumpEmpty_mmfiles}
db._drop("testCollection");
var c = db._create("testCollection");
var url = "/_api/replication/dump?collection=" + c.name();
@@ -118,7 +118,7 @@ Empty collection:

Non-empty collection *(One JSON document per line)*:

@EXAMPLE_ARANGOSH_RUN{RestReplicationDump}
@EXAMPLE_ARANGOSH_RUN{RestReplicationDump_mmfiles}
db._drop("testCollection");
var c = db._create("testCollection");
c.save({ "test" : true, "a" : "abc", "_key" : "abcdef" });

@@ -11,7 +11,7 @@ data (identified by tick value) are still available for replication.

The body of the response contains a JSON object.
* *tickMin*: minimum tick available
* *tickMax: maximum tick available
* *tickMax*: maximum tick available
* *time*: the server time as string in format "YYYY-MM-DDTHH:MM:SSZ"
* *server*: An object with fields *version* and *serverId*

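To make the field list above concrete, a sketch of the shape of such a response body (the values are made up for illustration, and exact value types may differ):

{
  "tickMin" : "5",
  "tickMax" : "892",
  "time" : "2019-07-17T12:01:44Z",
  "server" : {
    "version" : "3.5.0",
    "serverId" : "1234567890"
  }
}
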
@@ -96,7 +96,7 @@ is returned if an error occurred while assembling the response.

@EXAMPLES

@EXAMPLE_ARANGOSH_RUN{RestReplicationInventory}
@EXAMPLE_ARANGOSH_RUN{RestReplicationInventory_mmfiles}
var url = "/_api/replication/inventory";
var response = logCurlRequest('GET', url);
@@ -107,7 +107,7 @@ is returned if an error occurred while assembling the response.

With some additional indexes:

@EXAMPLE_ARANGOSH_RUN{RestReplicationInventoryIndexes}
@EXAMPLE_ARANGOSH_RUN{RestReplicationInventoryIndexes_mmfiles}
db._drop("IndexedCollection1");
var c1 = db._create("IndexedCollection1");
c1.ensureHashIndex("name");

@@ -32,7 +32,7 @@ is returned when an invalid HTTP method is used.

@EXAMPLES

@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesGet}
@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesGet_mmfiles}
var url = "/_admin/wal/properties";
var response = logCurlRequest('GET', url);

@@ -26,7 +26,7 @@ is returned when an invalid HTTP method is used.

@EXAMPLES

@EXAMPLE_ARANGOSH_RUN{RestWalTransactionsGet}
@EXAMPLE_ARANGOSH_RUN{RestWalTransactionsGet_mmfiles}
var url = "/_admin/wal/transactions";
var response = logCurlRequest('GET', url);

@@ -33,7 +33,7 @@ is returned when an invalid HTTP method is used.

@EXAMPLES

@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesPut}
@EXAMPLE_ARANGOSH_RUN{RestWalPropertiesPut_mmfiles}
var url = "/_admin/wal/properties";
var body = {
logfileSize: 32 * 1024 * 1024,

@@ -83,7 +83,7 @@ used as a lower bound approximation of the disk usage.

@EXAMPLES

@EXAMPLE_ARANGOSH_OUTPUT{collectionFigures}
@EXAMPLE_ARANGOSH_OUTPUT{collectionFigures_mmfiles}
~ require("internal").wal.flush(true, true);
db.demo.figures()
@END_EXAMPLE_ARANGOSH_OUTPUT

@@ -36,6 +36,7 @@ db.ids.save({ "myId": 789 });
db.ids.save({ "myId": 123 }); // xpError(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED)
~db._drop("ids");
@END_EXAMPLE_ARANGOSH_OUTPUT

@EXAMPLE_ARANGOSH_OUTPUT{ensureUniqueSkiplistMultiColmun}
~db._create("ids");
db.ids.ensureIndex({ type: "skiplist", fields: [ "name.first", "name.last" ], unique: true });
@@ -46,5 +47,3 @@ db.ids.save({ "name" : { "first" : "hans", "last": "jensen" }});
~ // xpError(ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED)
~db._drop("ids");
@END_EXAMPLE_ARANGOSH_OUTPUT

@@ -21,7 +21,7 @@ array with the following attributes:

@EXAMPLES

@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesGet}
@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesGet_mmfiles}
require("internal").wal.properties();
@END_EXAMPLE_ARANGOSH_OUTPUT

@@ -22,7 +22,7 @@ will be ignored and the configuration for them will not be modified.

@EXAMPLES

@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesSet}
@EXAMPLE_ARANGOSH_OUTPUT{WalPropertiesSet_mmfiles}
| require("internal").wal.properties({
| allowOverSizeEntries: true,
logfileSize: 32 * 1024 * 1024 });

@@ -17,4 +17,36 @@ content-type: application/json
x-arango-errors: 2
x-content-type-options: nosniff

"--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 87\r\n\r\n{\"code\":404,\"error\":true,\"errorMessage\":\"collection or view not found\",\"errorNum\":1203}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 101\r\n\r\n{\"error\":true,\"code\":404,\"errorNum\":404,\"errorMessage\":\"unknown path '_api/collection/notexisting2'\"}\r\n--SomeBoundaryValue--"
--SomeBoundaryValue
Content-Type: application/x-arango-batchpart

HTTP/1.1 404 Not Found
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 87

{
  "code" : 404,
  "error" : true,
  "errorMessage" : "collection or view not found",
  "errorNum" : 1203
}↩

--SomeBoundaryValue
Content-Type: application/x-arango-batchpart

HTTP/1.1 404 Not Found
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 101

{
  "error" : true,
  "code" : 404,
  "errorNum" : 404,
  "errorMessage" : "unknown path '_api/collection/notexisting2'"
}↩

--SomeBoundaryValue--

@@ -39,4 +39,162 @@ content-type: application/json
x-arango-errors: 1
x-content-type-options: nosniff

"--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId1\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 64\r\n\r\n{\"server\":\"arango\",\"license\":\"community\",\"version\":\"3.5.0-rc.4\"}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: myId2\r\n\r\nHTTP/1.1 404 Not Found\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 87\r\n\r\n{\"code\":404,\"error\":true,\"errorMessage\":\"collection or view not found\",\"errorNum\":1203}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: someId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 328\r\n\r\n{\"error\":false,\"code\":200,\"waitForSync\":false,\"type\":2,\"status\":3,\"journalSize\":33554432,\"keyOptions\":{\"allowUserKeys\":true,\"type\":\"traditional\",\"lastValue\":0},\"globallyUniqueId\":\"h82E80CF18F1A/103173\",\"statusString\":\"loaded\",\"id\":\"103173\",\"name\":\"products\",\"doCompact\":true,\"isSystem\":false,\"indexBuckets\":8,\"isVolatile\":false}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: nextId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nLocation: /_api/collection/products/figures\r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 835\r\n\r\n{\"error\":false,\"code\":200,\"type\":2,\"status\":3,\"journalSize\":33554432,\"isVolatile\":false,\"name\":\"products\",\"doCompact\":true,\"isSystem\":false,\"count\":0,\"waitForSync\":false,\"figures\":{\"indexes\":{\"count\":1,\"size\":32128},\"documentReferences\":0,\"waitingFor\":\"-\",\"alive\":{\"count\":0,\"size\":0},\"dead\":{\"count\":0,\"size\":0,\"deletion\":0},\"compactionStatus\":{\"message\":\"compaction not yet started\",\"time\":\"2019-06-13T19:54:57Z\",\"count\":0,\"filesCombined\":0,\"bytesRead\":0,\"bytesWritten\":0},\"datafiles\":{\"count\":0,\"fileSize\":0},\"journals\":{\"count\":0,\"fileSize\":0},\"compactors\":{\"count\":0,\"fileSize\":0},\"revisions\":{\"count\":0,\"size\":48192},\"lastTick\":0,\"uncollectedLogfileEntries\":0},\"keyOptions\":{\"allowUserKeys\":true,\"type\":\"traditional\",\"lastValue\":0},\"globallyUniqueId\":\"h82E80CF18F1A/103173\",\"statusString\":\"loaded\",\"id\":\"103173\",\"indexBuckets\":8}\r\n--SomeBoundaryValue\r\nContent-Type: application/x-arango-batchpart\r\nContent-Id: otherId\r\n\r\nHTTP/1.1 200 OK\r\nServer: \r\nConnection: \r\nContent-Type: application/json; charset=utf-8\r\nContent-Length: 40\r\n\r\n{\"error\":false,\"code\":200,\"id\":\"103173\"}\r\n--SomeBoundaryValue--"
--SomeBoundaryValue
Content-Type: application/x-arango-batchpart
Content-Id: myId1

HTTP/1.1 200 OK
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 65

{
  "server" : "arango",
  "license" : "community",
  "version" : "3.6.0-devel"
}↩

--SomeBoundaryValue
Content-Type: application/x-arango-batchpart
Content-Id: myId2

HTTP/1.1 404 Not Found
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 87

{
  "code" : 404,
  "error" : true,
  "errorMessage" : "collection or view not found",
  "errorNum" : 1203
}↩

--SomeBoundaryValue
Content-Type: application/x-arango-batchpart
Content-Id: someId

HTTP/1.1 200 OK
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 322

{
  "error" : false,
  "code" : 200,
  "waitForSync" : false,
  "type" : 2,
  "status" : 3,
  "journalSize" : 33554432,
  "keyOptions" : {
    "allowUserKeys" : true,
    "type" : "traditional",
    "lastValue" : 0
  },
  "globallyUniqueId" : "h5E72DF2A848A/111",
  "statusString" : "loaded",
  "id" : "111",
  "name" : "products",
  "doCompact" : true,
  "isSystem" : false,
  "indexBuckets" : 8,
  "isVolatile" : false
}↩

--SomeBoundaryValue
Content-Type: application/x-arango-batchpart
Content-Id: nextId

HTTP/1.1 200 OK
Server:
Location: /_api/collection/products/figures
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 829

{
  "error" : false,
  "code" : 200,
  "type" : 2,
  "status" : 3,
  "journalSize" : 33554432,
  "isVolatile" : false,
  "name" : "products",
  "doCompact" : true,
  "isSystem" : false,
  "count" : 0,
  "waitForSync" : false,
  "figures" : {
    "indexes" : {
      "count" : 1,
      "size" : 32128
    },
    "documentReferences" : 0,
    "waitingFor" : "-",
    "alive" : {
      "count" : 0,
      "size" : 0
    },
    "dead" : {
      "count" : 0,
      "size" : 0,
      "deletion" : 0
    },
    "compactionStatus" : {
      "message" : "compaction not yet started",
      "time" : "2019-07-17T12:01:44Z",
      "count" : 0,
      "filesCombined" : 0,
      "bytesRead" : 0,
      "bytesWritten" : 0
    },
    "datafiles" : {
      "count" : 0,
      "fileSize" : 0
    },
    "journals" : {
      "count" : 0,
      "fileSize" : 0
    },
    "compactors" : {
      "count" : 0,
      "fileSize" : 0
    },
    "revisions" : {
      "count" : 0,
      "size" : 48192
    },
    "lastTick" : 0,
    "uncollectedLogfileEntries" : 0
  },
  "keyOptions" : {
    "allowUserKeys" : true,
    "type" : "traditional",
    "lastValue" : 0
  },
  "globallyUniqueId" : "h5E72DF2A848A/111",
  "statusString" : "loaded",
  "id" : "111",
  "indexBuckets" : 8
}↩

--SomeBoundaryValue
Content-Type: application/x-arango-batchpart
Content-Id: otherId

HTTP/1.1 200 OK
Server:
Connection:
Content-Type: application/json; charset=utf-8
Content-Length: 37

{
  "error" : false,
  "code" : 200,
  "id" : "111"
}↩

--SomeBoundaryValue--

@@ -0,0 +1,37 @@
shell> curl --header 'accept: application/json' --dump - http://localhost:8529/_api/collection/products/figures

HTTP/1.1 OK
content-type: application/json; charset=utf-8
location: /_api/collection/products/figures
x-content-type-options: nosniff

{
  "error" : false,
  "code" : 200,
  "type" : 2,
  "status" : 3,
  "name" : "products",
  "waitForSync" : false,
  "objectId" : "101",
  "cacheEnabled" : false,
  "figures" : {
    "indexes" : {
      "count" : 1,
      "size" : 37
    },
    "documentsSize" : 846,
    "cacheInUse" : false,
    "cacheSize" : 0,
    "cacheUsage" : 0
  },
  "keyOptions" : {
    "allowUserKeys" : true,
    "type" : "traditional",
    "lastValue" : 107
  },
  "globallyUniqueId" : "h5FFC6746B13/102",
  "statusString" : "loaded",
  "id" : "102",
  "count" : 1,
  "isSystem" : false
}

@@ -0,0 +1,28 @@
shell> curl --header 'accept: application/json' --dump - http://localhost:8529/_api/engine

HTTP/1.1 OK
content-type: application/json; charset=utf-8
x-content-type-options: nosniff

{
  "name" : "rocksdb",
  "supports" : {
    "dfdb" : false,
    "indexes" : [
      "primary",
      "edge",
      "hash",
      "skiplist",
      "ttl",
      "persistent",
      "geo",
      "fulltext"
    ],
    "aliases" : {
      "indexes" : {
        "skiplist" : "persistent",
        "hash" : "persistent"
      }
    }
  }
}

@@ -1,10 +0,0 @@
arangosh> require("internal").wal.properties();
{
  "allowOversizeEntries" : true,
  "logfileSize" : 33554432,
  "historicLogfiles" : 10,
  "reserveLogfiles" : 3,
  "syncInterval" : 100,
  "throttleWait" : 15000,
  "throttleWhenPending" : 0
}

@@ -0,0 +1,3 @@
arangosh> require("internal").wal.properties();
{
}

@@ -1,12 +0,0 @@
arangosh> require("internal").wal.properties({
........> allowOverSizeEntries: true,
........> logfileSize: 32 * 1024 * 1024 });
{
  "allowOversizeEntries" : true,
  "logfileSize" : 33554432,
  "historicLogfiles" : 10,
  "reserveLogfiles" : 3,
  "syncInterval" : 100,
  "throttleWait" : 15000,
  "throttleWhenPending" : 0
}

@@ -0,0 +1,5 @@
arangosh> require("internal").wal.properties({
........> allowOverSizeEntries: true,
........> logfileSize: 32 * 1024 * 1024 });
{
}

@@ -237,8 +237,10 @@ Here is how its details work:
- an example start is marked with *@EXAMPLE_ARANGOSH_OUTPUT* or *@EXAMPLE_ARANGOSH_RUN*
- the example is named by the string provided in brackets after the above key
- the output is written to `Documentation/Examples/<name>.generated`
- if your example depends on a storage engine, append `_rocksdb` or `_mmfiles` to `<name>` so it is run against a server using that storage engine (see the sketch below)
- examples end with *@END_EXAMPLE_[OUTPUT|RUN|AQL]*
- all code in between is executed as javascript in the **arangosh** while talking to a valid **arangod**.
- you should strive to group your examples by naming them with a common prefix per topic.
You may inspect the generated js code in `/tmp/arangosh.examples.js`
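For instance, an engine-specific variant of a RUN example could look like the following sketch (the name `RestEngineDemo_rocksdb` is made up for illustration; its output would end up in `Documentation/Examples/RestEngineDemo_rocksdb.generated` and the block would only be executed against a RocksDB server):

@EXAMPLE_ARANGOSH_RUN{RestEngineDemo_rocksdb}
var response = logCurlRequest('GET', '/_api/engine');
assert(response.code === 200);
logJsonResponse(response);
@END_EXAMPLE_ARANGOSH_RUN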

## OUTPUT, RUN and AQL specifics

@@ -98,6 +98,22 @@ const htmlAppender = function(text) {
const rawAppender = function(text) {
output += text;
};

const plainAppender = function(text) {
// do we have a line that could be json? try to parse & format it.
if (text.match(/^{.*}$/) || text.match(/^[.*]$/)) {
try {
let parsed = JSON.parse(text);
output += highlight("js", internal.inspect(parsed)) + "↩\n" ;
} catch (x) {
// fallback to plain text.
output += text;
}
} else {
output += text;
}
};

const shellAppender = function(text) {
output += highlight("shell", text);
};

@@ -108,6 +124,7 @@ const log = function (a) {
};

var logCurlRequestRaw = internal.appendCurlRequest(shellAppender, jsonAppender, rawAppender);
var logCurlRequestPlain = internal.appendCurlRequest(shellAppender, jsonAppender, plainAppender);
var logCurlRequest = function () {
if ((arguments.length > 1) &&
(arguments[1] !== undefined) &&
@@ -139,6 +156,7 @@ var logJsonResponse = internal.appendJsonResponse(rawAppender, jsonAppender);
var logJsonLResponse = internal.appendJsonLResponse(rawAppender, jsonLAppender);
var logHtmlResponse = internal.appendRawResponse(rawAppender, htmlAppender);
var logRawResponse = internal.appendRawResponse(rawAppender, rawAppender);
var logPlainResponse = internal.appendPlainResponse(plainAppender, plainAppender);
var logErrorResponse = function (response) {
allErrors += "Server reply was: " + JSON.stringify(response) + "\n";
};

@@ -1,20 +1,5 @@
/* jshint -W051:true */
/* eslint-disable */

let appendHeaders = function(appender, headers) {
var key;
// generate header
appender('HTTP/1.1 ' + headers['http/1.1'] + '\n');

for (key in headers) {
if (headers.hasOwnProperty(key)) {
if (key !== 'http/1.1' && key !== 'server' && key !== 'connection'
&& key !== 'content-length') {
appender(key + ': ' + headers[key] + '\n');
}
}
}
};

;(function () {
'use strict'
@@ -226,6 +211,21 @@ let appendHeaders = function(appender, headers) {
};
};

let appendHeaders = function(appender, headers) {
var key;
// generate header
appender('HTTP/1.1 ' + headers['http/1.1'] + '\n');

for (key in headers) {
if (headers.hasOwnProperty(key)) {
if (key !== 'http/1.1' && key !== 'server' && key !== 'connection'
&& key !== 'content-length') {
appender(key + ': ' + headers[key] + '\n');
}
}
}
};

// //////////////////////////////////////////////////////////////////////////////
// / @brief logs a raw response
// //////////////////////////////////////////////////////////////////////////////
@@ -237,12 +237,37 @@ let appendHeaders = function(appender, headers) {

// append body
if (response.body !== undefined) {
syntaxAppender(exports.inspect(response.body));
syntaxAppender(internal.inspect(response.body));
appender('\n');
}
};
};

// //////////////////////////////////////////////////////////////////////////////
// / @brief logs a raw response - don't string escape etc.
// //////////////////////////////////////////////////////////////////////////////

exports.appendPlainResponse = function (appender, syntaxAppender) {
return function (response) {
appendHeaders(appender, response.headers);
appender('\n');

// append body
if (response.body !== undefined) {
let splitted = response.body.split(/\r\n|\r|\n/);
if (splitted.length > 0) {
splitted.forEach(function (line) {
syntaxAppender(line);
appender('\n');
});
} else {
syntaxAppender(response.body);
appender('\n');
}
}
};
};

// //////////////////////////////////////////////////////////////////////////////
// / @brief logs a response in JSON
// //////////////////////////////////////////////////////////////////////////////

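As a usage sketch of the new helper (illustrative only: it assumes we are inside the same module, hence the direct use of `exports`, and the response object is made up), the returned logger prints the headers and then hands the body to the syntax appender line by line, so wiring in the plainAppender from the previous file pretty-prints the JSON lines of a multipart body:

var out = "";
var collect = function (text) { out += text; };                // plain collector instead of a highlighter
var logPlain = exports.appendPlainResponse(collect, collect);
logPlain({
  headers: { 'http/1.1': '200 OK', 'content-type': 'multipart/form-data' },
  body: '--SomeBoundaryValue\r\n{"error":false,"code":200}\r\n--SomeBoundaryValue--'
});
// out now contains "HTTP/1.1 200 OK", the content-type header and one line per body part;
// with plainAppender as the syntax appender, the JSON line would additionally be parsed and highlighted.
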
@@ -2,6 +2,7 @@
/*global start_pretty_print */
'use strict';

const _ = require("lodash");
const fs = require("fs");
const internal = require("internal");
const executeExternal = internal.executeExternal;

@@ -119,134 +120,144 @@ function main(argv) {
args = args.concat(['--arangoshSetup']);
args = args.concat(documentationSourceDirs);

let res = executeExternalAndWait(thePython, args);
let storageEngines = [['mmfiles', false], ['rocksdb', true]];
let res;

if (res.exit !== 0) {
print("parsing the examples failed - aborting!");
print(res);
return -1;
}
storageEngines.forEach(function (engine) {
let pyArgs = _.clone(args);
pyArgs.push('--storageEngine');
pyArgs.push(engine[0]);
pyArgs.push('--storageEngineAgnostic');
pyArgs.push(engine[1]);
print(pyArgs)
res = executeExternalAndWait(thePython, pyArgs);

if (startServer) {
let port = findFreePort();
instanceInfo.port = port;
serverEndpoint = protocol + "://127.0.0.1:" + port;

instanceInfo.url = endpointToURL(serverEndpoint);

fs.makeDirectoryRecursive(fs.join(tmpDataDir, "data"));

let serverArgs = {};
fs.makeDirectoryRecursive(fs.join(tmpDataDir, "apps"));

serverArgs["configuration"] = "none";
serverArgs["database.directory"] = fs.join(tmpDataDir, "data");
serverArgs["javascript.app-path"] = fs.join(tmpDataDir, "apps");
serverArgs["javascript.startup-directory"] = "js";
serverArgs["javascript.module-directory"] = "enterprise/js";
serverArgs["log.file"] = fs.join(tmpDataDir, "log");
serverArgs["server.authentication"] = "false";
serverArgs["server.endpoint"] = serverEndpoint;
serverArgs["server.storage-engine"] = "mmfiles"; // examples depend on it

print("================================================================================");
ARANGOD = locateProgram("arangod", "Cannot find arangod to execute tests against");
print(ARANGOD);
print(toArgv(serverArgs));
instanceInfo.pid = executeExternal(ARANGOD, toArgv(serverArgs)).pid;

// Wait until the server is up:
count = 0;
instanceInfo.endpoint = serverEndpoint;

while (true) {
wait(0.5, false);
let r = download(instanceInfo.url + "/_api/version", "");

if (!r.error && r.code === 200) {
break;
}

count++;

if (count % 60 === 0) {
res = statusExternal(instanceInfo.pid, false);

if (res.status !== "RUNNING") {
print("start failed - process is gone: " + yaml.safeDump(res));
return 1;
}
}
if (res.exit !== 0) {
print("parsing the examples failed - aborting!");
print(res);
return -1;
}
}

let arangoshArgs = {
'configuration': fs.join(fs.makeAbsolute(''), 'etc', 'relative', 'arangosh.conf'),
'server.password': "",
'server.endpoint': serverEndpoint,
'javascript.execute': scriptArguments.outputFile
};
if (startServer) {
let port = findFreePort();
instanceInfo.port = port;
serverEndpoint = protocol + "://127.0.0.1:" + port;

print("--------------------------------------------------------------------------------");
ARANGOSH = locateProgram("arangosh", "Cannot find arangosh to run tests with");
print(ARANGOSH);
print(internal.toArgv(arangoshArgs));
res = executeExternalAndWait(ARANGOSH, internal.toArgv(arangoshArgs));
instanceInfo.url = endpointToURL(serverEndpoint);

if (startServer) {
if (typeof(instanceInfo.exitStatus) === 'undefined') {
download(instanceInfo.url + "/_admin/shutdown", "", {method: "DELETE"});
fs.makeDirectoryRecursive(fs.join(tmpDataDir, engine[0], "data"));

print("Waiting for server shut down");
let serverArgs = {};
fs.makeDirectoryRecursive(fs.join(tmpDataDir, engine[0], "apps"));

serverArgs["configuration"] = "none";
serverArgs["database.directory"] = fs.join(tmpDataDir, engine[0], "data");
serverArgs["javascript.app-path"] = fs.join(tmpDataDir, engine[0], "apps");
serverArgs["javascript.startup-directory"] = "js";
serverArgs["javascript.module-directory"] = "enterprise/js";
serverArgs["log.file"] = fs.join(tmpDataDir, engine[0], "log");
serverArgs["server.authentication"] = "false";
serverArgs["server.endpoint"] = serverEndpoint;
serverArgs["server.storage-engine"] = engine[0]

print("================================================================================");
ARANGOD = locateProgram("arangod", "Cannot find arangod to execute tests against");
print(ARANGOD);
print(toArgv(serverArgs));
instanceInfo.pid = executeExternal(ARANGOD, toArgv(serverArgs)).pid;

// Wait until the server is up:
count = 0;
let bar = "[";
instanceInfo.endpoint = serverEndpoint;

while (1) {
instanceInfo.exitStatus = statusExternal(instanceInfo.pid, false);
while (true) {
wait(0.5, false);
let r = download(instanceInfo.url + "/_api/version", "");

if (instanceInfo.exitStatus.status === "RUNNING") {
count++;
if (typeof(options.valgrind) === 'string') {
wait(1);
continue;
if (!r.error && r.code === 200) {
break;
}

count++;

if (count % 60 === 0) {
res = statusExternal(instanceInfo.pid, false);

if (res.status !== "RUNNING") {
print("start failed - process is gone: " + yaml.safeDump(res));
return 1;
}
if (count % 10 === 0) {
bar = bar + "#";
}
if (count > 600) {
print("forcefully terminating " + yaml.safeDump(instanceInfo.pid) +
" after 600 s grace period; marking crashy.");
serverCrashed = true;
killExternal(instanceInfo.pid);
break;
} else {
wait(1);
}
} else if (instanceInfo.exitStatus.status !== "TERMINATED") {
if (instanceInfo.exitStatus.hasOwnProperty('signal')) {
print("Server shut down with : " +
yaml.safeDump(instanceInfo.exitStatus) +
" marking build as crashy.");
serverCrashed = true;
break;
}
if (internal.platform.substr(0, 3) === 'win') {
// Windows: wait for procdump to do its job...
statusExternal(instanceInfo.monitor, true);
}
} else {
print("Server shutdown: Success.");
break; // Success.
}
}

if (count > 10) {
print("long Server shutdown: " + bar + ']');
}

}
}

let arangoshArgs = {
'configuration': fs.join(fs.makeAbsolute(''), 'etc', 'relative', 'arangosh.conf'),
'server.password': "",
'server.endpoint': serverEndpoint,
'javascript.execute': scriptArguments.outputFile
};

print("--------------------------------------------------------------------------------");
ARANGOSH = locateProgram("arangosh", "Cannot find arangosh to run tests with");
print(ARANGOSH);
print(internal.toArgv(arangoshArgs));
res = executeExternalAndWait(ARANGOSH, internal.toArgv(arangoshArgs));

if (startServer) {
if (typeof(instanceInfo.exitStatus) === 'undefined') {
download(instanceInfo.url + "/_admin/shutdown", "", {method: "DELETE"});

print("Waiting for server shut down");
count = 0;
let bar = "[";

while (1) {
instanceInfo.exitStatus = statusExternal(instanceInfo.pid, false);

if (instanceInfo.exitStatus.status === "RUNNING") {
count++;
if (typeof(options.valgrind) === 'string') {
wait(1);
continue;
}
if (count % 10 === 0) {
bar = bar + "#";
}
if (count > 600) {
print("forcefully terminating " + yaml.safeDump(instanceInfo.pid) +
" after 600 s grace period; marking crashy.");
serverCrashed = true;
killExternal(instanceInfo.pid);
break;
} else {
wait(1);
}
} else if (instanceInfo.exitStatus.status !== "TERMINATED") {
if (instanceInfo.exitStatus.hasOwnProperty('signal')) {
print("Server shut down with : " +
yaml.safeDump(instanceInfo.exitStatus) +
" marking build as crashy.");
serverCrashed = true;
break;
}
if (internal.platform.substr(0, 3) === 'win') {
// Windows: wait for procdump to do its job...
statusExternal(instanceInfo.monitor, true);
}
} else {
print("Server shutdown: Success.");
break; // Success.
}
}

if (count > 10) {
print("long Server shutdown: " + bar + ']');
}

}
}
});
if (res.exit != 0) {
throw("generating examples failed!");
}

@@ -128,7 +128,13 @@ OPTION_ARANGOSH_SETUP = 1
OPTION_OUTPUT_DIR = 2
OPTION_FILTER = 3
OPTION_OUTPUT_FILE = 4
OPTION_OUTPUT_ENGINE = 5
OPTION_OUTPUT_FILTER_NONMATCHING = 6

engines = ["mmfiles", "rocksdb"]
engine = "mmfiles"
otherEngine = "mmfiles"
storageEngineAgnostic = True
escapeBS = re.compile("\\\\")
doubleBS = "\\\\\\\\"

@@ -198,17 +204,17 @@ def matchStartLine(line, filename):
if m:
strip = m.group(1)
name = m.group(2)

if name in AQLFiles:
print >> sys.stderr, "%s\nduplicate test name '%s' in file %s!\n%s\n" % ('#' * 80, name, filename, '#' * 80)
sys.exit(1)

# if we match for filters, only output these!
if ((FilterForTestcase != None) and not FilterForTestcase.match(name)):
print >> sys.stderr, "AQL: filtering out testcase '%s'" %name
filterTestList.append(name)
return("", STATE_BEGIN);

AQLFiles[name] = True
return (name, STATE_AQL)

@@ -257,10 +263,10 @@ AQLEXPLAIN="aql_explain"
### @brief loop over the lines of one input file
################################################################################

def analyzeFile(f, filename):
def analyzeFile(f, filename):
global RunTests, TESTLINES, TYPE, LINE_NO, STRING
strip = None

name = ""
partialCmd = ""
partialLine = ""
@@ -280,7 +286,7 @@ def analyzeFile(f, filename):

if state == STATE_BEGIN:
(name, state) = matchStartLine(line, filename)
if state != STATE_BEGIN:
if state != STATE_BEGIN:
MapSourceFiles[name] = filename
RunTests[name] = {}
RunTests[name][TYPE] = state
@@ -315,7 +321,7 @@ def analyzeFile(f, filename):
# we are within a example
line = line[len(strip):]
showCmd = True

# end-example test
m = endExample.match(line)

@@ -601,18 +607,18 @@ if (allErrors.length > 0) {
################################################################################

def loopDirectories():
global ArangoshSetup, OutputDir, FilterForTestcase
global ArangoshSetup, OutputDir, FilterForTestcase, storageEngineAgnostic, engine, otherEngine
argv = sys.argv
argv.pop(0)
filenames = []
fstate = OPTION_NORMAL

for filename in argv:
if filename == "--arangoshSetup":
fstate = OPTION_ARANGOSH_SETUP
continue

if filename == "--onlyThisOne":
if filename == "--onlyThisOne":
fstate = OPTION_FILTER
continue
@@ -624,6 +630,14 @@ def loopDirectories():
fstate = OPTION_OUTPUT_FILE
continue

if filename == "--storageEngine":
fstate = OPTION_OUTPUT_ENGINE
continue

if filename == "--storageEngineAgnostic":
fstate = OPTION_OUTPUT_FILTER_NONMATCHING
continue

if fstate == OPTION_NORMAL:
if os.path.isdir(filename):
for root, dirs, files in os.walk(filename):
@@ -635,17 +649,17 @@ def loopDirectories():

elif fstate == OPTION_FILTER:
fstate = OPTION_NORMAL
if (len(filename) > 0):
if (len(filename) > 0):
FilterForTestcase = re.compile(filename);

elif fstate == OPTION_ARANGOSH_SETUP:
fstate = OPTION_NORMAL
f = open(filename, "r")

for line in f:
line = line.rstrip('\n')
ArangoshSetup += line + "\n"

f.close()

elif fstate == OPTION_OUTPUT_DIR:
@@ -656,21 +670,41 @@ def loopDirectories():
fstate = OPTION_NORMAL
sys.stdout = open(filename, 'w')

elif fstate == OPTION_OUTPUT_ENGINE:
fstate = OPTION_NORMAL
engine = filename
if engine == engines[0]:
otherEngine = engines[1]
else:
otherEngine = engines[0]

elif fstate == OPTION_OUTPUT_FILTER_NONMATCHING:
fstate = OPTION_NORMAL
storageEngineAgnostic = filename == "true"

for filename in filenames:
if (filename.find("#") < 0):
f = open(filename, "r")
analyzeFile(f, filename)

f.close()
else:
print >> sys.stderr, "skipping %s\n" % (filename)

def generateTestCases():
global TESTLINES, TYPE, LINE_NO, STRING, RunTests
global TESTLINES, TYPE, LINE_NO, STRING, RunTests, storageEngineAgnostic, engine, otherEngine
testNames = RunTests.keys()
testNames.sort()

for thisTest in testNames:
if thisTest.endswith(otherEngine):
print >> sys.stderr, "skipping " + thisTest
continue
if not storageEngineAgnostic and not thisTest.endswith(engine):
print >> sys.stderr, "skipping " + thisTest
continue

if RunTests[thisTest][TYPE] == STATE_ARANGOSH_OUTPUT:
generateArangoshOutput(thisTest)
elif RunTests[thisTest][TYPE] == STATE_ARANGOSH_RUN:

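Taken together (a hedged reading of the driver change above and these Python options, not wording from the commit itself): each `storageEngines` entry is `[engineName, runEngineAgnosticExamples]`, forwarded as `--storageEngine` and `--storageEngineAgnostic`; generateTestCases() then skips every example whose name carries the other engine's suffix and, when the flag is false, everything that is not explicitly suffixed for the current engine. A compact JavaScript sketch of that selection rule:

// illustrative only; mirrors the filtering in generateTestCases() above
var shouldGenerate = function (name, engine, otherEngine, agnostic) {
  if (name.endsWith(otherEngine)) { return false; }            // belongs to the other storage engine
  if (!agnostic && !name.endsWith(engine)) { return false; }   // agnostic examples only run in the agnostic pass
  return true;
};
shouldGenerate("RestEngine_mmfiles", "rocksdb", "mmfiles", true);   // -> false
shouldGenerate("RestReplicationDump", "rocksdb", "mmfiles", true);  // -> true
shouldGenerate("RestReplicationDump", "mmfiles", "rocksdb", false); // -> false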